##// END OF EJS Templates
core: removed pyro4 from Enterprise code. Fixes #5198
marcink -
r1409:c1ce56be default
parent child Browse files
Show More
@@ -1,681 +1,678 b''
1
1
2
2
3 ################################################################################
3 ################################################################################
4 ## RHODECODE COMMUNITY EDITION CONFIGURATION ##
4 ## RHODECODE COMMUNITY EDITION CONFIGURATION ##
5 # The %(here)s variable will be replaced with the parent directory of this file#
5 # The %(here)s variable will be replaced with the parent directory of this file#
6 ################################################################################
6 ################################################################################
7
7
8 [DEFAULT]
8 [DEFAULT]
9 debug = true
9 debug = true
10
10
11 ################################################################################
11 ################################################################################
12 ## EMAIL CONFIGURATION ##
12 ## EMAIL CONFIGURATION ##
13 ## Uncomment and replace with the email address which should receive ##
13 ## Uncomment and replace with the email address which should receive ##
14 ## any error reports after an application crash ##
14 ## any error reports after an application crash ##
15 ## Additionally these settings will be used by the RhodeCode mailing system ##
15 ## Additionally these settings will be used by the RhodeCode mailing system ##
16 ################################################################################
16 ################################################################################
17
17
18 ## prefix all emails subjects with given prefix, helps filtering out emails
18 ## prefix all emails subjects with given prefix, helps filtering out emails
19 #email_prefix = [RhodeCode]
19 #email_prefix = [RhodeCode]
20
20
21 ## email FROM address all mails will be sent
21 ## email FROM address all mails will be sent
22 #app_email_from = rhodecode-noreply@localhost
22 #app_email_from = rhodecode-noreply@localhost
23
23
24 ## Uncomment and replace with the address which should receive any error report
24 ## Uncomment and replace with the address which should receive any error report
25 ## note: using appenlight for error handling doesn't need this to be uncommented
25 ## note: using appenlight for error handling doesn't need this to be uncommented
26 #email_to = admin@localhost
26 #email_to = admin@localhost
27
27
28 ## in case of Application errors, send an error email from this address
28 ## in case of Application errors, send an error email from this address
29 #error_email_from = rhodecode_error@localhost
29 #error_email_from = rhodecode_error@localhost
30
30
31 ## additional error message to be sent in case of server crash
31 ## additional error message to be sent in case of server crash
32 #error_message =
32 #error_message =
33
33
34
34
35 #smtp_server = mail.server.com
35 #smtp_server = mail.server.com
36 #smtp_username =
36 #smtp_username =
37 #smtp_password =
37 #smtp_password =
38 #smtp_port =
38 #smtp_port =
39 #smtp_use_tls = false
39 #smtp_use_tls = false
40 #smtp_use_ssl = true
40 #smtp_use_ssl = true
41 ## Specify available auth parameters here (e.g. LOGIN PLAIN CRAM-MD5, etc.)
41 ## Specify available auth parameters here (e.g. LOGIN PLAIN CRAM-MD5, etc.)
42 #smtp_auth =
42 #smtp_auth =
43
43
44 [server:main]
44 [server:main]
45 ## COMMON ##
45 ## COMMON ##
46 host = 127.0.0.1
46 host = 127.0.0.1
47 port = 5000
47 port = 5000
48
48
49 ##################################
49 ##################################
50 ## WAITRESS WSGI SERVER ##
50 ## WAITRESS WSGI SERVER ##
51 ## Recommended for Development ##
51 ## Recommended for Development ##
52 ##################################
52 ##################################
53
53
54 use = egg:waitress#main
54 use = egg:waitress#main
55 ## number of worker threads
55 ## number of worker threads
56 threads = 5
56 threads = 5
57 ## MAX BODY SIZE 100GB
57 ## MAX BODY SIZE 100GB
58 max_request_body_size = 107374182400
58 max_request_body_size = 107374182400
59 ## Use poll instead of select, fixes file descriptors limits problems.
59 ## Use poll instead of select, fixes file descriptors limits problems.
60 ## May not work on old windows systems.
60 ## May not work on old windows systems.
61 asyncore_use_poll = true
61 asyncore_use_poll = true
62
62
63
63
64 ##########################
64 ##########################
65 ## GUNICORN WSGI SERVER ##
65 ## GUNICORN WSGI SERVER ##
66 ##########################
66 ##########################
67 ## run with gunicorn --log-config rhodecode.ini --paste rhodecode.ini
67 ## run with gunicorn --log-config rhodecode.ini --paste rhodecode.ini
68
68
69 #use = egg:gunicorn#main
69 #use = egg:gunicorn#main
70 ## Sets the number of process workers. You must set `instance_id = *`
70 ## Sets the number of process workers. You must set `instance_id = *`
71 ## when this option is set to more than one worker, recommended
71 ## when this option is set to more than one worker, recommended
72 ## value is (2 * NUMBER_OF_CPUS + 1), eg 2CPU = 5 workers
72 ## value is (2 * NUMBER_OF_CPUS + 1), eg 2CPU = 5 workers
73 ## The `instance_id = *` must be set in the [app:main] section below
73 ## The `instance_id = *` must be set in the [app:main] section below
74 #workers = 2
74 #workers = 2
75 ## number of threads for each of the worker, must be set to 1 for gevent
75 ## number of threads for each of the worker, must be set to 1 for gevent
76 ## generally recommended to be at 1
76 ## generally recommended to be at 1
77 #threads = 1
77 #threads = 1
78 ## process name
78 ## process name
79 #proc_name = rhodecode
79 #proc_name = rhodecode
80 ## type of worker class, one of sync, gevent
80 ## type of worker class, one of sync, gevent
81 ## recommended for bigger setups is using one of the workers other than sync
81 ## recommended for bigger setups is using one of the workers other than sync
82 #worker_class = sync
82 #worker_class = sync
83 ## The maximum number of simultaneous clients. Valid only for Gevent
83 ## The maximum number of simultaneous clients. Valid only for Gevent
84 #worker_connections = 10
84 #worker_connections = 10
85 ## max number of requests that worker will handle before being gracefully
85 ## max number of requests that worker will handle before being gracefully
86 ## restarted, could prevent memory leaks
86 ## restarted, could prevent memory leaks
87 #max_requests = 1000
87 #max_requests = 1000
88 #max_requests_jitter = 30
88 #max_requests_jitter = 30
89 ## amount of time a worker can spend with handling a request before it
89 ## amount of time a worker can spend with handling a request before it
90 ## gets killed and restarted. Set to 6hrs
90 ## gets killed and restarted. Set to 6hrs
91 #timeout = 21600
91 #timeout = 21600
92
92
93
93
94 ## prefix middleware for RhodeCode.
94 ## prefix middleware for RhodeCode.
95 ## recommended when using proxy setup.
95 ## recommended when using proxy setup.
96 ## allows to set RhodeCode under a prefix in server.
96 ## allows to set RhodeCode under a prefix in server.
97 ## eg https://server.com/custom_prefix. Enable `filter-with =` option below as well.
97 ## eg https://server.com/custom_prefix. Enable `filter-with =` option below as well.
98 ## And set your prefix like: `prefix = /custom_prefix`
98 ## And set your prefix like: `prefix = /custom_prefix`
99 ## be sure to also set beaker.session.cookie_path = /custom_prefix if you need
99 ## be sure to also set beaker.session.cookie_path = /custom_prefix if you need
100 ## to make your cookies only work on prefix url
100 ## to make your cookies only work on prefix url
101 [filter:proxy-prefix]
101 [filter:proxy-prefix]
102 use = egg:PasteDeploy#prefix
102 use = egg:PasteDeploy#prefix
103 prefix = /
103 prefix = /
104
104
105 [app:main]
105 [app:main]
106 use = egg:rhodecode-enterprise-ce
106 use = egg:rhodecode-enterprise-ce
107
107
108 ## enable proxy prefix middleware, defined above
108 ## enable proxy prefix middleware, defined above
109 #filter-with = proxy-prefix
109 #filter-with = proxy-prefix
110
110
111 # During development we want to have the debug toolbar enabled
111 # During development we want to have the debug toolbar enabled
112 pyramid.includes =
112 pyramid.includes =
113 pyramid_debugtoolbar
113 pyramid_debugtoolbar
114 rhodecode.utils.debugtoolbar
114 rhodecode.utils.debugtoolbar
115 rhodecode.lib.middleware.request_wrapper
115 rhodecode.lib.middleware.request_wrapper
116
116
117 pyramid.reload_templates = true
117 pyramid.reload_templates = true
118
118
119 debugtoolbar.hosts = 0.0.0.0/0
119 debugtoolbar.hosts = 0.0.0.0/0
120 debugtoolbar.exclude_prefixes =
120 debugtoolbar.exclude_prefixes =
121 /css
121 /css
122 /fonts
122 /fonts
123 /images
123 /images
124 /js
124 /js
125
125
126 ## RHODECODE PLUGINS ##
126 ## RHODECODE PLUGINS ##
127 rhodecode.includes =
127 rhodecode.includes =
128 rhodecode.api
128 rhodecode.api
129
129
130
130
131 # api prefix url
131 # api prefix url
132 rhodecode.api.url = /_admin/api
132 rhodecode.api.url = /_admin/api
133
133
134
134
135 ## END RHODECODE PLUGINS ##
135 ## END RHODECODE PLUGINS ##
136
136
137 ## encryption key used to encrypt social plugin tokens,
137 ## encryption key used to encrypt social plugin tokens,
138 ## remote_urls with credentials etc, if not set it defaults to
138 ## remote_urls with credentials etc, if not set it defaults to
139 ## `beaker.session.secret`
139 ## `beaker.session.secret`
140 #rhodecode.encrypted_values.secret =
140 #rhodecode.encrypted_values.secret =
141
141
142 ## decryption strict mode (enabled by default). It controls if decryption raises
142 ## decryption strict mode (enabled by default). It controls if decryption raises
143 ## `SignatureVerificationError` in case of wrong key, or damaged encryption data.
143 ## `SignatureVerificationError` in case of wrong key, or damaged encryption data.
144 #rhodecode.encrypted_values.strict = false
144 #rhodecode.encrypted_values.strict = false
145
145
146 ## return gzipped responses from Rhodecode (static files/application)
146 ## return gzipped responses from Rhodecode (static files/application)
147 gzip_responses = false
147 gzip_responses = false
148
148
149 ## autogenerate javascript routes file on startup
149 ## autogenerate javascript routes file on startup
150 generate_js_files = false
150 generate_js_files = false
151
151
152 ## Optional Languages
152 ## Optional Languages
153 ## en(default), be, de, es, fr, it, ja, pl, pt, ru, zh
153 ## en(default), be, de, es, fr, it, ja, pl, pt, ru, zh
154 lang = en
154 lang = en
155
155
156 ## perform a full repository scan on each server start, this should be
156 ## perform a full repository scan on each server start, this should be
157 ## set to false after first startup, to allow faster server restarts.
157 ## set to false after first startup, to allow faster server restarts.
158 startup.import_repos = false
158 startup.import_repos = false
159
159
160 ## Uncomment and set this path to use archive download cache.
160 ## Uncomment and set this path to use archive download cache.
161 ## Once enabled, generated archives will be cached at this location
161 ## Once enabled, generated archives will be cached at this location
162 ## and served from the cache during subsequent requests for the same archive of
162 ## and served from the cache during subsequent requests for the same archive of
163 ## the repository.
163 ## the repository.
164 #archive_cache_dir = /tmp/tarballcache
164 #archive_cache_dir = /tmp/tarballcache
165
165
166 ## change this to unique ID for security
166 ## change this to unique ID for security
167 app_instance_uuid = rc-production
167 app_instance_uuid = rc-production
168
168
169 ## cut off limit for large diffs (size in bytes)
169 ## cut off limit for large diffs (size in bytes)
170 cut_off_limit_diff = 1024000
170 cut_off_limit_diff = 1024000
171 cut_off_limit_file = 256000
171 cut_off_limit_file = 256000
172
172
173 ## use cache version of scm repo everywhere
173 ## use cache version of scm repo everywhere
174 vcs_full_cache = true
174 vcs_full_cache = true
175
175
176 ## force https in RhodeCode, fixes https redirects, assumes it's always https
176 ## force https in RhodeCode, fixes https redirects, assumes it's always https
177 ## Normally this is controlled by proper http flags sent from http server
177 ## Normally this is controlled by proper http flags sent from http server
178 force_https = false
178 force_https = false
179
179
180 ## use Strict-Transport-Security headers
180 ## use Strict-Transport-Security headers
181 use_htsts = false
181 use_htsts = false
182
182
183 ## number of commits stats will parse on each iteration
183 ## number of commits stats will parse on each iteration
184 commit_parse_limit = 25
184 commit_parse_limit = 25
185
185
186 ## git rev filter option, --all is the default filter, if you need to
186 ## git rev filter option, --all is the default filter, if you need to
187 ## hide all refs in changelog switch this to --branches --tags
187 ## hide all refs in changelog switch this to --branches --tags
188 git_rev_filter = --branches --tags
188 git_rev_filter = --branches --tags
189
189
190 # Set to true if your repos are exposed using the dumb protocol
190 # Set to true if your repos are exposed using the dumb protocol
191 git_update_server_info = false
191 git_update_server_info = false
192
192
193 ## RSS/ATOM feed options
193 ## RSS/ATOM feed options
194 rss_cut_off_limit = 256000
194 rss_cut_off_limit = 256000
195 rss_items_per_page = 10
195 rss_items_per_page = 10
196 rss_include_diff = false
196 rss_include_diff = false
197
197
198 ## gist URL alias, used to create nicer urls for gist. This should be an
198 ## gist URL alias, used to create nicer urls for gist. This should be an
199 ## url that does rewrites to _admin/gists/{gistid}.
199 ## url that does rewrites to _admin/gists/{gistid}.
200 ## example: http://gist.rhodecode.org/{gistid}. Empty means use the internal
200 ## example: http://gist.rhodecode.org/{gistid}. Empty means use the internal
201 ## RhodeCode url, ie. http[s]://rhodecode.server/_admin/gists/{gistid}
201 ## RhodeCode url, ie. http[s]://rhodecode.server/_admin/gists/{gistid}
202 gist_alias_url =
202 gist_alias_url =
203
203
204 ## List of controllers (using glob pattern syntax) that AUTH TOKENS could be
204 ## List of controllers (using glob pattern syntax) that AUTH TOKENS could be
205 ## used for access.
205 ## used for access.
206 ## Adding ?auth_token=TOKEN_HASH to the url authenticates this request as if it
206 ## Adding ?auth_token=TOKEN_HASH to the url authenticates this request as if it
207 ## came from the logged-in user who owns this authentication token.
207 ## came from the logged-in user who owns this authentication token.
208 ##
208 ##
209 ## Syntax is ControllerClass:function_pattern.
209 ## Syntax is ControllerClass:function_pattern.
210 ## To enable access to raw_files put `FilesController:raw`.
210 ## To enable access to raw_files put `FilesController:raw`.
211 ## To enable access to patches add `ChangesetController:changeset_patch`.
211 ## To enable access to patches add `ChangesetController:changeset_patch`.
212 ## The list should be "," separated and on a single line.
212 ## The list should be "," separated and on a single line.
213 ##
213 ##
214 ## Recommended controllers to enable:
214 ## Recommended controllers to enable:
215 # ChangesetController:changeset_patch,
215 # ChangesetController:changeset_patch,
216 # ChangesetController:changeset_raw,
216 # ChangesetController:changeset_raw,
217 # FilesController:raw,
217 # FilesController:raw,
218 # FilesController:archivefile,
218 # FilesController:archivefile,
219 # GistsController:*,
219 # GistsController:*,
220 api_access_controllers_whitelist =
220 api_access_controllers_whitelist =
221
221
222 ## default encoding used to convert from and to unicode
222 ## default encoding used to convert from and to unicode
223 ## can be also a comma separated list of encoding in case of mixed encodings
223 ## can be also a comma separated list of encoding in case of mixed encodings
224 default_encoding = UTF-8
224 default_encoding = UTF-8
225
225
226 ## instance-id prefix
226 ## instance-id prefix
227 ## a prefix key for this instance used for cache invalidation when running
227 ## a prefix key for this instance used for cache invalidation when running
228 ## multiple instances of rhodecode, make sure it's globally unique for
228 ## multiple instances of rhodecode, make sure it's globally unique for
229 ## all running rhodecode instances. Leave empty if you don't use it
229 ## all running rhodecode instances. Leave empty if you don't use it
230 instance_id =
230 instance_id =
231
231
232 ## Fallback authentication plugin. Set this to a plugin ID to force the usage
232 ## Fallback authentication plugin. Set this to a plugin ID to force the usage
233 ## of an authentication plugin also if it is disabled by it's settings.
233 ## of an authentication plugin also if it is disabled by it's settings.
234 ## This could be useful if you are unable to log in to the system due to broken
234 ## This could be useful if you are unable to log in to the system due to broken
235 ## authentication settings. Then you can enable e.g. the internal rhodecode auth
235 ## authentication settings. Then you can enable e.g. the internal rhodecode auth
236 ## module to log in again and fix the settings.
236 ## module to log in again and fix the settings.
237 ##
237 ##
238 ## Available builtin plugin IDs (hash is part of the ID):
238 ## Available builtin plugin IDs (hash is part of the ID):
239 ## egg:rhodecode-enterprise-ce#rhodecode
239 ## egg:rhodecode-enterprise-ce#rhodecode
240 ## egg:rhodecode-enterprise-ce#pam
240 ## egg:rhodecode-enterprise-ce#pam
241 ## egg:rhodecode-enterprise-ce#ldap
241 ## egg:rhodecode-enterprise-ce#ldap
242 ## egg:rhodecode-enterprise-ce#jasig_cas
242 ## egg:rhodecode-enterprise-ce#jasig_cas
243 ## egg:rhodecode-enterprise-ce#headers
243 ## egg:rhodecode-enterprise-ce#headers
244 ## egg:rhodecode-enterprise-ce#crowd
244 ## egg:rhodecode-enterprise-ce#crowd
245 #rhodecode.auth_plugin_fallback = egg:rhodecode-enterprise-ce#rhodecode
245 #rhodecode.auth_plugin_fallback = egg:rhodecode-enterprise-ce#rhodecode
246
246
247 ## alternative return HTTP header for failed authentication. Default HTTP
247 ## alternative return HTTP header for failed authentication. Default HTTP
248 ## response is 401 HTTPUnauthorized. Currently HG clients have troubles with
248 ## response is 401 HTTPUnauthorized. Currently HG clients have troubles with
249 ## handling that causing a series of failed authentication calls.
249 ## handling that causing a series of failed authentication calls.
250 ## Set this variable to 403 to return HTTPForbidden, or any other HTTP code
250 ## Set this variable to 403 to return HTTPForbidden, or any other HTTP code
251 ## This will be served instead of default 401 on bad authentication
251 ## This will be served instead of default 401 on bad authentication
252 auth_ret_code =
252 auth_ret_code =
253
253
254 ## use special detection method when serving auth_ret_code, instead of serving
254 ## use special detection method when serving auth_ret_code, instead of serving
255 ## ret_code directly, use 401 initially (Which triggers credentials prompt)
255 ## ret_code directly, use 401 initially (Which triggers credentials prompt)
256 ## and then serve auth_ret_code to clients
256 ## and then serve auth_ret_code to clients
257 auth_ret_code_detection = false
257 auth_ret_code_detection = false
258
258
259 ## locking return code. When repository is locked return this HTTP code. 2XX
259 ## locking return code. When repository is locked return this HTTP code. 2XX
260 ## codes don't break the transactions while 4XX codes do
260 ## codes don't break the transactions while 4XX codes do
261 lock_ret_code = 423
261 lock_ret_code = 423
262
262
263 ## allows to change the repository location in settings page
263 ## allows to change the repository location in settings page
264 allow_repo_location_change = true
264 allow_repo_location_change = true
265
265
266 ## allows to setup custom hooks in settings page
266 ## allows to setup custom hooks in settings page
267 allow_custom_hooks_settings = true
267 allow_custom_hooks_settings = true
268
268
269 ## generated license token, goto license page in RhodeCode settings to obtain
269 ## generated license token, goto license page in RhodeCode settings to obtain
270 ## new token
270 ## new token
271 license_token =
271 license_token =
272
272
273 ## supervisor connection uri, for managing supervisor and logs.
273 ## supervisor connection uri, for managing supervisor and logs.
274 supervisor.uri =
274 supervisor.uri =
275 ## supervisord group name/id we only want this RC instance to handle
275 ## supervisord group name/id we only want this RC instance to handle
276 supervisor.group_id = dev
276 supervisor.group_id = dev
277
277
278 ## Display extended labs settings
278 ## Display extended labs settings
279 labs_settings_active = true
279 labs_settings_active = true
280
280
281 ####################################
281 ####################################
282 ### CELERY CONFIG ####
282 ### CELERY CONFIG ####
283 ####################################
283 ####################################
284 use_celery = false
284 use_celery = false
285 broker.host = localhost
285 broker.host = localhost
286 broker.vhost = rabbitmqhost
286 broker.vhost = rabbitmqhost
287 broker.port = 5672
287 broker.port = 5672
288 broker.user = rabbitmq
288 broker.user = rabbitmq
289 broker.password = qweqwe
289 broker.password = qweqwe
290
290
291 celery.imports = rhodecode.lib.celerylib.tasks
291 celery.imports = rhodecode.lib.celerylib.tasks
292
292
293 celery.result.backend = amqp
293 celery.result.backend = amqp
294 celery.result.dburi = amqp://
294 celery.result.dburi = amqp://
295 celery.result.serialier = json
295 celery.result.serialier = json
296
296
297 #celery.send.task.error.emails = true
297 #celery.send.task.error.emails = true
298 #celery.amqp.task.result.expires = 18000
298 #celery.amqp.task.result.expires = 18000
299
299
300 celeryd.concurrency = 2
300 celeryd.concurrency = 2
301 #celeryd.log.file = celeryd.log
301 #celeryd.log.file = celeryd.log
302 celeryd.log.level = debug
302 celeryd.log.level = debug
303 celeryd.max.tasks.per.child = 1
303 celeryd.max.tasks.per.child = 1
304
304
305 ## tasks will never be sent to the queue, but executed locally instead.
305 ## tasks will never be sent to the queue, but executed locally instead.
306 celery.always.eager = false
306 celery.always.eager = false
307
307
308 ####################################
308 ####################################
309 ### BEAKER CACHE ####
309 ### BEAKER CACHE ####
310 ####################################
310 ####################################
311 # default cache dir for templates. Putting this into a ramdisk
311 # default cache dir for templates. Putting this into a ramdisk
312 ## can boost performance, eg. %(here)s/data_ramdisk
312 ## can boost performance, eg. %(here)s/data_ramdisk
313 cache_dir = %(here)s/data
313 cache_dir = %(here)s/data
314
314
315 ## locking and default file storage for Beaker. Putting this into a ramdisk
315 ## locking and default file storage for Beaker. Putting this into a ramdisk
316 ## can boost performance, eg. %(here)s/data_ramdisk/cache/beaker_data
316 ## can boost performance, eg. %(here)s/data_ramdisk/cache/beaker_data
317 beaker.cache.data_dir = %(here)s/data/cache/beaker_data
317 beaker.cache.data_dir = %(here)s/data/cache/beaker_data
318 beaker.cache.lock_dir = %(here)s/data/cache/beaker_lock
318 beaker.cache.lock_dir = %(here)s/data/cache/beaker_lock
319
319
320 beaker.cache.regions = super_short_term, short_term, long_term, sql_cache_short, auth_plugins, repo_cache_long
320 beaker.cache.regions = super_short_term, short_term, long_term, sql_cache_short, auth_plugins, repo_cache_long
321
321
322 beaker.cache.super_short_term.type = memory
322 beaker.cache.super_short_term.type = memory
323 beaker.cache.super_short_term.expire = 10
323 beaker.cache.super_short_term.expire = 10
324 beaker.cache.super_short_term.key_length = 256
324 beaker.cache.super_short_term.key_length = 256
325
325
326 beaker.cache.short_term.type = memory
326 beaker.cache.short_term.type = memory
327 beaker.cache.short_term.expire = 60
327 beaker.cache.short_term.expire = 60
328 beaker.cache.short_term.key_length = 256
328 beaker.cache.short_term.key_length = 256
329
329
330 beaker.cache.long_term.type = memory
330 beaker.cache.long_term.type = memory
331 beaker.cache.long_term.expire = 36000
331 beaker.cache.long_term.expire = 36000
332 beaker.cache.long_term.key_length = 256
332 beaker.cache.long_term.key_length = 256
333
333
334 beaker.cache.sql_cache_short.type = memory
334 beaker.cache.sql_cache_short.type = memory
335 beaker.cache.sql_cache_short.expire = 10
335 beaker.cache.sql_cache_short.expire = 10
336 beaker.cache.sql_cache_short.key_length = 256
336 beaker.cache.sql_cache_short.key_length = 256
337
337
338 ## default is memory cache, configure only if required
338 ## default is memory cache, configure only if required
339 ## using multi-node or multi-worker setup
339 ## using multi-node or multi-worker setup
340 #beaker.cache.auth_plugins.type = ext:database
340 #beaker.cache.auth_plugins.type = ext:database
341 #beaker.cache.auth_plugins.lock_dir = %(here)s/data/cache/auth_plugin_lock
341 #beaker.cache.auth_plugins.lock_dir = %(here)s/data/cache/auth_plugin_lock
342 #beaker.cache.auth_plugins.url = postgresql://postgres:secret@localhost/rhodecode
342 #beaker.cache.auth_plugins.url = postgresql://postgres:secret@localhost/rhodecode
343 #beaker.cache.auth_plugins.url = mysql://root:secret@127.0.0.1/rhodecode
343 #beaker.cache.auth_plugins.url = mysql://root:secret@127.0.0.1/rhodecode
344 #beaker.cache.auth_plugins.sa.pool_recycle = 3600
344 #beaker.cache.auth_plugins.sa.pool_recycle = 3600
345 #beaker.cache.auth_plugins.sa.pool_size = 10
345 #beaker.cache.auth_plugins.sa.pool_size = 10
346 #beaker.cache.auth_plugins.sa.max_overflow = 0
346 #beaker.cache.auth_plugins.sa.max_overflow = 0
347
347
348 beaker.cache.repo_cache_long.type = memorylru_base
348 beaker.cache.repo_cache_long.type = memorylru_base
349 beaker.cache.repo_cache_long.max_items = 4096
349 beaker.cache.repo_cache_long.max_items = 4096
350 beaker.cache.repo_cache_long.expire = 2592000
350 beaker.cache.repo_cache_long.expire = 2592000
351
351
352 ## default is memorylru_base cache, configure only if required
352 ## default is memorylru_base cache, configure only if required
353 ## using multi-node or multi-worker setup
353 ## using multi-node or multi-worker setup
354 #beaker.cache.repo_cache_long.type = ext:memcached
354 #beaker.cache.repo_cache_long.type = ext:memcached
355 #beaker.cache.repo_cache_long.url = localhost:11211
355 #beaker.cache.repo_cache_long.url = localhost:11211
356 #beaker.cache.repo_cache_long.expire = 1209600
356 #beaker.cache.repo_cache_long.expire = 1209600
357 #beaker.cache.repo_cache_long.key_length = 256
357 #beaker.cache.repo_cache_long.key_length = 256
358
358
359 ####################################
359 ####################################
360 ### BEAKER SESSION ####
360 ### BEAKER SESSION ####
361 ####################################
361 ####################################
362
362
363 ## .session.type is type of storage options for the session, current allowed
363 ## .session.type is type of storage options for the session, current allowed
364 ## types are file, ext:memcached, ext:database, and memory (default).
364 ## types are file, ext:memcached, ext:database, and memory (default).
365 beaker.session.type = file
365 beaker.session.type = file
366 beaker.session.data_dir = %(here)s/data/sessions/data
366 beaker.session.data_dir = %(here)s/data/sessions/data
367
367
368 ## db based session, fast, and allows easy management over logged in users
368 ## db based session, fast, and allows easy management over logged in users
369 #beaker.session.type = ext:database
369 #beaker.session.type = ext:database
370 #beaker.session.table_name = db_session
370 #beaker.session.table_name = db_session
371 #beaker.session.sa.url = postgresql://postgres:secret@localhost/rhodecode
371 #beaker.session.sa.url = postgresql://postgres:secret@localhost/rhodecode
372 #beaker.session.sa.url = mysql://root:secret@127.0.0.1/rhodecode
372 #beaker.session.sa.url = mysql://root:secret@127.0.0.1/rhodecode
373 #beaker.session.sa.pool_recycle = 3600
373 #beaker.session.sa.pool_recycle = 3600
374 #beaker.session.sa.echo = false
374 #beaker.session.sa.echo = false
375
375
376 beaker.session.key = rhodecode
376 beaker.session.key = rhodecode
377 beaker.session.secret = develop-rc-uytcxaz
377 beaker.session.secret = develop-rc-uytcxaz
378 beaker.session.lock_dir = %(here)s/data/sessions/lock
378 beaker.session.lock_dir = %(here)s/data/sessions/lock
379
379
380 ## Secure encrypted cookie. Requires AES and AES python libraries
380 ## Secure encrypted cookie. Requires AES and AES python libraries
381 ## you must disable beaker.session.secret to use this
381 ## you must disable beaker.session.secret to use this
382 #beaker.session.encrypt_key = key_for_encryption
382 #beaker.session.encrypt_key = key_for_encryption
383 #beaker.session.validate_key = validation_key
383 #beaker.session.validate_key = validation_key
384
384
385 ## sets session as invalid (also logging out the user) if it has not been
385 ## sets session as invalid (also logging out the user) if it has not been
386 ## accessed for given amount of time in seconds
386 ## accessed for given amount of time in seconds
387 beaker.session.timeout = 2592000
387 beaker.session.timeout = 2592000
388 beaker.session.httponly = true
388 beaker.session.httponly = true
389 ## Path to use for the cookie. Set to prefix if you use prefix middleware
389 ## Path to use for the cookie. Set to prefix if you use prefix middleware
390 #beaker.session.cookie_path = /custom_prefix
390 #beaker.session.cookie_path = /custom_prefix
391
391
392 ## uncomment for https secure cookie
392 ## uncomment for https secure cookie
393 beaker.session.secure = false
393 beaker.session.secure = false
394
394
395 ## auto save the session to not to use .save()
395 ## auto save the session to not to use .save()
396 beaker.session.auto = false
396 beaker.session.auto = false
397
397
398 ## default cookie expiration time in seconds, set to `true` to set expire
398 ## default cookie expiration time in seconds, set to `true` to set expire
399 ## at browser close
399 ## at browser close
400 #beaker.session.cookie_expires = 3600
400 #beaker.session.cookie_expires = 3600
401
401
402 ###################################
402 ###################################
403 ## SEARCH INDEXING CONFIGURATION ##
403 ## SEARCH INDEXING CONFIGURATION ##
404 ###################################
404 ###################################
405 ## Full text search indexer is available in rhodecode-tools under
405 ## Full text search indexer is available in rhodecode-tools under
406 ## `rhodecode-tools index` command
406 ## `rhodecode-tools index` command
407
407
408 ## WHOOSH Backend, doesn't require additional services to run
408 ## WHOOSH Backend, doesn't require additional services to run
409 ## it works good with few dozen repos
409 ## it works good with few dozen repos
410 search.module = rhodecode.lib.index.whoosh
410 search.module = rhodecode.lib.index.whoosh
411 search.location = %(here)s/data/index
411 search.location = %(here)s/data/index
412
412
413 ########################################
413 ########################################
414 ### CHANNELSTREAM CONFIG ####
414 ### CHANNELSTREAM CONFIG ####
415 ########################################
415 ########################################
416 ## channelstream enables persistent connections and live notification
416 ## channelstream enables persistent connections and live notification
417 ## in the system. It's also used by the chat system
417 ## in the system. It's also used by the chat system
418 channelstream.enabled = false
418 channelstream.enabled = false
419
419
420 ## server address for channelstream server on the backend
420 ## server address for channelstream server on the backend
421 channelstream.server = 127.0.0.1:9800
421 channelstream.server = 127.0.0.1:9800
422
422
423 ## location of the channelstream server from outside world
423 ## location of the channelstream server from outside world
424 ## use ws:// for http or wss:// for https. This address needs to be handled
424 ## use ws:// for http or wss:// for https. This address needs to be handled
425 ## by external HTTP server such as Nginx or Apache
425 ## by external HTTP server such as Nginx or Apache
426 ## see nginx/apache configuration examples in our docs
426 ## see nginx/apache configuration examples in our docs
427 channelstream.ws_url = ws://rhodecode.yourserver.com/_channelstream
427 channelstream.ws_url = ws://rhodecode.yourserver.com/_channelstream
428 channelstream.secret = secret
428 channelstream.secret = secret
429 channelstream.history.location = %(here)s/channelstream_history
429 channelstream.history.location = %(here)s/channelstream_history
430
430
431 ## Internal application path that Javascript uses to connect into.
431 ## Internal application path that Javascript uses to connect into.
432 ## If you use proxy-prefix the prefix should be added before /_channelstream
432 ## If you use proxy-prefix the prefix should be added before /_channelstream
433 channelstream.proxy_path = /_channelstream
433 channelstream.proxy_path = /_channelstream
434
434
435
435
436 ###################################
436 ###################################
437 ## APPENLIGHT CONFIG ##
437 ## APPENLIGHT CONFIG ##
438 ###################################
438 ###################################
439
439
440 ## Appenlight is tailored to work with RhodeCode, see
440 ## Appenlight is tailored to work with RhodeCode, see
441 ## http://appenlight.com for details how to obtain an account
441 ## http://appenlight.com for details how to obtain an account
442
442
443 ## appenlight integration enabled
443 ## appenlight integration enabled
444 appenlight = false
444 appenlight = false
445
445
446 appenlight.server_url = https://api.appenlight.com
446 appenlight.server_url = https://api.appenlight.com
447 appenlight.api_key = YOUR_API_KEY
447 appenlight.api_key = YOUR_API_KEY
448 #appenlight.transport_config = https://api.appenlight.com?threaded=1&timeout=5
448 #appenlight.transport_config = https://api.appenlight.com?threaded=1&timeout=5
449
449
450 # used for JS client
450 # used for JS client
451 appenlight.api_public_key = YOUR_API_PUBLIC_KEY
451 appenlight.api_public_key = YOUR_API_PUBLIC_KEY
452
452
453 ## TWEAK AMOUNT OF INFO SENT HERE
453 ## TWEAK AMOUNT OF INFO SENT HERE
454
454
455 ## enables 404 error logging (default False)
455 ## enables 404 error logging (default False)
456 appenlight.report_404 = false
456 appenlight.report_404 = false
457
457
458 ## time in seconds after request is considered being slow (default 1)
458 ## time in seconds after request is considered being slow (default 1)
459 appenlight.slow_request_time = 1
459 appenlight.slow_request_time = 1
460
460
461 ## record slow requests in application
461 ## record slow requests in application
462 ## (needs to be enabled for slow datastore recording and time tracking)
462 ## (needs to be enabled for slow datastore recording and time tracking)
463 appenlight.slow_requests = true
463 appenlight.slow_requests = true
464
464
465 ## enable hooking to application loggers
465 ## enable hooking to application loggers
466 appenlight.logging = true
466 appenlight.logging = true
467
467
468 ## minimum log level for log capture
468 ## minimum log level for log capture
469 appenlight.logging.level = WARNING
469 appenlight.logging.level = WARNING
470
470
471 ## send logs only from erroneous/slow requests
471 ## send logs only from erroneous/slow requests
472 ## (saves API quota for intensive logging)
472 ## (saves API quota for intensive logging)
473 appenlight.logging_on_error = false
473 appenlight.logging_on_error = false
474
474
475 ## list of additonal keywords that should be grabbed from environ object
475 ## list of additonal keywords that should be grabbed from environ object
476 ## can be string with comma separated list of words in lowercase
476 ## can be string with comma separated list of words in lowercase
477 ## (by default client will always send following info:
477 ## (by default client will always send following info:
478 ## 'REMOTE_USER', 'REMOTE_ADDR', 'SERVER_NAME', 'CONTENT_TYPE' + all keys that
478 ## 'REMOTE_USER', 'REMOTE_ADDR', 'SERVER_NAME', 'CONTENT_TYPE' + all keys that
479 ## start with HTTP* this list be extended with additional keywords here
479 ## start with HTTP* this list be extended with additional keywords here
480 appenlight.environ_keys_whitelist =
480 appenlight.environ_keys_whitelist =
481
481
482 ## list of keywords that should be blanked from request object
482 ## list of keywords that should be blanked from request object
483 ## can be string with comma separated list of words in lowercase
483 ## can be string with comma separated list of words in lowercase
484 ## (by default client will always blank keys that contain following words
484 ## (by default client will always blank keys that contain following words
485 ## 'password', 'passwd', 'pwd', 'auth_tkt', 'secret', 'csrf'
485 ## 'password', 'passwd', 'pwd', 'auth_tkt', 'secret', 'csrf'
486 ## this list be extended with additional keywords set here
486 ## this list be extended with additional keywords set here
487 appenlight.request_keys_blacklist =
487 appenlight.request_keys_blacklist =
488
488
489 ## list of namespaces that should be ignores when gathering log entries
489 ## list of namespaces that should be ignores when gathering log entries
490 ## can be string with comma separated list of namespaces
490 ## can be string with comma separated list of namespaces
491 ## (by default the client ignores own entries: appenlight_client.client)
491 ## (by default the client ignores own entries: appenlight_client.client)
492 appenlight.log_namespace_blacklist =
492 appenlight.log_namespace_blacklist =
493
493
494
494
495 ################################################################################
495 ################################################################################
496 ## WARNING: *THE LINE BELOW MUST BE UNCOMMENTED ON A PRODUCTION ENVIRONMENT* ##
496 ## WARNING: *THE LINE BELOW MUST BE UNCOMMENTED ON A PRODUCTION ENVIRONMENT* ##
497 ## Debug mode will enable the interactive debugging tool, allowing ANYONE to ##
497 ## Debug mode will enable the interactive debugging tool, allowing ANYONE to ##
498 ## execute malicious code after an exception is raised. ##
498 ## execute malicious code after an exception is raised. ##
499 ################################################################################
499 ################################################################################
500 #set debug = false
500 #set debug = false
501
501
502
502
503 ##############
503 ##############
504 ## STYLING ##
504 ## STYLING ##
505 ##############
505 ##############
506 debug_style = true
506 debug_style = true
507
507
508 ###########################################
508 ###########################################
509 ### MAIN RHODECODE DATABASE CONFIG ###
509 ### MAIN RHODECODE DATABASE CONFIG ###
510 ###########################################
510 ###########################################
511 #sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode.db?timeout=30
511 #sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode.db?timeout=30
512 #sqlalchemy.db1.url = postgresql://postgres:qweqwe@localhost/rhodecode
512 #sqlalchemy.db1.url = postgresql://postgres:qweqwe@localhost/rhodecode
513 #sqlalchemy.db1.url = mysql://root:qweqwe@localhost/rhodecode
513 #sqlalchemy.db1.url = mysql://root:qweqwe@localhost/rhodecode
514 sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode.db?timeout=30
514 sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode.db?timeout=30
515
515
516 # see sqlalchemy docs for other advanced settings
516 # see sqlalchemy docs for other advanced settings
517
517
518 ## print the sql statements to output
518 ## print the sql statements to output
519 sqlalchemy.db1.echo = false
519 sqlalchemy.db1.echo = false
520 ## recycle the connections after this amount of seconds
520 ## recycle the connections after this amount of seconds
521 sqlalchemy.db1.pool_recycle = 3600
521 sqlalchemy.db1.pool_recycle = 3600
522 sqlalchemy.db1.convert_unicode = true
522 sqlalchemy.db1.convert_unicode = true
523
523
524 ## the number of connections to keep open inside the connection pool.
524 ## the number of connections to keep open inside the connection pool.
525 ## 0 indicates no limit
525 ## 0 indicates no limit
526 #sqlalchemy.db1.pool_size = 5
526 #sqlalchemy.db1.pool_size = 5
527
527
528 ## the number of connections to allow in connection pool "overflow", that is
528 ## the number of connections to allow in connection pool "overflow", that is
529 ## connections that can be opened above and beyond the pool_size setting,
529 ## connections that can be opened above and beyond the pool_size setting,
530 ## which defaults to five.
530 ## which defaults to five.
531 #sqlalchemy.db1.max_overflow = 10
531 #sqlalchemy.db1.max_overflow = 10
532
532
533
533
534 ##################
534 ##################
535 ### VCS CONFIG ###
535 ### VCS CONFIG ###
536 ##################
536 ##################
537 vcs.server.enable = true
537 vcs.server.enable = true
538 vcs.server = localhost:9900
538 vcs.server = localhost:9900
539
539
540 ## Web server connectivity protocol, responsible for web based VCS operatations
540 ## Web server connectivity protocol, responsible for web based VCS operatations
541 ## Available protocols are:
541 ## Available protocols are:
542 ## `pyro4` - use pyro4 server
543 ## `http` - use http-rpc backend (default)
542 ## `http` - use http-rpc backend (default)
544 vcs.server.protocol = http
543 vcs.server.protocol = http
545
544
546 ## Push/Pull operations protocol, available options are:
545 ## Push/Pull operations protocol, available options are:
547 ## `pyro4` - use pyro4 server
548 ## `http` - use http-rpc backend (default)
546 ## `http` - use http-rpc backend (default)
549 ##
547 ##
550 vcs.scm_app_implementation = http
548 vcs.scm_app_implementation = http
551
549
552 ## Push/Pull operations hooks protocol, available options are:
550 ## Push/Pull operations hooks protocol, available options are:
553 ## `pyro4` - use pyro4 server
554 ## `http` - use http-rpc backend (default)
551 ## `http` - use http-rpc backend (default)
555 vcs.hooks.protocol = http
552 vcs.hooks.protocol = http
556
553
557 vcs.server.log_level = debug
554 vcs.server.log_level = debug
558 ## Start VCSServer with this instance as a subprocess, usefull for development
555 ## Start VCSServer with this instance as a subprocess, usefull for development
559 vcs.start_server = true
556 vcs.start_server = true
560
557
561 ## List of enabled VCS backends, available options are:
558 ## List of enabled VCS backends, available options are:
562 ## `hg` - mercurial
559 ## `hg` - mercurial
563 ## `git` - git
560 ## `git` - git
564 ## `svn` - subversion
561 ## `svn` - subversion
565 vcs.backends = hg, git, svn
562 vcs.backends = hg, git, svn
566
563
567 vcs.connection_timeout = 3600
564 vcs.connection_timeout = 3600
568 ## Compatibility version when creating SVN repositories. Defaults to newest version when commented out.
565 ## Compatibility version when creating SVN repositories. Defaults to newest version when commented out.
569 ## Available options are: pre-1.4-compatible, pre-1.5-compatible, pre-1.6-compatible, pre-1.8-compatible
566 ## Available options are: pre-1.4-compatible, pre-1.5-compatible, pre-1.6-compatible, pre-1.8-compatible
570 #vcs.svn.compatible_version = pre-1.8-compatible
567 #vcs.svn.compatible_version = pre-1.8-compatible
571
568
572
569
573 ############################################################
570 ############################################################
574 ### Subversion proxy support (mod_dav_svn) ###
571 ### Subversion proxy support (mod_dav_svn) ###
575 ### Maps RhodeCode repo groups into SVN paths for Apache ###
572 ### Maps RhodeCode repo groups into SVN paths for Apache ###
576 ############################################################
573 ############################################################
577 ## Enable or disable the config file generation.
574 ## Enable or disable the config file generation.
578 svn.proxy.generate_config = false
575 svn.proxy.generate_config = false
579 ## Generate config file with `SVNListParentPath` set to `On`.
576 ## Generate config file with `SVNListParentPath` set to `On`.
580 svn.proxy.list_parent_path = true
577 svn.proxy.list_parent_path = true
581 ## Set location and file name of generated config file.
578 ## Set location and file name of generated config file.
582 svn.proxy.config_file_path = %(here)s/mod_dav_svn.conf
579 svn.proxy.config_file_path = %(here)s/mod_dav_svn.conf
583 ## Used as a prefix to the `Location` block in the generated config file.
580 ## Used as a prefix to the `Location` block in the generated config file.
584 ## In most cases it should be set to `/`.
581 ## In most cases it should be set to `/`.
585 svn.proxy.location_root = /
582 svn.proxy.location_root = /
586 ## Command to reload the mod dav svn configuration on change.
583 ## Command to reload the mod dav svn configuration on change.
587 ## Example: `/etc/init.d/apache2 reload`
584 ## Example: `/etc/init.d/apache2 reload`
588 #svn.proxy.reload_cmd = /etc/init.d/apache2 reload
585 #svn.proxy.reload_cmd = /etc/init.d/apache2 reload
589 ## If the timeout expires before the reload command finishes, the command will
586 ## If the timeout expires before the reload command finishes, the command will
590 ## be killed. Setting it to zero means no timeout. Defaults to 10 seconds.
587 ## be killed. Setting it to zero means no timeout. Defaults to 10 seconds.
591 #svn.proxy.reload_timeout = 10
588 #svn.proxy.reload_timeout = 10
592
589
593 ## Dummy marker to add new entries after.
590 ## Dummy marker to add new entries after.
594 ## Add any custom entries below. Please don't remove.
591 ## Add any custom entries below. Please don't remove.
595 custom.conf = 1
592 custom.conf = 1
596
593
597
594
598 ################################
595 ################################
599 ### LOGGING CONFIGURATION ####
596 ### LOGGING CONFIGURATION ####
600 ################################
597 ################################
601 [loggers]
598 [loggers]
602 keys = root, routes, rhodecode, sqlalchemy, beaker, templates
599 keys = root, routes, rhodecode, sqlalchemy, beaker, templates
603
600
604 [handlers]
601 [handlers]
605 keys = console, console_sql
602 keys = console, console_sql
606
603
607 [formatters]
604 [formatters]
608 keys = generic, color_formatter, color_formatter_sql
605 keys = generic, color_formatter, color_formatter_sql
609
606
610 #############
607 #############
611 ## LOGGERS ##
608 ## LOGGERS ##
612 #############
609 #############
613 [logger_root]
610 [logger_root]
614 level = NOTSET
611 level = NOTSET
615 handlers = console
612 handlers = console
616
613
617 [logger_routes]
614 [logger_routes]
618 level = DEBUG
615 level = DEBUG
619 handlers =
616 handlers =
620 qualname = routes.middleware
617 qualname = routes.middleware
621 ## "level = DEBUG" logs the route matched and routing variables.
618 ## "level = DEBUG" logs the route matched and routing variables.
622 propagate = 1
619 propagate = 1
623
620
624 [logger_beaker]
621 [logger_beaker]
625 level = DEBUG
622 level = DEBUG
626 handlers =
623 handlers =
627 qualname = beaker.container
624 qualname = beaker.container
628 propagate = 1
625 propagate = 1
629
626
630 [logger_templates]
627 [logger_templates]
631 level = INFO
628 level = INFO
632 handlers =
629 handlers =
633 qualname = pylons.templating
630 qualname = pylons.templating
634 propagate = 1
631 propagate = 1
635
632
636 [logger_rhodecode]
633 [logger_rhodecode]
637 level = DEBUG
634 level = DEBUG
638 handlers =
635 handlers =
639 qualname = rhodecode
636 qualname = rhodecode
640 propagate = 1
637 propagate = 1
641
638
642 [logger_sqlalchemy]
639 [logger_sqlalchemy]
643 level = INFO
640 level = INFO
644 handlers = console_sql
641 handlers = console_sql
645 qualname = sqlalchemy.engine
642 qualname = sqlalchemy.engine
646 propagate = 0
643 propagate = 0
647
644
648 ##############
645 ##############
649 ## HANDLERS ##
646 ## HANDLERS ##
650 ##############
647 ##############
651
648
652 [handler_console]
649 [handler_console]
653 class = StreamHandler
650 class = StreamHandler
654 args = (sys.stderr, )
651 args = (sys.stderr, )
655 level = DEBUG
652 level = DEBUG
656 formatter = color_formatter
653 formatter = color_formatter
657
654
658 [handler_console_sql]
655 [handler_console_sql]
659 class = StreamHandler
656 class = StreamHandler
660 args = (sys.stderr, )
657 args = (sys.stderr, )
661 level = DEBUG
658 level = DEBUG
662 formatter = color_formatter_sql
659 formatter = color_formatter_sql
663
660
664 ################
661 ################
665 ## FORMATTERS ##
662 ## FORMATTERS ##
666 ################
663 ################
667
664
668 [formatter_generic]
665 [formatter_generic]
669 class = rhodecode.lib.logging_formatter.Pyro4AwareFormatter
666 class = rhodecode.lib.logging_formatter.ExceptionAwareFormatter
670 format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
667 format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
671 datefmt = %Y-%m-%d %H:%M:%S
668 datefmt = %Y-%m-%d %H:%M:%S
672
669
673 [formatter_color_formatter]
670 [formatter_color_formatter]
674 class = rhodecode.lib.logging_formatter.ColorFormatter
671 class = rhodecode.lib.logging_formatter.ColorFormatter
675 format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
672 format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
676 datefmt = %Y-%m-%d %H:%M:%S
673 datefmt = %Y-%m-%d %H:%M:%S
677
674
678 [formatter_color_formatter_sql]
675 [formatter_color_formatter_sql]
679 class = rhodecode.lib.logging_formatter.ColorFormatterSql
676 class = rhodecode.lib.logging_formatter.ColorFormatterSql
680 format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
677 format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
681 datefmt = %Y-%m-%d %H:%M:%S
678 datefmt = %Y-%m-%d %H:%M:%S
@@ -1,650 +1,647 b''
1
1
2
2
3 ################################################################################
3 ################################################################################
4 ## RHODECODE COMMUNITY EDITION CONFIGURATION ##
4 ## RHODECODE COMMUNITY EDITION CONFIGURATION ##
5 # The %(here)s variable will be replaced with the parent directory of this file#
5 # The %(here)s variable will be replaced with the parent directory of this file#
6 ################################################################################
6 ################################################################################
7
7
8 [DEFAULT]
8 [DEFAULT]
9 debug = true
9 debug = true
10
10
11 ################################################################################
11 ################################################################################
12 ## EMAIL CONFIGURATION ##
12 ## EMAIL CONFIGURATION ##
13 ## Uncomment and replace with the email address which should receive ##
13 ## Uncomment and replace with the email address which should receive ##
14 ## any error reports after an application crash ##
14 ## any error reports after an application crash ##
15 ## Additionally these settings will be used by the RhodeCode mailing system ##
15 ## Additionally these settings will be used by the RhodeCode mailing system ##
16 ################################################################################
16 ################################################################################
17
17
18 ## prefix all emails subjects with given prefix, helps filtering out emails
18 ## prefix all emails subjects with given prefix, helps filtering out emails
19 #email_prefix = [RhodeCode]
19 #email_prefix = [RhodeCode]
20
20
21 ## email FROM address all mails will be sent
21 ## email FROM address all mails will be sent
22 #app_email_from = rhodecode-noreply@localhost
22 #app_email_from = rhodecode-noreply@localhost
23
23
24 ## Uncomment and replace with the address which should receive any error report
24 ## Uncomment and replace with the address which should receive any error report
25 ## note: using appenlight for error handling doesn't need this to be uncommented
25 ## note: using appenlight for error handling doesn't need this to be uncommented
26 #email_to = admin@localhost
26 #email_to = admin@localhost
27
27
28 ## in case of Application errors, sent an error email form
28 ## in case of Application errors, sent an error email form
29 #error_email_from = rhodecode_error@localhost
29 #error_email_from = rhodecode_error@localhost
30
30
31 ## additional error message to be send in case of server crash
31 ## additional error message to be send in case of server crash
32 #error_message =
32 #error_message =
33
33
34
34
35 #smtp_server = mail.server.com
35 #smtp_server = mail.server.com
36 #smtp_username =
36 #smtp_username =
37 #smtp_password =
37 #smtp_password =
38 #smtp_port =
38 #smtp_port =
39 #smtp_use_tls = false
39 #smtp_use_tls = false
40 #smtp_use_ssl = true
40 #smtp_use_ssl = true
41 ## Specify available auth parameters here (e.g. LOGIN PLAIN CRAM-MD5, etc.)
41 ## Specify available auth parameters here (e.g. LOGIN PLAIN CRAM-MD5, etc.)
42 #smtp_auth =
42 #smtp_auth =
43
43
44 [server:main]
44 [server:main]
45 ## COMMON ##
45 ## COMMON ##
46 host = 127.0.0.1
46 host = 127.0.0.1
47 port = 5000
47 port = 5000
48
48
49 ##################################
49 ##################################
50 ## WAITRESS WSGI SERVER ##
50 ## WAITRESS WSGI SERVER ##
51 ## Recommended for Development ##
51 ## Recommended for Development ##
52 ##################################
52 ##################################
53
53
54 #use = egg:waitress#main
54 #use = egg:waitress#main
55 ## number of worker threads
55 ## number of worker threads
56 #threads = 5
56 #threads = 5
57 ## MAX BODY SIZE 100GB
57 ## MAX BODY SIZE 100GB
58 #max_request_body_size = 107374182400
58 #max_request_body_size = 107374182400
59 ## Use poll instead of select, fixes file descriptors limits problems.
59 ## Use poll instead of select, fixes file descriptors limits problems.
60 ## May not work on old windows systems.
60 ## May not work on old windows systems.
61 #asyncore_use_poll = true
61 #asyncore_use_poll = true
62
62
63
63
64 ##########################
64 ##########################
65 ## GUNICORN WSGI SERVER ##
65 ## GUNICORN WSGI SERVER ##
66 ##########################
66 ##########################
67 ## run with gunicorn --log-config rhodecode.ini --paste rhodecode.ini
67 ## run with gunicorn --log-config rhodecode.ini --paste rhodecode.ini
68
68
69 use = egg:gunicorn#main
69 use = egg:gunicorn#main
70 ## Sets the number of process workers. You must set `instance_id = *`
70 ## Sets the number of process workers. You must set `instance_id = *`
71 ## when this option is set to more than one worker, recommended
71 ## when this option is set to more than one worker, recommended
72 ## value is (2 * NUMBER_OF_CPUS + 1), eg 2CPU = 5 workers
72 ## value is (2 * NUMBER_OF_CPUS + 1), eg 2CPU = 5 workers
73 ## The `instance_id = *` must be set in the [app:main] section below
73 ## The `instance_id = *` must be set in the [app:main] section below
74 workers = 2
74 workers = 2
75 ## number of threads for each of the worker, must be set to 1 for gevent
75 ## number of threads for each of the worker, must be set to 1 for gevent
76 ## generally recommened to be at 1
76 ## generally recommened to be at 1
77 #threads = 1
77 #threads = 1
78 ## process name
78 ## process name
79 proc_name = rhodecode
79 proc_name = rhodecode
80 ## type of worker class, one of sync, gevent
80 ## type of worker class, one of sync, gevent
81 ## recommended for bigger setup is using of of other than sync one
81 ## recommended for bigger setup is using of of other than sync one
82 worker_class = sync
82 worker_class = sync
83 ## The maximum number of simultaneous clients. Valid only for Gevent
83 ## The maximum number of simultaneous clients. Valid only for Gevent
84 #worker_connections = 10
84 #worker_connections = 10
85 ## max number of requests that worker will handle before being gracefully
85 ## max number of requests that worker will handle before being gracefully
86 ## restarted, could prevent memory leaks
86 ## restarted, could prevent memory leaks
87 max_requests = 1000
87 max_requests = 1000
88 max_requests_jitter = 30
88 max_requests_jitter = 30
89 ## amount of time a worker can spend with handling a request before it
89 ## amount of time a worker can spend with handling a request before it
90 ## gets killed and restarted. Set to 6hrs
90 ## gets killed and restarted. Set to 6hrs
91 timeout = 21600
91 timeout = 21600
92
92
93
93
94 ## prefix middleware for RhodeCode.
94 ## prefix middleware for RhodeCode.
95 ## recommended when using proxy setup.
95 ## recommended when using proxy setup.
96 ## allows to set RhodeCode under a prefix in server.
96 ## allows to set RhodeCode under a prefix in server.
97 ## eg https://server.com/custom_prefix. Enable `filter-with =` option below as well.
97 ## eg https://server.com/custom_prefix. Enable `filter-with =` option below as well.
98 ## And set your prefix like: `prefix = /custom_prefix`
98 ## And set your prefix like: `prefix = /custom_prefix`
99 ## be sure to also set beaker.session.cookie_path = /custom_prefix if you need
99 ## be sure to also set beaker.session.cookie_path = /custom_prefix if you need
100 ## to make your cookies only work on prefix url
100 ## to make your cookies only work on prefix url
101 [filter:proxy-prefix]
101 [filter:proxy-prefix]
102 use = egg:PasteDeploy#prefix
102 use = egg:PasteDeploy#prefix
103 prefix = /
103 prefix = /
104
104
105 [app:main]
105 [app:main]
106 use = egg:rhodecode-enterprise-ce
106 use = egg:rhodecode-enterprise-ce
107
107
108 ## enable proxy prefix middleware, defined above
108 ## enable proxy prefix middleware, defined above
109 #filter-with = proxy-prefix
109 #filter-with = proxy-prefix
110
110
111 ## encryption key used to encrypt social plugin tokens,
111 ## encryption key used to encrypt social plugin tokens,
112 ## remote_urls with credentials etc, if not set it defaults to
112 ## remote_urls with credentials etc, if not set it defaults to
113 ## `beaker.session.secret`
113 ## `beaker.session.secret`
114 #rhodecode.encrypted_values.secret =
114 #rhodecode.encrypted_values.secret =
115
115
116 ## decryption strict mode (enabled by default). It controls if decryption raises
116 ## decryption strict mode (enabled by default). It controls if decryption raises
117 ## `SignatureVerificationError` in case of wrong key, or damaged encryption data.
117 ## `SignatureVerificationError` in case of wrong key, or damaged encryption data.
118 #rhodecode.encrypted_values.strict = false
118 #rhodecode.encrypted_values.strict = false
119
119
120 ## return gzipped responses from Rhodecode (static files/application)
120 ## return gzipped responses from Rhodecode (static files/application)
121 gzip_responses = false
121 gzip_responses = false
122
122
123 ## autogenerate javascript routes file on startup
123 ## autogenerate javascript routes file on startup
124 generate_js_files = false
124 generate_js_files = false
125
125
126 ## Optional Languages
126 ## Optional Languages
127 ## en(default), be, de, es, fr, it, ja, pl, pt, ru, zh
127 ## en(default), be, de, es, fr, it, ja, pl, pt, ru, zh
128 lang = en
128 lang = en
129
129
130 ## perform a full repository scan on each server start, this should be
130 ## perform a full repository scan on each server start, this should be
131 ## set to false after first startup, to allow faster server restarts.
131 ## set to false after first startup, to allow faster server restarts.
132 startup.import_repos = false
132 startup.import_repos = false
133
133
134 ## Uncomment and set this path to use archive download cache.
134 ## Uncomment and set this path to use archive download cache.
135 ## Once enabled, generated archives will be cached at this location
135 ## Once enabled, generated archives will be cached at this location
136 ## and served from the cache during subsequent requests for the same archive of
136 ## and served from the cache during subsequent requests for the same archive of
137 ## the repository.
137 ## the repository.
138 #archive_cache_dir = /tmp/tarballcache
138 #archive_cache_dir = /tmp/tarballcache
139
139
140 ## change this to unique ID for security
140 ## change this to unique ID for security
141 app_instance_uuid = rc-production
141 app_instance_uuid = rc-production
142
142
143 ## cut off limit for large diffs (size in bytes)
143 ## cut off limit for large diffs (size in bytes)
144 cut_off_limit_diff = 1024000
144 cut_off_limit_diff = 1024000
145 cut_off_limit_file = 256000
145 cut_off_limit_file = 256000
146
146
147 ## use cache version of scm repo everywhere
147 ## use cache version of scm repo everywhere
148 vcs_full_cache = true
148 vcs_full_cache = true
149
149
150 ## force https in RhodeCode, fixes https redirects, assumes it's always https
150 ## force https in RhodeCode, fixes https redirects, assumes it's always https
151 ## Normally this is controlled by proper http flags sent from http server
151 ## Normally this is controlled by proper http flags sent from http server
152 force_https = false
152 force_https = false
153
153
154 ## use Strict-Transport-Security headers
154 ## use Strict-Transport-Security headers
155 use_htsts = false
155 use_htsts = false
156
156
157 ## number of commits stats will parse on each iteration
157 ## number of commits stats will parse on each iteration
158 commit_parse_limit = 25
158 commit_parse_limit = 25
159
159
160 ## git rev filter option, --all is the default filter, if you need to
160 ## git rev filter option, --all is the default filter, if you need to
161 ## hide all refs in changelog switch this to --branches --tags
161 ## hide all refs in changelog switch this to --branches --tags
162 git_rev_filter = --branches --tags
162 git_rev_filter = --branches --tags
163
163
164 # Set to true if your repos are exposed using the dumb protocol
164 # Set to true if your repos are exposed using the dumb protocol
165 git_update_server_info = false
165 git_update_server_info = false
166
166
167 ## RSS/ATOM feed options
167 ## RSS/ATOM feed options
168 rss_cut_off_limit = 256000
168 rss_cut_off_limit = 256000
169 rss_items_per_page = 10
169 rss_items_per_page = 10
170 rss_include_diff = false
170 rss_include_diff = false
171
171
172 ## gist URL alias, used to create nicer urls for gist. This should be an
172 ## gist URL alias, used to create nicer urls for gist. This should be an
173 ## url that does rewrites to _admin/gists/{gistid}.
173 ## url that does rewrites to _admin/gists/{gistid}.
174 ## example: http://gist.rhodecode.org/{gistid}. Empty means use the internal
174 ## example: http://gist.rhodecode.org/{gistid}. Empty means use the internal
175 ## RhodeCode url, ie. http[s]://rhodecode.server/_admin/gists/{gistid}
175 ## RhodeCode url, ie. http[s]://rhodecode.server/_admin/gists/{gistid}
176 gist_alias_url =
176 gist_alias_url =
177
177
178 ## List of controllers (using glob pattern syntax) that AUTH TOKENS could be
178 ## List of controllers (using glob pattern syntax) that AUTH TOKENS could be
179 ## used for access.
179 ## used for access.
180 ## Adding ?auth_token=TOKEN_HASH to the url authenticates this request as if it
180 ## Adding ?auth_token=TOKEN_HASH to the url authenticates this request as if it
181 ## came from the logged in user who owns this authentication token.
181 ## came from the logged in user who owns this authentication token.
182 ##
182 ##
183 ## Syntax is ControllerClass:function_pattern.
183 ## Syntax is ControllerClass:function_pattern.
184 ## To enable access to raw_files put `FilesController:raw`.
184 ## To enable access to raw_files put `FilesController:raw`.
185 ## To enable access to patches add `ChangesetController:changeset_patch`.
185 ## To enable access to patches add `ChangesetController:changeset_patch`.
186 ## The list should be "," separated and on a single line.
186 ## The list should be "," separated and on a single line.
187 ##
187 ##
188 ## Recommended controllers to enable:
188 ## Recommended controllers to enable:
189 # ChangesetController:changeset_patch,
189 # ChangesetController:changeset_patch,
190 # ChangesetController:changeset_raw,
190 # ChangesetController:changeset_raw,
191 # FilesController:raw,
191 # FilesController:raw,
192 # FilesController:archivefile,
192 # FilesController:archivefile,
193 # GistsController:*,
193 # GistsController:*,
194 api_access_controllers_whitelist =
194 api_access_controllers_whitelist =
195
195
196 ## default encoding used to convert from and to unicode
196 ## default encoding used to convert from and to unicode
197 ## can be also a comma separated list of encoding in case of mixed encodings
197 ## can be also a comma separated list of encoding in case of mixed encodings
198 default_encoding = UTF-8
198 default_encoding = UTF-8
199
199
200 ## instance-id prefix
200 ## instance-id prefix
201 ## a prefix key for this instance used for cache invalidation when running
201 ## a prefix key for this instance used for cache invalidation when running
202 ## multiple instances of rhodecode, make sure it's globally unique for
202 ## multiple instances of rhodecode, make sure it's globally unique for
203 ## all running rhodecode instances. Leave empty if you don't use it
203 ## all running rhodecode instances. Leave empty if you don't use it
204 instance_id =
204 instance_id =
205
205
206 ## Fallback authentication plugin. Set this to a plugin ID to force the usage
206 ## Fallback authentication plugin. Set this to a plugin ID to force the usage
207 ## of an authentication plugin also if it is disabled by it's settings.
207 ## of an authentication plugin also if it is disabled by it's settings.
208 ## This could be useful if you are unable to log in to the system due to broken
208 ## This could be useful if you are unable to log in to the system due to broken
209 ## authentication settings. Then you can enable e.g. the internal rhodecode auth
209 ## authentication settings. Then you can enable e.g. the internal rhodecode auth
210 ## module to log in again and fix the settings.
210 ## module to log in again and fix the settings.
211 ##
211 ##
212 ## Available builtin plugin IDs (hash is part of the ID):
212 ## Available builtin plugin IDs (hash is part of the ID):
213 ## egg:rhodecode-enterprise-ce#rhodecode
213 ## egg:rhodecode-enterprise-ce#rhodecode
214 ## egg:rhodecode-enterprise-ce#pam
214 ## egg:rhodecode-enterprise-ce#pam
215 ## egg:rhodecode-enterprise-ce#ldap
215 ## egg:rhodecode-enterprise-ce#ldap
216 ## egg:rhodecode-enterprise-ce#jasig_cas
216 ## egg:rhodecode-enterprise-ce#jasig_cas
217 ## egg:rhodecode-enterprise-ce#headers
217 ## egg:rhodecode-enterprise-ce#headers
218 ## egg:rhodecode-enterprise-ce#crowd
218 ## egg:rhodecode-enterprise-ce#crowd
219 #rhodecode.auth_plugin_fallback = egg:rhodecode-enterprise-ce#rhodecode
219 #rhodecode.auth_plugin_fallback = egg:rhodecode-enterprise-ce#rhodecode
220
220
221 ## alternative return HTTP header for failed authentication. Default HTTP
221 ## alternative return HTTP header for failed authentication. Default HTTP
222 ## response is 401 HTTPUnauthorized. Currently HG clients have troubles with
222 ## response is 401 HTTPUnauthorized. Currently HG clients have troubles with
223 ## handling that causing a series of failed authentication calls.
223 ## handling that causing a series of failed authentication calls.
224 ## Set this variable to 403 to return HTTPForbidden, or any other HTTP code
224 ## Set this variable to 403 to return HTTPForbidden, or any other HTTP code
225 ## This will be served instead of default 401 on bad authentication
225 ## This will be served instead of default 401 on bad authentication
226 auth_ret_code =
226 auth_ret_code =
227
227
228 ## use special detection method when serving auth_ret_code, instead of serving
228 ## use special detection method when serving auth_ret_code, instead of serving
229 ## ret_code directly, use 401 initially (Which triggers credentials prompt)
229 ## ret_code directly, use 401 initially (Which triggers credentials prompt)
230 ## and then serve auth_ret_code to clients
230 ## and then serve auth_ret_code to clients
231 auth_ret_code_detection = false
231 auth_ret_code_detection = false
232
232
233 ## locking return code. When repository is locked return this HTTP code. 2XX
233 ## locking return code. When repository is locked return this HTTP code. 2XX
234 ## codes don't break the transactions while 4XX codes do
234 ## codes don't break the transactions while 4XX codes do
235 lock_ret_code = 423
235 lock_ret_code = 423
236
236
237 ## allows to change the repository location in settings page
237 ## allows to change the repository location in settings page
238 allow_repo_location_change = true
238 allow_repo_location_change = true
239
239
240 ## allows to setup custom hooks in settings page
240 ## allows to setup custom hooks in settings page
241 allow_custom_hooks_settings = true
241 allow_custom_hooks_settings = true
242
242
243 ## generated license token, goto license page in RhodeCode settings to obtain
243 ## generated license token, goto license page in RhodeCode settings to obtain
244 ## new token
244 ## new token
245 license_token =
245 license_token =
246
246
247 ## supervisor connection uri, for managing supervisor and logs.
247 ## supervisor connection uri, for managing supervisor and logs.
248 supervisor.uri =
248 supervisor.uri =
249 ## supervisord group name/id we only want this RC instance to handle
249 ## supervisord group name/id we only want this RC instance to handle
250 supervisor.group_id = prod
250 supervisor.group_id = prod
251
251
252 ## Display extended labs settings
252 ## Display extended labs settings
253 labs_settings_active = true
253 labs_settings_active = true
254
254
255 ####################################
255 ####################################
256 ### CELERY CONFIG ####
256 ### CELERY CONFIG ####
257 ####################################
257 ####################################
258 use_celery = false
258 use_celery = false
259 broker.host = localhost
259 broker.host = localhost
260 broker.vhost = rabbitmqhost
260 broker.vhost = rabbitmqhost
261 broker.port = 5672
261 broker.port = 5672
262 broker.user = rabbitmq
262 broker.user = rabbitmq
263 broker.password = qweqwe
263 broker.password = qweqwe
264
264
265 celery.imports = rhodecode.lib.celerylib.tasks
265 celery.imports = rhodecode.lib.celerylib.tasks
266
266
267 celery.result.backend = amqp
267 celery.result.backend = amqp
268 celery.result.dburi = amqp://
268 celery.result.dburi = amqp://
269 celery.result.serialier = json
269 celery.result.serialier = json
270
270
271 #celery.send.task.error.emails = true
271 #celery.send.task.error.emails = true
272 #celery.amqp.task.result.expires = 18000
272 #celery.amqp.task.result.expires = 18000
273
273
274 celeryd.concurrency = 2
274 celeryd.concurrency = 2
275 #celeryd.log.file = celeryd.log
275 #celeryd.log.file = celeryd.log
276 celeryd.log.level = debug
276 celeryd.log.level = debug
277 celeryd.max.tasks.per.child = 1
277 celeryd.max.tasks.per.child = 1
278
278
279 ## tasks will never be sent to the queue, but executed locally instead.
279 ## tasks will never be sent to the queue, but executed locally instead.
280 celery.always.eager = false
280 celery.always.eager = false
281
281
282 ####################################
282 ####################################
283 ### BEAKER CACHE ####
283 ### BEAKER CACHE ####
284 ####################################
284 ####################################
285 # default cache dir for templates. Putting this into a ramdisk
285 # default cache dir for templates. Putting this into a ramdisk
286 ## can boost performance, eg. %(here)s/data_ramdisk
286 ## can boost performance, eg. %(here)s/data_ramdisk
287 cache_dir = %(here)s/data
287 cache_dir = %(here)s/data
288
288
289 ## locking and default file storage for Beaker. Putting this into a ramdisk
289 ## locking and default file storage for Beaker. Putting this into a ramdisk
290 ## can boost performance, eg. %(here)s/data_ramdisk/cache/beaker_data
290 ## can boost performance, eg. %(here)s/data_ramdisk/cache/beaker_data
291 beaker.cache.data_dir = %(here)s/data/cache/beaker_data
291 beaker.cache.data_dir = %(here)s/data/cache/beaker_data
292 beaker.cache.lock_dir = %(here)s/data/cache/beaker_lock
292 beaker.cache.lock_dir = %(here)s/data/cache/beaker_lock
293
293
294 beaker.cache.regions = super_short_term, short_term, long_term, sql_cache_short, auth_plugins, repo_cache_long
294 beaker.cache.regions = super_short_term, short_term, long_term, sql_cache_short, auth_plugins, repo_cache_long
295
295
296 beaker.cache.super_short_term.type = memory
296 beaker.cache.super_short_term.type = memory
297 beaker.cache.super_short_term.expire = 10
297 beaker.cache.super_short_term.expire = 10
298 beaker.cache.super_short_term.key_length = 256
298 beaker.cache.super_short_term.key_length = 256
299
299
300 beaker.cache.short_term.type = memory
300 beaker.cache.short_term.type = memory
301 beaker.cache.short_term.expire = 60
301 beaker.cache.short_term.expire = 60
302 beaker.cache.short_term.key_length = 256
302 beaker.cache.short_term.key_length = 256
303
303
304 beaker.cache.long_term.type = memory
304 beaker.cache.long_term.type = memory
305 beaker.cache.long_term.expire = 36000
305 beaker.cache.long_term.expire = 36000
306 beaker.cache.long_term.key_length = 256
306 beaker.cache.long_term.key_length = 256
307
307
308 beaker.cache.sql_cache_short.type = memory
308 beaker.cache.sql_cache_short.type = memory
309 beaker.cache.sql_cache_short.expire = 10
309 beaker.cache.sql_cache_short.expire = 10
310 beaker.cache.sql_cache_short.key_length = 256
310 beaker.cache.sql_cache_short.key_length = 256
311
311
312 ## default is memory cache, configure only if required
312 ## default is memory cache, configure only if required
313 ## using multi-node or multi-worker setup
313 ## using multi-node or multi-worker setup
314 #beaker.cache.auth_plugins.type = ext:database
314 #beaker.cache.auth_plugins.type = ext:database
315 #beaker.cache.auth_plugins.lock_dir = %(here)s/data/cache/auth_plugin_lock
315 #beaker.cache.auth_plugins.lock_dir = %(here)s/data/cache/auth_plugin_lock
316 #beaker.cache.auth_plugins.url = postgresql://postgres:secret@localhost/rhodecode
316 #beaker.cache.auth_plugins.url = postgresql://postgres:secret@localhost/rhodecode
317 #beaker.cache.auth_plugins.url = mysql://root:secret@127.0.0.1/rhodecode
317 #beaker.cache.auth_plugins.url = mysql://root:secret@127.0.0.1/rhodecode
318 #beaker.cache.auth_plugins.sa.pool_recycle = 3600
318 #beaker.cache.auth_plugins.sa.pool_recycle = 3600
319 #beaker.cache.auth_plugins.sa.pool_size = 10
319 #beaker.cache.auth_plugins.sa.pool_size = 10
320 #beaker.cache.auth_plugins.sa.max_overflow = 0
320 #beaker.cache.auth_plugins.sa.max_overflow = 0
321
321
322 beaker.cache.repo_cache_long.type = memorylru_base
322 beaker.cache.repo_cache_long.type = memorylru_base
323 beaker.cache.repo_cache_long.max_items = 4096
323 beaker.cache.repo_cache_long.max_items = 4096
324 beaker.cache.repo_cache_long.expire = 2592000
324 beaker.cache.repo_cache_long.expire = 2592000
325
325
326 ## default is memorylru_base cache, configure only if required
326 ## default is memorylru_base cache, configure only if required
327 ## using multi-node or multi-worker setup
327 ## using multi-node or multi-worker setup
328 #beaker.cache.repo_cache_long.type = ext:memcached
328 #beaker.cache.repo_cache_long.type = ext:memcached
329 #beaker.cache.repo_cache_long.url = localhost:11211
329 #beaker.cache.repo_cache_long.url = localhost:11211
330 #beaker.cache.repo_cache_long.expire = 1209600
330 #beaker.cache.repo_cache_long.expire = 1209600
331 #beaker.cache.repo_cache_long.key_length = 256
331 #beaker.cache.repo_cache_long.key_length = 256
332
332
333 ####################################
333 ####################################
334 ### BEAKER SESSION ####
334 ### BEAKER SESSION ####
335 ####################################
335 ####################################
336
336
337 ## .session.type is type of storage options for the session, current allowed
337 ## .session.type is type of storage options for the session, current allowed
338 ## types are file, ext:memcached, ext:database, and memory (default).
338 ## types are file, ext:memcached, ext:database, and memory (default).
339 beaker.session.type = file
339 beaker.session.type = file
340 beaker.session.data_dir = %(here)s/data/sessions/data
340 beaker.session.data_dir = %(here)s/data/sessions/data
341
341
342 ## db based session, fast, and allows easy management over logged in users
342 ## db based session, fast, and allows easy management over logged in users
343 #beaker.session.type = ext:database
343 #beaker.session.type = ext:database
344 #beaker.session.table_name = db_session
344 #beaker.session.table_name = db_session
345 #beaker.session.sa.url = postgresql://postgres:secret@localhost/rhodecode
345 #beaker.session.sa.url = postgresql://postgres:secret@localhost/rhodecode
346 #beaker.session.sa.url = mysql://root:secret@127.0.0.1/rhodecode
346 #beaker.session.sa.url = mysql://root:secret@127.0.0.1/rhodecode
347 #beaker.session.sa.pool_recycle = 3600
347 #beaker.session.sa.pool_recycle = 3600
348 #beaker.session.sa.echo = false
348 #beaker.session.sa.echo = false
349
349
350 beaker.session.key = rhodecode
350 beaker.session.key = rhodecode
351 beaker.session.secret = production-rc-uytcxaz
351 beaker.session.secret = production-rc-uytcxaz
352 beaker.session.lock_dir = %(here)s/data/sessions/lock
352 beaker.session.lock_dir = %(here)s/data/sessions/lock
353
353
354 ## Secure encrypted cookie. Requires AES and AES python libraries
354 ## Secure encrypted cookie. Requires AES and AES python libraries
355 ## you must disable beaker.session.secret to use this
355 ## you must disable beaker.session.secret to use this
356 #beaker.session.encrypt_key = key_for_encryption
356 #beaker.session.encrypt_key = key_for_encryption
357 #beaker.session.validate_key = validation_key
357 #beaker.session.validate_key = validation_key
358
358
359 ## sets session as invalid (also logging out the user) if it has not been
359 ## sets session as invalid (also logging out the user) if it has not been
360 ## accessed for given amount of time in seconds
360 ## accessed for given amount of time in seconds
361 beaker.session.timeout = 2592000
361 beaker.session.timeout = 2592000
362 beaker.session.httponly = true
362 beaker.session.httponly = true
363 ## Path to use for the cookie. Set to prefix if you use prefix middleware
363 ## Path to use for the cookie. Set to prefix if you use prefix middleware
364 #beaker.session.cookie_path = /custom_prefix
364 #beaker.session.cookie_path = /custom_prefix
365
365
366 ## uncomment for https secure cookie
366 ## uncomment for https secure cookie
367 beaker.session.secure = false
367 beaker.session.secure = false
368
368
369 ## auto save the session so you do not need to call .save()
369 ## auto save the session so you do not need to call .save()
370 beaker.session.auto = false
370 beaker.session.auto = false
371
371
372 ## default cookie expiration time in seconds, set to `true` to set expire
372 ## default cookie expiration time in seconds, set to `true` to set expire
373 ## at browser close
373 ## at browser close
374 #beaker.session.cookie_expires = 3600
374 #beaker.session.cookie_expires = 3600
375
375
376 ###################################
376 ###################################
377 ## SEARCH INDEXING CONFIGURATION ##
377 ## SEARCH INDEXING CONFIGURATION ##
378 ###################################
378 ###################################
379 ## Full text search indexer is available in rhodecode-tools under
379 ## Full text search indexer is available in rhodecode-tools under
380 ## `rhodecode-tools index` command
380 ## `rhodecode-tools index` command
381
381
382 ## WHOOSH Backend, doesn't require additional services to run
382 ## WHOOSH Backend, doesn't require additional services to run
383 ## it works well with a few dozen repos
383 ## it works well with a few dozen repos
384 search.module = rhodecode.lib.index.whoosh
384 search.module = rhodecode.lib.index.whoosh
385 search.location = %(here)s/data/index
385 search.location = %(here)s/data/index
386
386
387 ########################################
387 ########################################
388 ### CHANNELSTREAM CONFIG ####
388 ### CHANNELSTREAM CONFIG ####
389 ########################################
389 ########################################
390 ## channelstream enables persistent connections and live notification
390 ## channelstream enables persistent connections and live notification
391 ## in the system. It's also used by the chat system
391 ## in the system. It's also used by the chat system
392 channelstream.enabled = false
392 channelstream.enabled = false
393
393
394 ## server address for channelstream server on the backend
394 ## server address for channelstream server on the backend
395 channelstream.server = 127.0.0.1:9800
395 channelstream.server = 127.0.0.1:9800
396
396
397 ## location of the channelstream server from outside world
397 ## location of the channelstream server from outside world
398 ## use ws:// for http or wss:// for https. This address needs to be handled
398 ## use ws:// for http or wss:// for https. This address needs to be handled
399 ## by external HTTP server such as Nginx or Apache
399 ## by external HTTP server such as Nginx or Apache
400 ## see nginx/apache configuration examples in our docs
400 ## see nginx/apache configuration examples in our docs
401 channelstream.ws_url = ws://rhodecode.yourserver.com/_channelstream
401 channelstream.ws_url = ws://rhodecode.yourserver.com/_channelstream
402 channelstream.secret = secret
402 channelstream.secret = secret
403 channelstream.history.location = %(here)s/channelstream_history
403 channelstream.history.location = %(here)s/channelstream_history
404
404
405 ## Internal application path that Javascript uses to connect into.
405 ## Internal application path that Javascript uses to connect into.
406 ## If you use proxy-prefix the prefix should be added before /_channelstream
406 ## If you use proxy-prefix the prefix should be added before /_channelstream
407 channelstream.proxy_path = /_channelstream
407 channelstream.proxy_path = /_channelstream
408
408
409
409
410 ###################################
410 ###################################
411 ## APPENLIGHT CONFIG ##
411 ## APPENLIGHT CONFIG ##
412 ###################################
412 ###################################
413
413
414 ## Appenlight is tailored to work with RhodeCode, see
414 ## Appenlight is tailored to work with RhodeCode, see
415 ## http://appenlight.com for details how to obtain an account
415 ## http://appenlight.com for details how to obtain an account
416
416
417 ## appenlight integration enabled
417 ## appenlight integration enabled
418 appenlight = false
418 appenlight = false
419
419
420 appenlight.server_url = https://api.appenlight.com
420 appenlight.server_url = https://api.appenlight.com
421 appenlight.api_key = YOUR_API_KEY
421 appenlight.api_key = YOUR_API_KEY
422 #appenlight.transport_config = https://api.appenlight.com?threaded=1&timeout=5
422 #appenlight.transport_config = https://api.appenlight.com?threaded=1&timeout=5
423
423
424 # used for JS client
424 # used for JS client
425 appenlight.api_public_key = YOUR_API_PUBLIC_KEY
425 appenlight.api_public_key = YOUR_API_PUBLIC_KEY
426
426
427 ## TWEAK AMOUNT OF INFO SENT HERE
427 ## TWEAK AMOUNT OF INFO SENT HERE
428
428
429 ## enables 404 error logging (default False)
429 ## enables 404 error logging (default False)
430 appenlight.report_404 = false
430 appenlight.report_404 = false
431
431
432 ## time in seconds after request is considered being slow (default 1)
432 ## time in seconds after request is considered being slow (default 1)
433 appenlight.slow_request_time = 1
433 appenlight.slow_request_time = 1
434
434
435 ## record slow requests in application
435 ## record slow requests in application
436 ## (needs to be enabled for slow datastore recording and time tracking)
436 ## (needs to be enabled for slow datastore recording and time tracking)
437 appenlight.slow_requests = true
437 appenlight.slow_requests = true
438
438
439 ## enable hooking to application loggers
439 ## enable hooking to application loggers
440 appenlight.logging = true
440 appenlight.logging = true
441
441
442 ## minimum log level for log capture
442 ## minimum log level for log capture
443 appenlight.logging.level = WARNING
443 appenlight.logging.level = WARNING
444
444
445 ## send logs only from erroneous/slow requests
445 ## send logs only from erroneous/slow requests
446 ## (saves API quota for intensive logging)
446 ## (saves API quota for intensive logging)
447 appenlight.logging_on_error = false
447 appenlight.logging_on_error = false
448
448
449 ## list of additional keywords that should be grabbed from environ object
449 ## list of additional keywords that should be grabbed from environ object
450 ## can be string with comma separated list of words in lowercase
450 ## can be string with comma separated list of words in lowercase
451 ## (by default client will always send following info:
451 ## (by default client will always send following info:
452 ## 'REMOTE_USER', 'REMOTE_ADDR', 'SERVER_NAME', 'CONTENT_TYPE' + all keys that
452 ## 'REMOTE_USER', 'REMOTE_ADDR', 'SERVER_NAME', 'CONTENT_TYPE' + all keys that
453 ## start with HTTP* this list can be extended with additional keywords here
453 ## start with HTTP* this list can be extended with additional keywords here
454 appenlight.environ_keys_whitelist =
454 appenlight.environ_keys_whitelist =
455
455
456 ## list of keywords that should be blanked from request object
456 ## list of keywords that should be blanked from request object
457 ## can be string with comma separated list of words in lowercase
457 ## can be string with comma separated list of words in lowercase
458 ## (by default client will always blank keys that contain following words
458 ## (by default client will always blank keys that contain following words
459 ## 'password', 'passwd', 'pwd', 'auth_tkt', 'secret', 'csrf'
459 ## 'password', 'passwd', 'pwd', 'auth_tkt', 'secret', 'csrf'
460 ## this list can be extended with additional keywords set here
460 ## this list can be extended with additional keywords set here
461 appenlight.request_keys_blacklist =
461 appenlight.request_keys_blacklist =
462
462
463 ## list of namespaces that should be ignored when gathering log entries
463 ## list of namespaces that should be ignored when gathering log entries
464 ## can be string with comma separated list of namespaces
464 ## can be string with comma separated list of namespaces
465 ## (by default the client ignores own entries: appenlight_client.client)
465 ## (by default the client ignores own entries: appenlight_client.client)
466 appenlight.log_namespace_blacklist =
466 appenlight.log_namespace_blacklist =
467
467
468
468
469 ################################################################################
469 ################################################################################
470 ## WARNING: *THE LINE BELOW MUST BE UNCOMMENTED ON A PRODUCTION ENVIRONMENT* ##
470 ## WARNING: *THE LINE BELOW MUST BE UNCOMMENTED ON A PRODUCTION ENVIRONMENT* ##
471 ## Debug mode will enable the interactive debugging tool, allowing ANYONE to ##
471 ## Debug mode will enable the interactive debugging tool, allowing ANYONE to ##
472 ## execute malicious code after an exception is raised. ##
472 ## execute malicious code after an exception is raised. ##
473 ################################################################################
473 ################################################################################
474 set debug = false
474 set debug = false
475
475
476
476
477 ###########################################
477 ###########################################
478 ### MAIN RHODECODE DATABASE CONFIG ###
478 ### MAIN RHODECODE DATABASE CONFIG ###
479 ###########################################
479 ###########################################
480 #sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode.db?timeout=30
480 #sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode.db?timeout=30
481 #sqlalchemy.db1.url = postgresql://postgres:qweqwe@localhost/rhodecode
481 #sqlalchemy.db1.url = postgresql://postgres:qweqwe@localhost/rhodecode
482 #sqlalchemy.db1.url = mysql://root:qweqwe@localhost/rhodecode
482 #sqlalchemy.db1.url = mysql://root:qweqwe@localhost/rhodecode
483 sqlalchemy.db1.url = postgresql://postgres:qweqwe@localhost/rhodecode
483 sqlalchemy.db1.url = postgresql://postgres:qweqwe@localhost/rhodecode
484
484
485 # see sqlalchemy docs for other advanced settings
485 # see sqlalchemy docs for other advanced settings
486
486
487 ## print the sql statements to output
487 ## print the sql statements to output
488 sqlalchemy.db1.echo = false
488 sqlalchemy.db1.echo = false
489 ## recycle the connections after this amount of seconds
489 ## recycle the connections after this amount of seconds
490 sqlalchemy.db1.pool_recycle = 3600
490 sqlalchemy.db1.pool_recycle = 3600
491 sqlalchemy.db1.convert_unicode = true
491 sqlalchemy.db1.convert_unicode = true
492
492
493 ## the number of connections to keep open inside the connection pool.
493 ## the number of connections to keep open inside the connection pool.
494 ## 0 indicates no limit
494 ## 0 indicates no limit
495 #sqlalchemy.db1.pool_size = 5
495 #sqlalchemy.db1.pool_size = 5
496
496
497 ## the number of connections to allow in connection pool "overflow", that is
497 ## the number of connections to allow in connection pool "overflow", that is
498 ## connections that can be opened above and beyond the pool_size setting,
498 ## connections that can be opened above and beyond the pool_size setting,
499 ## which defaults to five.
499 ## which defaults to five.
500 #sqlalchemy.db1.max_overflow = 10
500 #sqlalchemy.db1.max_overflow = 10
501
501
502
502
503 ##################
503 ##################
504 ### VCS CONFIG ###
504 ### VCS CONFIG ###
505 ##################
505 ##################
506 vcs.server.enable = true
506 vcs.server.enable = true
507 vcs.server = localhost:9900
507 vcs.server = localhost:9900
508
508
509 ## Web server connectivity protocol, responsible for web based VCS operations
509 ## Web server connectivity protocol, responsible for web based VCS operations
510 ## Available protocols are:
510 ## Available protocols are:
511 ## `pyro4` - use pyro4 server
512 ## `http` - use http-rpc backend (default)
511 ## `http` - use http-rpc backend (default)
513 vcs.server.protocol = http
512 vcs.server.protocol = http
514
513
515 ## Push/Pull operations protocol, available options are:
514 ## Push/Pull operations protocol, available options are:
516 ## `pyro4` - use pyro4 server
517 ## `http` - use http-rpc backend (default)
515 ## `http` - use http-rpc backend (default)
518 ##
516 ##
519 vcs.scm_app_implementation = http
517 vcs.scm_app_implementation = http
520
518
521 ## Push/Pull operations hooks protocol, available options are:
519 ## Push/Pull operations hooks protocol, available options are:
522 ## `pyro4` - use pyro4 server
523 ## `http` - use http-rpc backend (default)
520 ## `http` - use http-rpc backend (default)
524 vcs.hooks.protocol = http
521 vcs.hooks.protocol = http
525
522
526 vcs.server.log_level = info
523 vcs.server.log_level = info
527 ## Start VCSServer with this instance as a subprocess, useful for development
524 ## Start VCSServer with this instance as a subprocess, useful for development
528 vcs.start_server = false
525 vcs.start_server = false
529
526
530 ## List of enabled VCS backends, available options are:
527 ## List of enabled VCS backends, available options are:
531 ## `hg` - mercurial
528 ## `hg` - mercurial
532 ## `git` - git
529 ## `git` - git
533 ## `svn` - subversion
530 ## `svn` - subversion
534 vcs.backends = hg, git, svn
531 vcs.backends = hg, git, svn
535
532
536 vcs.connection_timeout = 3600
533 vcs.connection_timeout = 3600
537 ## Compatibility version when creating SVN repositories. Defaults to newest version when commented out.
534 ## Compatibility version when creating SVN repositories. Defaults to newest version when commented out.
538 ## Available options are: pre-1.4-compatible, pre-1.5-compatible, pre-1.6-compatible, pre-1.8-compatible
535 ## Available options are: pre-1.4-compatible, pre-1.5-compatible, pre-1.6-compatible, pre-1.8-compatible
539 #vcs.svn.compatible_version = pre-1.8-compatible
536 #vcs.svn.compatible_version = pre-1.8-compatible
540
537
541
538
542 ############################################################
539 ############################################################
543 ### Subversion proxy support (mod_dav_svn) ###
540 ### Subversion proxy support (mod_dav_svn) ###
544 ### Maps RhodeCode repo groups into SVN paths for Apache ###
541 ### Maps RhodeCode repo groups into SVN paths for Apache ###
545 ############################################################
542 ############################################################
546 ## Enable or disable the config file generation.
543 ## Enable or disable the config file generation.
547 svn.proxy.generate_config = false
544 svn.proxy.generate_config = false
548 ## Generate config file with `SVNListParentPath` set to `On`.
545 ## Generate config file with `SVNListParentPath` set to `On`.
549 svn.proxy.list_parent_path = true
546 svn.proxy.list_parent_path = true
550 ## Set location and file name of generated config file.
547 ## Set location and file name of generated config file.
551 svn.proxy.config_file_path = %(here)s/mod_dav_svn.conf
548 svn.proxy.config_file_path = %(here)s/mod_dav_svn.conf
552 ## Used as a prefix to the `Location` block in the generated config file.
549 ## Used as a prefix to the `Location` block in the generated config file.
553 ## In most cases it should be set to `/`.
550 ## In most cases it should be set to `/`.
554 svn.proxy.location_root = /
551 svn.proxy.location_root = /
555 ## Command to reload the mod dav svn configuration on change.
552 ## Command to reload the mod dav svn configuration on change.
556 ## Example: `/etc/init.d/apache2 reload`
553 ## Example: `/etc/init.d/apache2 reload`
557 #svn.proxy.reload_cmd = /etc/init.d/apache2 reload
554 #svn.proxy.reload_cmd = /etc/init.d/apache2 reload
558 ## If the timeout expires before the reload command finishes, the command will
555 ## If the timeout expires before the reload command finishes, the command will
559 ## be killed. Setting it to zero means no timeout. Defaults to 10 seconds.
556 ## be killed. Setting it to zero means no timeout. Defaults to 10 seconds.
560 #svn.proxy.reload_timeout = 10
557 #svn.proxy.reload_timeout = 10
561
558
562 ## Dummy marker to add new entries after.
559 ## Dummy marker to add new entries after.
563 ## Add any custom entries below. Please don't remove.
560 ## Add any custom entries below. Please don't remove.
564 custom.conf = 1
561 custom.conf = 1
565
562
566
563
567 ################################
564 ################################
568 ### LOGGING CONFIGURATION ####
565 ### LOGGING CONFIGURATION ####
569 ################################
566 ################################
570 [loggers]
567 [loggers]
571 keys = root, routes, rhodecode, sqlalchemy, beaker, templates
568 keys = root, routes, rhodecode, sqlalchemy, beaker, templates
572
569
573 [handlers]
570 [handlers]
574 keys = console, console_sql
571 keys = console, console_sql
575
572
576 [formatters]
573 [formatters]
577 keys = generic, color_formatter, color_formatter_sql
574 keys = generic, color_formatter, color_formatter_sql
578
575
579 #############
576 #############
580 ## LOGGERS ##
577 ## LOGGERS ##
581 #############
578 #############
582 [logger_root]
579 [logger_root]
583 level = NOTSET
580 level = NOTSET
584 handlers = console
581 handlers = console
585
582
586 [logger_routes]
583 [logger_routes]
587 level = DEBUG
584 level = DEBUG
588 handlers =
585 handlers =
589 qualname = routes.middleware
586 qualname = routes.middleware
590 ## "level = DEBUG" logs the route matched and routing variables.
587 ## "level = DEBUG" logs the route matched and routing variables.
591 propagate = 1
588 propagate = 1
592
589
593 [logger_beaker]
590 [logger_beaker]
594 level = DEBUG
591 level = DEBUG
595 handlers =
592 handlers =
596 qualname = beaker.container
593 qualname = beaker.container
597 propagate = 1
594 propagate = 1
598
595
599 [logger_templates]
596 [logger_templates]
600 level = INFO
597 level = INFO
601 handlers =
598 handlers =
602 qualname = pylons.templating
599 qualname = pylons.templating
603 propagate = 1
600 propagate = 1
604
601
605 [logger_rhodecode]
602 [logger_rhodecode]
606 level = DEBUG
603 level = DEBUG
607 handlers =
604 handlers =
608 qualname = rhodecode
605 qualname = rhodecode
609 propagate = 1
606 propagate = 1
610
607
611 [logger_sqlalchemy]
608 [logger_sqlalchemy]
612 level = INFO
609 level = INFO
613 handlers = console_sql
610 handlers = console_sql
614 qualname = sqlalchemy.engine
611 qualname = sqlalchemy.engine
615 propagate = 0
612 propagate = 0
616
613
617 ##############
614 ##############
618 ## HANDLERS ##
615 ## HANDLERS ##
619 ##############
616 ##############
620
617
621 [handler_console]
618 [handler_console]
622 class = StreamHandler
619 class = StreamHandler
623 args = (sys.stderr, )
620 args = (sys.stderr, )
624 level = INFO
621 level = INFO
625 formatter = generic
622 formatter = generic
626
623
627 [handler_console_sql]
624 [handler_console_sql]
628 class = StreamHandler
625 class = StreamHandler
629 args = (sys.stderr, )
626 args = (sys.stderr, )
630 level = WARN
627 level = WARN
631 formatter = generic
628 formatter = generic
632
629
633 ################
630 ################
634 ## FORMATTERS ##
631 ## FORMATTERS ##
635 ################
632 ################
636
633
637 [formatter_generic]
634 [formatter_generic]
638 class = rhodecode.lib.logging_formatter.Pyro4AwareFormatter
635 class = rhodecode.lib.logging_formatter.ExceptionAwareFormatter
639 format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
636 format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
640 datefmt = %Y-%m-%d %H:%M:%S
637 datefmt = %Y-%m-%d %H:%M:%S
641
638
642 [formatter_color_formatter]
639 [formatter_color_formatter]
643 class = rhodecode.lib.logging_formatter.ColorFormatter
640 class = rhodecode.lib.logging_formatter.ColorFormatter
644 format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
641 format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
645 datefmt = %Y-%m-%d %H:%M:%S
642 datefmt = %Y-%m-%d %H:%M:%S
646
643
647 [formatter_color_formatter_sql]
644 [formatter_color_formatter_sql]
648 class = rhodecode.lib.logging_formatter.ColorFormatterSql
645 class = rhodecode.lib.logging_formatter.ColorFormatterSql
649 format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
646 format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
650 datefmt = %Y-%m-%d %H:%M:%S
647 datefmt = %Y-%m-%d %H:%M:%S
@@ -1,23 +1,22 b''
1
1
2 ======================================
2 ======================================
3 VCS client and VCSServer integration
3 VCS client and VCSServer integration
4 ======================================
4 ======================================
5
5
6 Enterprise uses the VCSServer as a backend to provide version control
6 Enterprise uses the VCSServer as a backend to provide version control
7 functionalities. This section describes the components in Enterprise which talk
7 functionalities. This section describes the components in Enterprise which talk
8 to the VCSServer.
8 to the VCSServer.
9
9
10 The client library is implemented in :mod:`rhodecode.lib.vcs`. For HTTP based
10 The client library is implemented in :mod:`rhodecode.lib.vcs`. For HTTP based
11 access of the command line clients special middlewares and utilities are
11 access of the command line clients special middlewares and utilities are
12 implemented in :mod:`rhodecode.lib.middleware`.
12 implemented in :mod:`rhodecode.lib.middleware`.
13
13
14
14
15
15
16
16
17 .. toctree::
17 .. toctree::
18 :maxdepth: 2
18 :maxdepth: 2
19
19
20 http-transition
21 middleware
20 middleware
22 vcsserver
21 vcsserver
23 subversion
22 subversion
@@ -1,130 +1,124 b''
1 .. _debug-mode:
1 .. _debug-mode:
2
2
3 Enabling Debug Mode
3 Enabling Debug Mode
4 -------------------
4 -------------------
5
5
6 To enable debug mode on a |RCE| instance you need to set the debug property
6 To enable debug mode on a |RCE| instance you need to set the debug property
7 in the :file:`/home/{user}/.rccontrol/{instance-id}/rhodecode.ini` file. To
7 in the :file:`/home/{user}/.rccontrol/{instance-id}/rhodecode.ini` file. To
8 do this, use the following steps
8 do this, use the following steps
9
9
10 1. Open the file and set the ``debug`` line to ``true``
10 1. Open the file and set the ``debug`` line to ``true``
11 2. Restart you instance using the ``rccontrol restart`` command,
11 2. Restart you instance using the ``rccontrol restart`` command,
12 see the following example:
12 see the following example:
13
13
14 You can also set the log level, the follow are the valid options;
14 You can also set the log level, the follow are the valid options;
15 ``debug``, ``info``, ``warning``, or ``fatal``.
15 ``debug``, ``info``, ``warning``, or ``fatal``.
16
16
17 .. code-block:: ini
17 .. code-block:: ini
18
18
19 [DEFAULT]
19 [DEFAULT]
20 debug = true
20 debug = true
21 pdebug = false
21 pdebug = false
22
22
23 .. code-block:: bash
23 .. code-block:: bash
24
24
25 # Restart your instance
25 # Restart your instance
26 $ rccontrol restart enterprise-1
26 $ rccontrol restart enterprise-1
27 Instance "enterprise-1" successfully stopped.
27 Instance "enterprise-1" successfully stopped.
28 Instance "enterprise-1" successfully started.
28 Instance "enterprise-1" successfully started.
29
29
30 Debug and Logging Configuration
30 Debug and Logging Configuration
31 ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
31 ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
32
32
33 Further debugging and logging settings can also be set in the
33 Further debugging and logging settings can also be set in the
34 :file:`/home/{user}/.rccontrol/{instance-id}/rhodecode.ini` file.
34 :file:`/home/{user}/.rccontrol/{instance-id}/rhodecode.ini` file.
35
35
36 In the logging section, the various packages that run with |RCE| can have
36 In the logging section, the various packages that run with |RCE| can have
37 different debug levels set. If you want to increase the logging level change
37 different debug levels set. If you want to increase the logging level change
38 ``level = DEBUG`` line to one of the valid options.
38 ``level = DEBUG`` line to one of the valid options.
39
39
40 You also need to change the log level for handlers. See the example
40 You also need to change the log level for handlers. See the example
41 ``##handler`` section below. The ``handler`` level takes the same options as
41 ``##handler`` section below. The ``handler`` level takes the same options as
42 the ``debug`` level.
42 the ``debug`` level.
43
43
44 .. code-block:: ini
44 .. code-block:: ini
45
45
46 ################################
46 ################################
47 ### LOGGING CONFIGURATION ####
47 ### LOGGING CONFIGURATION ####
48 ################################
48 ################################
49 [loggers]
49 [loggers]
50 keys = root, routes, rhodecode, sqlalchemy, beaker, pyro4, templates
50 keys = root, routes, rhodecode, sqlalchemy, beaker, templates
51
51
52 [handlers]
52 [handlers]
53 keys = console, console_sql, file, file_rotating
53 keys = console, console_sql, file, file_rotating
54
54
55 [formatters]
55 [formatters]
56 keys = generic, color_formatter, color_formatter_sql
56 keys = generic, color_formatter, color_formatter_sql
57
57
58 #############
58 #############
59 ## LOGGERS ##
59 ## LOGGERS ##
60 #############
60 #############
61 [logger_root]
61 [logger_root]
62 level = NOTSET
62 level = NOTSET
63 handlers = console
63 handlers = console
64
64
65 [logger_routes]
65 [logger_routes]
66 level = DEBUG
66 level = DEBUG
67 handlers =
67 handlers =
68 qualname = routes.middleware
68 qualname = routes.middleware
69 ## "level = DEBUG" logs the route matched and routing variables.
69 ## "level = DEBUG" logs the route matched and routing variables.
70 propagate = 1
70 propagate = 1
71
71
72 [logger_beaker]
72 [logger_beaker]
73 level = DEBUG
73 level = DEBUG
74 handlers =
74 handlers =
75 qualname = beaker.container
75 qualname = beaker.container
76 propagate = 1
76 propagate = 1
77
77
78 [logger_pyro4]
79 level = DEBUG
80 handlers =
81 qualname = Pyro4
82 propagate = 1
83
84 [logger_templates]
78 [logger_templates]
85 level = INFO
79 level = INFO
86 handlers =
80 handlers =
87 qualname = pylons.templating
81 qualname = pylons.templating
88 propagate = 1
82 propagate = 1
89
83
90 [logger_rhodecode]
84 [logger_rhodecode]
91 level = DEBUG
85 level = DEBUG
92 handlers =
86 handlers =
93 qualname = rhodecode
87 qualname = rhodecode
94 propagate = 1
88 propagate = 1
95
89
96 [logger_sqlalchemy]
90 [logger_sqlalchemy]
97 level = INFO
91 level = INFO
98 handlers = console_sql
92 handlers = console_sql
99 qualname = sqlalchemy.engine
93 qualname = sqlalchemy.engine
100 propagate = 0
94 propagate = 0
101
95
102 ##############
96 ##############
103 ## HANDLERS ##
97 ## HANDLERS ##
104 ##############
98 ##############
105
99
106 [handler_console]
100 [handler_console]
107 class = StreamHandler
101 class = StreamHandler
108 args = (sys.stderr,)
102 args = (sys.stderr,)
109 level = INFO
103 level = INFO
110 formatter = generic
104 formatter = generic
111
105
112 [handler_console_sql]
106 [handler_console_sql]
113 class = StreamHandler
107 class = StreamHandler
114 args = (sys.stderr,)
108 args = (sys.stderr,)
115 level = WARN
109 level = WARN
116 formatter = generic
110 formatter = generic
117
111
118 [handler_file]
112 [handler_file]
119 class = FileHandler
113 class = FileHandler
120 args = ('rhodecode.log', 'a',)
114 args = ('rhodecode.log', 'a',)
121 level = INFO
115 level = INFO
122 formatter = generic
116 formatter = generic
123
117
124 [handler_file_rotating]
118 [handler_file_rotating]
125 class = logging.handlers.TimedRotatingFileHandler
119 class = logging.handlers.TimedRotatingFileHandler
126 # 'D', 5 - rotate every 5days
120 # 'D', 5 - rotate every 5days
127 # you can set 'h', 'midnight'
121 # you can set 'h', 'midnight'
128 args = ('rhodecode.log', 'D', 5, 10,)
122 args = ('rhodecode.log', 'D', 5, 10,)
129 level = INFO
123 level = INFO
130 formatter = generic
124 formatter = generic
@@ -1,304 +1,298 b''
1 .. _vcs-server:
1 .. _vcs-server:
2
2
3 VCS Server Management
3 VCS Server Management
4 ---------------------
4 ---------------------
5
5
6 The VCS Server handles |RCM| backend functionality. You need to configure
6 The VCS Server handles |RCM| backend functionality. You need to configure
7 a VCS Server to run with a |RCM| instance. If you do not, you will be missing
7 a VCS Server to run with a |RCM| instance. If you do not, you will be missing
8 the connection between |RCM| and its |repos|. This will cause error messages
8 the connection between |RCM| and its |repos|. This will cause error messages
9 on the web interface. You can run your setup in the following configurations,
9 on the web interface. You can run your setup in the following configurations,
10 currently the best performance is one VCS Server per |RCM| instance:
10 currently the best performance is one VCS Server per |RCM| instance:
11
11
12 * One VCS Server per |RCM| instance.
12 * One VCS Server per |RCM| instance.
13 * One VCS Server handling multiple instances.
13 * One VCS Server handling multiple instances.
14
14
15 .. important::
15 .. important::
16
16
17 If your server locale settings are not correctly configured,
17 If your server locale settings are not correctly configured,
18 |RCE| and the VCS Server can run into issues. See this `Ask Ubuntu`_ post
18 |RCE| and the VCS Server can run into issues. See this `Ask Ubuntu`_ post
19 which explains the problem and gives a solution.
19 which explains the problem and gives a solution.
20
20
21 For more information, see the following sections:
21 For more information, see the following sections:
22
22
23 * :ref:`install-vcs`
23 * :ref:`install-vcs`
24 * :ref:`config-vcs`
24 * :ref:`config-vcs`
25 * :ref:`vcs-server-options`
25 * :ref:`vcs-server-options`
26 * :ref:`vcs-server-versions`
26 * :ref:`vcs-server-versions`
27 * :ref:`vcs-server-maintain`
27 * :ref:`vcs-server-maintain`
28 * :ref:`vcs-server-config-file`
28 * :ref:`vcs-server-config-file`
29 * :ref:`svn-http`
29 * :ref:`svn-http`
30
30
31 .. _install-vcs:
31 .. _install-vcs:
32
32
33 VCS Server Installation
33 VCS Server Installation
34 ^^^^^^^^^^^^^^^^^^^^^^^
34 ^^^^^^^^^^^^^^^^^^^^^^^
35
35
36 To install a VCS Server, see
36 To install a VCS Server, see
37 :ref:`Installing a VCS server <control:install-vcsserver>`.
37 :ref:`Installing a VCS server <control:install-vcsserver>`.
38
38
39 .. _config-vcs:
39 .. _config-vcs:
40
40
41 Hooking |RCE| to its VCS Server
41 Hooking |RCE| to its VCS Server
42 ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
42 ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
43
43
44 To configure a |RCE| instance to use a VCS server, see
44 To configure a |RCE| instance to use a VCS server, see
45 :ref:`Configuring the VCS Server connection <control:manually-vcsserver-ini>`.
45 :ref:`Configuring the VCS Server connection <control:manually-vcsserver-ini>`.
46
46
47 .. _vcs-server-options:
47 .. _vcs-server-options:
48
48
49 |RCE| VCS Server Options
49 |RCE| VCS Server Options
50 ^^^^^^^^^^^^^^^^^^^^^^^^
50 ^^^^^^^^^^^^^^^^^^^^^^^^
51
51
52 The following list shows the available options on the |RCM| side of the
52 The following list shows the available options on the |RCM| side of the
53 connection to the VCS Server. The settings are configured per
53 connection to the VCS Server. The settings are configured per
54 instance in the
54 instance in the
55 :file:`/home/{user}/.rccontrol/{instance-id}/rhodecode.ini` file.
55 :file:`/home/{user}/.rccontrol/{instance-id}/rhodecode.ini` file.
56
56
57 .. rst-class:: dl-horizontal
57 .. rst-class:: dl-horizontal
58
58
59 \vcs.backends <available-vcs-systems>
59 \vcs.backends <available-vcs-systems>
60 Set a comma-separated list of the |repo| options available from the
60 Set a comma-separated list of the |repo| options available from the
61 web interface. The default is ``hg, git, svn``,
61 web interface. The default is ``hg, git, svn``,
62 which is all |repo| types available.
62 which is all |repo| types available.
63
63
64 \vcs.connection_timeout <seconds>
64 \vcs.connection_timeout <seconds>
65 Set the length of time in seconds that the VCS Server waits for
65 Set the length of time in seconds that the VCS Server waits for
66 requests to process. After the timeout expires,
66 requests to process. After the timeout expires,
67 the request is closed. The default is ``3600``. Set to a higher
67 the request is closed. The default is ``3600``. Set to a higher
68 number if you experience network latency, or timeout issues with very
68 number if you experience network latency, or timeout issues with very
69 large push/pull requests.
69 large push/pull requests.
70
70
71 \vcs.server.enable <boolean>
71 \vcs.server.enable <boolean>
72 Enable or disable the VCS Server. The available options are ``true`` or
72 Enable or disable the VCS Server. The available options are ``true`` or
73 ``false``. The default is ``true``.
73 ``false``. The default is ``true``.
74
74
75 \vcs.server <host:port>
75 \vcs.server <host:port>
76 Set the host, either hostname or IP Address, and port of the VCS server
76 Set the host, either hostname or IP Address, and port of the VCS server
77 you wish to run with your |RCM| instance.
77 you wish to run with your |RCM| instance.
78
78
79 .. code-block:: ini
79 .. code-block:: ini
80
80
81 ##################
81 ##################
82 ### VCS CONFIG ###
82 ### VCS CONFIG ###
83 ##################
83 ##################
84 # set this line to match your VCS Server
84 # set this line to match your VCS Server
85 vcs.server = 127.0.0.1:10004
85 vcs.server = 127.0.0.1:10004
86 # Set to False to disable the VCS Server
86 # Set to False to disable the VCS Server
87 vcs.server.enable = True
87 vcs.server.enable = True
88 vcs.backends = hg, git, svn
88 vcs.backends = hg, git, svn
89 vcs.connection_timeout = 3600
89 vcs.connection_timeout = 3600
90
90
91
91
92 .. _vcs-server-versions:
92 .. _vcs-server-versions:
93
93
94 VCS Server Versions
94 VCS Server Versions
95 ^^^^^^^^^^^^^^^^^^^
95 ^^^^^^^^^^^^^^^^^^^
96
96
97 An updated version of the VCS Server is released with each |RCE| version. Use
97 An updated version of the VCS Server is released with each |RCE| version. Use
98 the VCS Server number that matches with the |RCE| version to pair the
98 the VCS Server number that matches with the |RCE| version to pair the
99 appropriate ones together. For |RCE| versions pre 3.3.0,
99 appropriate ones together. For |RCE| versions pre 3.3.0,
100 VCS Server 1.X.Y works with |RCE| 3.X.Y, for example:
100 VCS Server 1.X.Y works with |RCE| 3.X.Y, for example:
101
101
102 * VCS Server 1.0.0 works with |RCE| 3.0.0
102 * VCS Server 1.0.0 works with |RCE| 3.0.0
103 * VCS Server 1.2.2 works with |RCE| 3.2.2
103 * VCS Server 1.2.2 works with |RCE| 3.2.2
104
104
105 For |RCE| versions post 3.3.0, the VCS Server and |RCE| version numbers
105 For |RCE| versions post 3.3.0, the VCS Server and |RCE| version numbers
106 match, for example:
106 match, for example:
107
107
108 * VCS Server |release| works with |RCE| |release|
108 * VCS Server |release| works with |RCE| |release|
109
109
110 .. _vcs-server-maintain:
110 .. _vcs-server-maintain:
111
111
112 VCS Server Memory Optimization
112 VCS Server Memory Optimization
113 ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
113 ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
114
114
115 To configure the VCS server to manage the cache efficiently, you need to
115 To configure the VCS server to manage the cache efficiently, you need to
116 configure the following options in the
116 configure the following options in the
117 :file:`/home/{user}/.rccontrol/{vcsserver-id}/vcsserver.ini` file. Once
117 :file:`/home/{user}/.rccontrol/{vcsserver-id}/vcsserver.ini` file. Once
118 configured, restart the VCS Server.
118 configured, restart the VCS Server.
119
119
120 .. rst-class:: dl-horizontal
120 .. rst-class:: dl-horizontal
121
121
122 \beaker.cache.repo_object.type = memorylru
122 \beaker.cache.repo_object.type = memorylru
123 Configures the cache to discard the least recently used items.
123 Configures the cache to discard the least recently used items.
124 This setting takes the following valid options:
124 This setting takes the following valid options:
125
125
126 * ``memorylru``: The default setting, which removes the least recently
126 * ``memorylru``: The default setting, which removes the least recently
127 used items from the cache.
127 used items from the cache.
128 * ``memory``: Runs the VCS Server without clearing the cache.
128 * ``memory``: Runs the VCS Server without clearing the cache.
129 * ``nocache``: Runs the VCS Server without a cache. This will
129 * ``nocache``: Runs the VCS Server without a cache. This will
130 dramatically reduce the VCS Server performance.
130 dramatically reduce the VCS Server performance.
131
131
132 \beaker.cache.repo_object.max_items = 100
132 \beaker.cache.repo_object.max_items = 100
133 Sets the maximum number of items stored in the cache, before the cache
133 Sets the maximum number of items stored in the cache, before the cache
134 starts to be cleared.
134 starts to be cleared.
135
135
136 As a general rule of thumb, running this value at 120 resulted in a
136 As a general rule of thumb, running this value at 120 resulted in a
137 5GB cache. Running it at 240 resulted in a 9GB cache. Your results
137 5GB cache. Running it at 240 resulted in a 9GB cache. Your results
138 will differ based on usage patterns and |repo| sizes.
138 will differ based on usage patterns and |repo| sizes.
139
139
140 Tweaking this value to run at a fairly constant memory load on your
140 Tweaking this value to run at a fairly constant memory load on your
141 server will help performance.
141 server will help performance.
142
142
143 To clear the cache completely, you can restart the VCS Server.
143 To clear the cache completely, you can restart the VCS Server.
144
144
145 .. important::
145 .. important::
146
146
147 While the VCS Server handles a restart gracefully on the web interface,
147 While the VCS Server handles a restart gracefully on the web interface,
148 it will drop connections during push/pull requests. So it is recommended
148 it will drop connections during push/pull requests. So it is recommended
149 you only perform this when there is very little traffic on the instance.
149 you only perform this when there is very little traffic on the instance.
150
150
151 Use the following example to restart your VCS Server,
151 Use the following example to restart your VCS Server,
152 for full details see the :ref:`RhodeCode Control CLI <control:rcc-cli>`.
152 for full details see the :ref:`RhodeCode Control CLI <control:rcc-cli>`.
153
153
154 .. code-block:: bash
154 .. code-block:: bash
155
155
156 $ rccontrol status
156 $ rccontrol status
157
157
158 .. code-block:: vim
158 .. code-block:: vim
159
159
160 - NAME: vcsserver-1
160 - NAME: vcsserver-1
161 - STATUS: RUNNING
161 - STATUS: RUNNING
162 - TYPE: VCSServer
162 - TYPE: VCSServer
163 - VERSION: 1.0.0
163 - VERSION: 1.0.0
164 - URL: http://127.0.0.1:10001
164 - URL: http://127.0.0.1:10001
165
165
166 $ rccontrol restart vcsserver-1
166 $ rccontrol restart vcsserver-1
167 Instance "vcsserver-1" successfully stopped.
167 Instance "vcsserver-1" successfully stopped.
168 Instance "vcsserver-1" successfully started.
168 Instance "vcsserver-1" successfully started.
169
169
170 .. _vcs-server-config-file:
170 .. _vcs-server-config-file:
171
171
172 VCS Server Configuration
172 VCS Server Configuration
173 ^^^^^^^^^^^^^^^^^^^^^^^^
173 ^^^^^^^^^^^^^^^^^^^^^^^^
174
174
175 You can configure settings for multiple VCS Servers on your
175 You can configure settings for multiple VCS Servers on your
176 system using their individual configuration files. Use the following
176 system using their individual configuration files. Use the following
177 properties inside the configuration file to set up your system. The default
177 properties inside the configuration file to set up your system. The default
178 location is :file:`home/{user}/.rccontrol/{vcsserver-id}/vcsserver.ini`.
178 location is :file:`home/{user}/.rccontrol/{vcsserver-id}/vcsserver.ini`.
179 For a more detailed explanation of the logger levers, see :ref:`debug-mode`.
179 For a more detailed explanation of the logger levers, see :ref:`debug-mode`.
180
180
181 .. rst-class:: dl-horizontal
181 .. rst-class:: dl-horizontal
182
182
183 \host <ip-address>
183 \host <ip-address>
184 Set the host on which the VCS Server will run.
184 Set the host on which the VCS Server will run.
185
185
186 \port <int>
186 \port <int>
187 Set the port number on which the VCS Server will be available.
187 Set the port number on which the VCS Server will be available.
188
188
189 \locale <locale_utf>
189 \locale <locale_utf>
190 Set the locale the VCS Server expects.
190 Set the locale the VCS Server expects.
191
191
192 \threadpool_size <int>
192 \threadpool_size <int>
193 Set the size of the threadpool used to communicate
193 Set the size of the threadpool used to communicate
194 with the WSGI workers. This should be at least 6 times the number of
194 with the WSGI workers. This should be at least 6 times the number of
195 WSGI worker processes.
195 WSGI worker processes.
196
196
197 \timeout <seconds>
197 \timeout <seconds>
198 Set the timeout for RPC communication in seconds.
198 Set the timeout for RPC communication in seconds.
199
199
200 .. note::
200 .. note::
201
201
202 After making changes, you need to restart your VCS Server to pick them up.
202 After making changes, you need to restart your VCS Server to pick them up.
203
203
204 .. code-block:: ini
204 .. code-block:: ini
205
205
206 ################################################################################
206 ################################################################################
207 # RhodeCode VCSServer - configuration #
207 # RhodeCode VCSServer - configuration #
208 # #
208 # #
209 ################################################################################
209 ################################################################################
210
210
211 [DEFAULT]
211 [DEFAULT]
212 host = 127.0.0.1
212 host = 127.0.0.1
213 port = 9900
213 port = 9900
214 locale = en_US.UTF-8
214 locale = en_US.UTF-8
215 # number of worker threads, this should be set based on a formula threadpool=N*6
215 # number of worker threads, this should be set based on a formula threadpool=N*6
216 # where N is number of RhodeCode Enterprise workers, eg. running 2 instances
216 # where N is number of RhodeCode Enterprise workers, eg. running 2 instances
217 # 8 gunicorn workers each would be 2 * 8 * 6 = 96, threadpool_size = 96
217 # 8 gunicorn workers each would be 2 * 8 * 6 = 96, threadpool_size = 96
218 threadpool_size = 16
218 threadpool_size = 16
219 timeout = 0
219 timeout = 0
220
220
221 # cache regions, please don't change
221 # cache regions, please don't change
222 beaker.cache.regions = repo_object
222 beaker.cache.regions = repo_object
223 beaker.cache.repo_object.type = memorylru
223 beaker.cache.repo_object.type = memorylru
224 beaker.cache.repo_object.max_items = 1000
224 beaker.cache.repo_object.max_items = 1000
225
225
226 # cache auto-expires after N seconds
226 # cache auto-expires after N seconds
227 beaker.cache.repo_object.expire = 10
227 beaker.cache.repo_object.expire = 10
228 beaker.cache.repo_object.enabled = true
228 beaker.cache.repo_object.enabled = true
229
229
230
230
231 ################################
231 ################################
232 ### LOGGING CONFIGURATION ####
232 ### LOGGING CONFIGURATION ####
233 ################################
233 ################################
234 [loggers]
234 [loggers]
235 keys = root, vcsserver, pyro4, beaker
235 keys = root, vcsserver, beaker
236
236
237 [handlers]
237 [handlers]
238 keys = console
238 keys = console
239
239
240 [formatters]
240 [formatters]
241 keys = generic
241 keys = generic
242
242
243 #############
243 #############
244 ## LOGGERS ##
244 ## LOGGERS ##
245 #############
245 #############
246 [logger_root]
246 [logger_root]
247 level = NOTSET
247 level = NOTSET
248 handlers = console
248 handlers = console
249
249
250 [logger_vcsserver]
250 [logger_vcsserver]
251 level = DEBUG
251 level = DEBUG
252 handlers =
252 handlers =
253 qualname = vcsserver
253 qualname = vcsserver
254 propagate = 1
254 propagate = 1
255
255
256 [logger_beaker]
256 [logger_beaker]
257 level = DEBUG
257 level = DEBUG
258 handlers =
258 handlers =
259 qualname = beaker
259 qualname = beaker
260 propagate = 1
260 propagate = 1
261
261
262 [logger_pyro4]
263 level = DEBUG
264 handlers =
265 qualname = Pyro4
266 propagate = 1
267
268
262
269 ##############
263 ##############
270 ## HANDLERS ##
264 ## HANDLERS ##
271 ##############
265 ##############
272
266
273 [handler_console]
267 [handler_console]
274 class = StreamHandler
268 class = StreamHandler
275 args = (sys.stderr,)
269 args = (sys.stderr,)
276 level = DEBUG
270 level = DEBUG
277 formatter = generic
271 formatter = generic
278
272
279 [handler_file]
273 [handler_file]
280 class = FileHandler
274 class = FileHandler
281 args = ('vcsserver.log', 'a',)
275 args = ('vcsserver.log', 'a',)
282 level = DEBUG
276 level = DEBUG
283 formatter = generic
277 formatter = generic
284
278
285 [handler_file_rotating]
279 [handler_file_rotating]
286 class = logging.handlers.TimedRotatingFileHandler
280 class = logging.handlers.TimedRotatingFileHandler
287 # 'D', 5 - rotate every 5days
281 # 'D', 5 - rotate every 5days
288 # you can set 'h', 'midnight'
282 # you can set 'h', 'midnight'
289 args = ('vcsserver.log', 'D', 5, 10,)
283 args = ('vcsserver.log', 'D', 5, 10,)
290 level = DEBUG
284 level = DEBUG
291 formatter = generic
285 formatter = generic
292
286
293 ################
287 ################
294 ## FORMATTERS ##
288 ## FORMATTERS ##
295 ################
289 ################
296
290
297 [formatter_generic]
291 [formatter_generic]
298 format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
292 format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
299 datefmt = %Y-%m-%d %H:%M:%S
293 datefmt = %Y-%m-%d %H:%M:%S
300
294
301
295
302 .. _Subversion Red Book: http://svnbook.red-bean.com/en/1.7/svn-book.html#svn.ref.svn
296 .. _Subversion Red Book: http://svnbook.red-bean.com/en/1.7/svn-book.html#svn.ref.svn
303
297
304 .. _Ask Ubuntu: http://askubuntu.com/questions/162391/how-do-i-fix-my-locale-issue
298 .. _Ask Ubuntu: http://askubuntu.com/questions/162391/how-do-i-fix-my-locale-issue
@@ -1,1852 +1,1826 b''
1 # Generated by pip2nix 0.4.0
1 # Generated by pip2nix 0.4.0
2 # See https://github.com/johbo/pip2nix
2 # See https://github.com/johbo/pip2nix
3
3
4 {
4 {
5 Babel = super.buildPythonPackage {
5 Babel = super.buildPythonPackage {
6 name = "Babel-1.3";
6 name = "Babel-1.3";
7 buildInputs = with self; [];
7 buildInputs = with self; [];
8 doCheck = false;
8 doCheck = false;
9 propagatedBuildInputs = with self; [pytz];
9 propagatedBuildInputs = with self; [pytz];
10 src = fetchurl {
10 src = fetchurl {
11 url = "https://pypi.python.org/packages/33/27/e3978243a03a76398c384c83f7ca879bc6e8f1511233a621fcada135606e/Babel-1.3.tar.gz";
11 url = "https://pypi.python.org/packages/33/27/e3978243a03a76398c384c83f7ca879bc6e8f1511233a621fcada135606e/Babel-1.3.tar.gz";
12 md5 = "5264ceb02717843cbc9ffce8e6e06bdb";
12 md5 = "5264ceb02717843cbc9ffce8e6e06bdb";
13 };
13 };
14 meta = {
14 meta = {
15 license = [ pkgs.lib.licenses.bsdOriginal ];
15 license = [ pkgs.lib.licenses.bsdOriginal ];
16 };
16 };
17 };
17 };
18 Beaker = super.buildPythonPackage {
18 Beaker = super.buildPythonPackage {
19 name = "Beaker-1.7.0";
19 name = "Beaker-1.7.0";
20 buildInputs = with self; [];
20 buildInputs = with self; [];
21 doCheck = false;
21 doCheck = false;
22 propagatedBuildInputs = with self; [];
22 propagatedBuildInputs = with self; [];
23 src = fetchurl {
23 src = fetchurl {
24 url = "https://pypi.python.org/packages/97/8e/409d2e7c009b8aa803dc9e6f239f1db7c3cdf578249087a404e7c27a505d/Beaker-1.7.0.tar.gz";
24 url = "https://pypi.python.org/packages/97/8e/409d2e7c009b8aa803dc9e6f239f1db7c3cdf578249087a404e7c27a505d/Beaker-1.7.0.tar.gz";
25 md5 = "386be3f7fe427358881eee4622b428b3";
25 md5 = "386be3f7fe427358881eee4622b428b3";
26 };
26 };
27 meta = {
27 meta = {
28 license = [ pkgs.lib.licenses.bsdOriginal ];
28 license = [ pkgs.lib.licenses.bsdOriginal ];
29 };
29 };
30 };
30 };
31 CProfileV = super.buildPythonPackage {
31 CProfileV = super.buildPythonPackage {
32 name = "CProfileV-1.0.6";
32 name = "CProfileV-1.0.6";
33 buildInputs = with self; [];
33 buildInputs = with self; [];
34 doCheck = false;
34 doCheck = false;
35 propagatedBuildInputs = with self; [bottle];
35 propagatedBuildInputs = with self; [bottle];
36 src = fetchurl {
36 src = fetchurl {
37 url = "https://pypi.python.org/packages/eb/df/983a0b6cfd3ac94abf023f5011cb04f33613ace196e33f53c86cf91850d5/CProfileV-1.0.6.tar.gz";
37 url = "https://pypi.python.org/packages/eb/df/983a0b6cfd3ac94abf023f5011cb04f33613ace196e33f53c86cf91850d5/CProfileV-1.0.6.tar.gz";
38 md5 = "08c7c242b6e64237bc53c5d13537e03d";
38 md5 = "08c7c242b6e64237bc53c5d13537e03d";
39 };
39 };
40 meta = {
40 meta = {
41 license = [ pkgs.lib.licenses.mit ];
41 license = [ pkgs.lib.licenses.mit ];
42 };
42 };
43 };
43 };
44 Chameleon = super.buildPythonPackage {
44 Chameleon = super.buildPythonPackage {
45 name = "Chameleon-2.24";
45 name = "Chameleon-2.24";
46 buildInputs = with self; [];
46 buildInputs = with self; [];
47 doCheck = false;
47 doCheck = false;
48 propagatedBuildInputs = with self; [];
48 propagatedBuildInputs = with self; [];
49 src = fetchurl {
49 src = fetchurl {
50 url = "https://pypi.python.org/packages/5a/9e/637379ffa13c5172b5c0e704833ffea6bf51cec7567f93fd6e903d53ed74/Chameleon-2.24.tar.gz";
50 url = "https://pypi.python.org/packages/5a/9e/637379ffa13c5172b5c0e704833ffea6bf51cec7567f93fd6e903d53ed74/Chameleon-2.24.tar.gz";
51 md5 = "1b01f1f6533a8a11d0d2f2366dec5342";
51 md5 = "1b01f1f6533a8a11d0d2f2366dec5342";
52 };
52 };
53 meta = {
53 meta = {
54 license = [ { fullName = "BSD-like (http://repoze.org/license.html)"; } ];
54 license = [ { fullName = "BSD-like (http://repoze.org/license.html)"; } ];
55 };
55 };
56 };
56 };
57 FormEncode = super.buildPythonPackage {
57 FormEncode = super.buildPythonPackage {
58 name = "FormEncode-1.2.4";
58 name = "FormEncode-1.2.4";
59 buildInputs = with self; [];
59 buildInputs = with self; [];
60 doCheck = false;
60 doCheck = false;
61 propagatedBuildInputs = with self; [];
61 propagatedBuildInputs = with self; [];
62 src = fetchurl {
62 src = fetchurl {
63 url = "https://pypi.python.org/packages/8e/59/0174271a6f004512e0201188593e6d319db139d14cb7490e488bbb078015/FormEncode-1.2.4.tar.gz";
63 url = "https://pypi.python.org/packages/8e/59/0174271a6f004512e0201188593e6d319db139d14cb7490e488bbb078015/FormEncode-1.2.4.tar.gz";
64 md5 = "6bc17fb9aed8aea198975e888e2077f4";
64 md5 = "6bc17fb9aed8aea198975e888e2077f4";
65 };
65 };
66 meta = {
66 meta = {
67 license = [ pkgs.lib.licenses.psfl ];
67 license = [ pkgs.lib.licenses.psfl ];
68 };
68 };
69 };
69 };
70 Jinja2 = super.buildPythonPackage {
70 Jinja2 = super.buildPythonPackage {
71 name = "Jinja2-2.7.3";
71 name = "Jinja2-2.7.3";
72 buildInputs = with self; [];
72 buildInputs = with self; [];
73 doCheck = false;
73 doCheck = false;
74 propagatedBuildInputs = with self; [MarkupSafe];
74 propagatedBuildInputs = with self; [MarkupSafe];
75 src = fetchurl {
75 src = fetchurl {
76 url = "https://pypi.python.org/packages/b0/73/eab0bca302d6d6a0b5c402f47ad1760dc9cb2dd14bbc1873ad48db258e4d/Jinja2-2.7.3.tar.gz";
76 url = "https://pypi.python.org/packages/b0/73/eab0bca302d6d6a0b5c402f47ad1760dc9cb2dd14bbc1873ad48db258e4d/Jinja2-2.7.3.tar.gz";
77 md5 = "b9dffd2f3b43d673802fe857c8445b1a";
77 md5 = "b9dffd2f3b43d673802fe857c8445b1a";
78 };
78 };
79 meta = {
79 meta = {
80 license = [ pkgs.lib.licenses.bsdOriginal ];
80 license = [ pkgs.lib.licenses.bsdOriginal ];
81 };
81 };
82 };
82 };
83 Mako = super.buildPythonPackage {
83 Mako = super.buildPythonPackage {
84 name = "Mako-1.0.6";
84 name = "Mako-1.0.6";
85 buildInputs = with self; [];
85 buildInputs = with self; [];
86 doCheck = false;
86 doCheck = false;
87 propagatedBuildInputs = with self; [MarkupSafe];
87 propagatedBuildInputs = with self; [MarkupSafe];
88 src = fetchurl {
88 src = fetchurl {
89 url = "https://pypi.python.org/packages/56/4b/cb75836863a6382199aefb3d3809937e21fa4cb0db15a4f4ba0ecc2e7e8e/Mako-1.0.6.tar.gz";
89 url = "https://pypi.python.org/packages/56/4b/cb75836863a6382199aefb3d3809937e21fa4cb0db15a4f4ba0ecc2e7e8e/Mako-1.0.6.tar.gz";
90 md5 = "a28e22a339080316b2acc352b9ee631c";
90 md5 = "a28e22a339080316b2acc352b9ee631c";
91 };
91 };
92 meta = {
92 meta = {
93 license = [ pkgs.lib.licenses.mit ];
93 license = [ pkgs.lib.licenses.mit ];
94 };
94 };
95 };
95 };
96 Markdown = super.buildPythonPackage {
96 Markdown = super.buildPythonPackage {
97 name = "Markdown-2.6.7";
97 name = "Markdown-2.6.7";
98 buildInputs = with self; [];
98 buildInputs = with self; [];
99 doCheck = false;
99 doCheck = false;
100 propagatedBuildInputs = with self; [];
100 propagatedBuildInputs = with self; [];
101 src = fetchurl {
101 src = fetchurl {
102 url = "https://pypi.python.org/packages/48/a4/fc6b002789c2239ac620ca963694c95b8f74e4747769cdf6021276939e74/Markdown-2.6.7.zip";
102 url = "https://pypi.python.org/packages/48/a4/fc6b002789c2239ac620ca963694c95b8f74e4747769cdf6021276939e74/Markdown-2.6.7.zip";
103 md5 = "632710a7474bbb74a82084392251061f";
103 md5 = "632710a7474bbb74a82084392251061f";
104 };
104 };
105 meta = {
105 meta = {
106 license = [ pkgs.lib.licenses.bsdOriginal ];
106 license = [ pkgs.lib.licenses.bsdOriginal ];
107 };
107 };
108 };
108 };
109 MarkupSafe = super.buildPythonPackage {
109 MarkupSafe = super.buildPythonPackage {
110 name = "MarkupSafe-0.23";
110 name = "MarkupSafe-0.23";
111 buildInputs = with self; [];
111 buildInputs = with self; [];
112 doCheck = false;
112 doCheck = false;
113 propagatedBuildInputs = with self; [];
113 propagatedBuildInputs = with self; [];
114 src = fetchurl {
114 src = fetchurl {
115 url = "https://pypi.python.org/packages/c0/41/bae1254e0396c0cc8cf1751cb7d9afc90a602353695af5952530482c963f/MarkupSafe-0.23.tar.gz";
115 url = "https://pypi.python.org/packages/c0/41/bae1254e0396c0cc8cf1751cb7d9afc90a602353695af5952530482c963f/MarkupSafe-0.23.tar.gz";
116 md5 = "f5ab3deee4c37cd6a922fb81e730da6e";
116 md5 = "f5ab3deee4c37cd6a922fb81e730da6e";
117 };
117 };
118 meta = {
118 meta = {
119 license = [ pkgs.lib.licenses.bsdOriginal ];
119 license = [ pkgs.lib.licenses.bsdOriginal ];
120 };
120 };
121 };
121 };
122 MySQL-python = super.buildPythonPackage {
122 MySQL-python = super.buildPythonPackage {
123 name = "MySQL-python-1.2.5";
123 name = "MySQL-python-1.2.5";
124 buildInputs = with self; [];
124 buildInputs = with self; [];
125 doCheck = false;
125 doCheck = false;
126 propagatedBuildInputs = with self; [];
126 propagatedBuildInputs = with self; [];
127 src = fetchurl {
127 src = fetchurl {
128 url = "https://pypi.python.org/packages/a5/e9/51b544da85a36a68debe7a7091f068d802fc515a3a202652828c73453cad/MySQL-python-1.2.5.zip";
128 url = "https://pypi.python.org/packages/a5/e9/51b544da85a36a68debe7a7091f068d802fc515a3a202652828c73453cad/MySQL-python-1.2.5.zip";
129 md5 = "654f75b302db6ed8dc5a898c625e030c";
129 md5 = "654f75b302db6ed8dc5a898c625e030c";
130 };
130 };
131 meta = {
131 meta = {
132 license = [ pkgs.lib.licenses.gpl1 ];
132 license = [ pkgs.lib.licenses.gpl1 ];
133 };
133 };
134 };
134 };
135 Paste = super.buildPythonPackage {
135 Paste = super.buildPythonPackage {
136 name = "Paste-2.0.3";
136 name = "Paste-2.0.3";
137 buildInputs = with self; [];
137 buildInputs = with self; [];
138 doCheck = false;
138 doCheck = false;
139 propagatedBuildInputs = with self; [six];
139 propagatedBuildInputs = with self; [six];
140 src = fetchurl {
140 src = fetchurl {
141 url = "https://pypi.python.org/packages/30/c3/5c2f7c7a02e4f58d4454353fa1c32c94f79fa4e36d07a67c0ac295ea369e/Paste-2.0.3.tar.gz";
141 url = "https://pypi.python.org/packages/30/c3/5c2f7c7a02e4f58d4454353fa1c32c94f79fa4e36d07a67c0ac295ea369e/Paste-2.0.3.tar.gz";
142 md5 = "1231e14eae62fa7ed76e9130b04bc61e";
142 md5 = "1231e14eae62fa7ed76e9130b04bc61e";
143 };
143 };
144 meta = {
144 meta = {
145 license = [ pkgs.lib.licenses.mit ];
145 license = [ pkgs.lib.licenses.mit ];
146 };
146 };
147 };
147 };
148 PasteDeploy = super.buildPythonPackage {
148 PasteDeploy = super.buildPythonPackage {
149 name = "PasteDeploy-1.5.2";
149 name = "PasteDeploy-1.5.2";
150 buildInputs = with self; [];
150 buildInputs = with self; [];
151 doCheck = false;
151 doCheck = false;
152 propagatedBuildInputs = with self; [];
152 propagatedBuildInputs = with self; [];
153 src = fetchurl {
153 src = fetchurl {
154 url = "https://pypi.python.org/packages/0f/90/8e20cdae206c543ea10793cbf4136eb9a8b3f417e04e40a29d72d9922cbd/PasteDeploy-1.5.2.tar.gz";
154 url = "https://pypi.python.org/packages/0f/90/8e20cdae206c543ea10793cbf4136eb9a8b3f417e04e40a29d72d9922cbd/PasteDeploy-1.5.2.tar.gz";
155 md5 = "352b7205c78c8de4987578d19431af3b";
155 md5 = "352b7205c78c8de4987578d19431af3b";
156 };
156 };
157 meta = {
157 meta = {
158 license = [ pkgs.lib.licenses.mit ];
158 license = [ pkgs.lib.licenses.mit ];
159 };
159 };
160 };
160 };
161 PasteScript = super.buildPythonPackage {
161 PasteScript = super.buildPythonPackage {
162 name = "PasteScript-1.7.5";
162 name = "PasteScript-1.7.5";
163 buildInputs = with self; [];
163 buildInputs = with self; [];
164 doCheck = false;
164 doCheck = false;
165 propagatedBuildInputs = with self; [Paste PasteDeploy];
165 propagatedBuildInputs = with self; [Paste PasteDeploy];
166 src = fetchurl {
166 src = fetchurl {
167 url = "https://pypi.python.org/packages/a5/05/fc60efa7c2f17a1dbaeccb2a903a1e90902d92b9d00eebabe3095829d806/PasteScript-1.7.5.tar.gz";
167 url = "https://pypi.python.org/packages/a5/05/fc60efa7c2f17a1dbaeccb2a903a1e90902d92b9d00eebabe3095829d806/PasteScript-1.7.5.tar.gz";
168 md5 = "4c72d78dcb6bb993f30536842c16af4d";
168 md5 = "4c72d78dcb6bb993f30536842c16af4d";
169 };
169 };
170 meta = {
170 meta = {
171 license = [ pkgs.lib.licenses.mit ];
171 license = [ pkgs.lib.licenses.mit ];
172 };
172 };
173 };
173 };
174 Pygments = super.buildPythonPackage {
174 Pygments = super.buildPythonPackage {
175 name = "Pygments-2.2.0";
175 name = "Pygments-2.2.0";
176 buildInputs = with self; [];
176 buildInputs = with self; [];
177 doCheck = false;
177 doCheck = false;
178 propagatedBuildInputs = with self; [];
178 propagatedBuildInputs = with self; [];
179 src = fetchurl {
179 src = fetchurl {
180 url = "https://pypi.python.org/packages/71/2a/2e4e77803a8bd6408a2903340ac498cb0a2181811af7c9ec92cb70b0308a/Pygments-2.2.0.tar.gz";
180 url = "https://pypi.python.org/packages/71/2a/2e4e77803a8bd6408a2903340ac498cb0a2181811af7c9ec92cb70b0308a/Pygments-2.2.0.tar.gz";
181 md5 = "13037baca42f16917cbd5ad2fab50844";
181 md5 = "13037baca42f16917cbd5ad2fab50844";
182 };
182 };
183 meta = {
183 meta = {
184 license = [ pkgs.lib.licenses.bsdOriginal ];
184 license = [ pkgs.lib.licenses.bsdOriginal ];
185 };
185 };
186 };
186 };
187 Pylons = super.buildPythonPackage {
187 Pylons = super.buildPythonPackage {
188 name = "Pylons-1.0.2.dev20170205";
188 name = "Pylons-1.0.2.dev20170205";
189 buildInputs = with self; [];
189 buildInputs = with self; [];
190 doCheck = false;
190 doCheck = false;
191 propagatedBuildInputs = with self; [Routes WebHelpers Beaker Paste PasteDeploy PasteScript FormEncode simplejson decorator nose Mako WebError WebTest Tempita MarkupSafe WebOb];
191 propagatedBuildInputs = with self; [Routes WebHelpers Beaker Paste PasteDeploy PasteScript FormEncode simplejson decorator nose Mako WebError WebTest Tempita MarkupSafe WebOb];
192 src = fetchurl {
192 src = fetchurl {
193 url = "https://code.rhodecode.com/upstream/pylons/archive/707354ee4261b9c10450404fc9852ccea4fd667d.tar.gz?md5=f26633726fa2cd3a340316ee6a5d218f";
193 url = "https://code.rhodecode.com/upstream/pylons/archive/707354ee4261b9c10450404fc9852ccea4fd667d.tar.gz?md5=f26633726fa2cd3a340316ee6a5d218f";
194 md5 = "f26633726fa2cd3a340316ee6a5d218f";
194 md5 = "f26633726fa2cd3a340316ee6a5d218f";
195 };
195 };
196 meta = {
196 meta = {
197 license = [ pkgs.lib.licenses.bsdOriginal ];
197 license = [ pkgs.lib.licenses.bsdOriginal ];
198 };
198 };
199 };
199 };
200 Pyro4 = super.buildPythonPackage {
201 name = "Pyro4-4.41";
202 buildInputs = with self; [];
203 doCheck = false;
204 propagatedBuildInputs = with self; [serpent];
205 src = fetchurl {
206 url = "https://pypi.python.org/packages/56/2b/89b566b4bf3e7f8ba790db2d1223852f8cb454c52cab7693dd41f608ca2a/Pyro4-4.41.tar.gz";
207 md5 = "ed69e9bfafa9c06c049a87cb0c4c2b6c";
208 };
209 meta = {
210 license = [ pkgs.lib.licenses.mit ];
211 };
212 };
213 Routes = super.buildPythonPackage {
200 Routes = super.buildPythonPackage {
214 name = "Routes-1.13";
201 name = "Routes-1.13";
215 buildInputs = with self; [];
202 buildInputs = with self; [];
216 doCheck = false;
203 doCheck = false;
217 propagatedBuildInputs = with self; [repoze.lru];
204 propagatedBuildInputs = with self; [repoze.lru];
218 src = fetchurl {
205 src = fetchurl {
219 url = "https://pypi.python.org/packages/88/d3/259c3b3cde8837eb9441ab5f574a660e8a4acea8f54a078441d4d2acac1c/Routes-1.13.tar.gz";
206 url = "https://pypi.python.org/packages/88/d3/259c3b3cde8837eb9441ab5f574a660e8a4acea8f54a078441d4d2acac1c/Routes-1.13.tar.gz";
220 md5 = "d527b0ab7dd9172b1275a41f97448783";
207 md5 = "d527b0ab7dd9172b1275a41f97448783";
221 };
208 };
222 meta = {
209 meta = {
223 license = [ pkgs.lib.licenses.bsdOriginal ];
210 license = [ pkgs.lib.licenses.bsdOriginal ];
224 };
211 };
225 };
212 };
226 SQLAlchemy = super.buildPythonPackage {
213 SQLAlchemy = super.buildPythonPackage {
227 name = "SQLAlchemy-0.9.9";
214 name = "SQLAlchemy-0.9.9";
228 buildInputs = with self; [];
215 buildInputs = with self; [];
229 doCheck = false;
216 doCheck = false;
230 propagatedBuildInputs = with self; [];
217 propagatedBuildInputs = with self; [];
231 src = fetchurl {
218 src = fetchurl {
232 url = "https://pypi.python.org/packages/28/f7/1bbfd0d8597e8c358d5e15a166a486ad82fc5579b4e67b6ef7c05b1d182b/SQLAlchemy-0.9.9.tar.gz";
219 url = "https://pypi.python.org/packages/28/f7/1bbfd0d8597e8c358d5e15a166a486ad82fc5579b4e67b6ef7c05b1d182b/SQLAlchemy-0.9.9.tar.gz";
233 md5 = "8a10a9bd13ed3336ef7333ac2cc679ff";
220 md5 = "8a10a9bd13ed3336ef7333ac2cc679ff";
234 };
221 };
235 meta = {
222 meta = {
236 license = [ pkgs.lib.licenses.mit ];
223 license = [ pkgs.lib.licenses.mit ];
237 };
224 };
238 };
225 };
239 Sphinx = super.buildPythonPackage {
226 Sphinx = super.buildPythonPackage {
240 name = "Sphinx-1.2.2";
227 name = "Sphinx-1.2.2";
241 buildInputs = with self; [];
228 buildInputs = with self; [];
242 doCheck = false;
229 doCheck = false;
243 propagatedBuildInputs = with self; [Pygments docutils Jinja2];
230 propagatedBuildInputs = with self; [Pygments docutils Jinja2];
244 src = fetchurl {
231 src = fetchurl {
245 url = "https://pypi.python.org/packages/0a/50/34017e6efcd372893a416aba14b84a1a149fc7074537b0e9cb6ca7b7abe9/Sphinx-1.2.2.tar.gz";
232 url = "https://pypi.python.org/packages/0a/50/34017e6efcd372893a416aba14b84a1a149fc7074537b0e9cb6ca7b7abe9/Sphinx-1.2.2.tar.gz";
246 md5 = "3dc73ccaa8d0bfb2d62fb671b1f7e8a4";
233 md5 = "3dc73ccaa8d0bfb2d62fb671b1f7e8a4";
247 };
234 };
248 meta = {
235 meta = {
249 license = [ pkgs.lib.licenses.bsdOriginal ];
236 license = [ pkgs.lib.licenses.bsdOriginal ];
250 };
237 };
251 };
238 };
252 Tempita = super.buildPythonPackage {
239 Tempita = super.buildPythonPackage {
253 name = "Tempita-0.5.2";
240 name = "Tempita-0.5.2";
254 buildInputs = with self; [];
241 buildInputs = with self; [];
255 doCheck = false;
242 doCheck = false;
256 propagatedBuildInputs = with self; [];
243 propagatedBuildInputs = with self; [];
257 src = fetchurl {
244 src = fetchurl {
258 url = "https://pypi.python.org/packages/56/c8/8ed6eee83dbddf7b0fc64dd5d4454bc05e6ccaafff47991f73f2894d9ff4/Tempita-0.5.2.tar.gz";
245 url = "https://pypi.python.org/packages/56/c8/8ed6eee83dbddf7b0fc64dd5d4454bc05e6ccaafff47991f73f2894d9ff4/Tempita-0.5.2.tar.gz";
259 md5 = "4c2f17bb9d481821c41b6fbee904cea1";
246 md5 = "4c2f17bb9d481821c41b6fbee904cea1";
260 };
247 };
261 meta = {
248 meta = {
262 license = [ pkgs.lib.licenses.mit ];
249 license = [ pkgs.lib.licenses.mit ];
263 };
250 };
264 };
251 };
265 URLObject = super.buildPythonPackage {
252 URLObject = super.buildPythonPackage {
266 name = "URLObject-2.4.0";
253 name = "URLObject-2.4.0";
267 buildInputs = with self; [];
254 buildInputs = with self; [];
268 doCheck = false;
255 doCheck = false;
269 propagatedBuildInputs = with self; [];
256 propagatedBuildInputs = with self; [];
270 src = fetchurl {
257 src = fetchurl {
271 url = "https://pypi.python.org/packages/cb/b6/e25e58500f9caef85d664bec71ec67c116897bfebf8622c32cb75d1ca199/URLObject-2.4.0.tar.gz";
258 url = "https://pypi.python.org/packages/cb/b6/e25e58500f9caef85d664bec71ec67c116897bfebf8622c32cb75d1ca199/URLObject-2.4.0.tar.gz";
272 md5 = "2ed819738a9f0a3051f31dc9924e3065";
259 md5 = "2ed819738a9f0a3051f31dc9924e3065";
273 };
260 };
274 meta = {
261 meta = {
275 license = [ ];
262 license = [ ];
276 };
263 };
277 };
264 };
278 WebError = super.buildPythonPackage {
265 WebError = super.buildPythonPackage {
279 name = "WebError-0.10.3";
266 name = "WebError-0.10.3";
280 buildInputs = with self; [];
267 buildInputs = with self; [];
281 doCheck = false;
268 doCheck = false;
282 propagatedBuildInputs = with self; [WebOb Tempita Pygments Paste];
269 propagatedBuildInputs = with self; [WebOb Tempita Pygments Paste];
283 src = fetchurl {
270 src = fetchurl {
284 url = "https://pypi.python.org/packages/35/76/e7e5c2ce7e9c7f31b54c1ff295a495886d1279a002557d74dd8957346a79/WebError-0.10.3.tar.gz";
271 url = "https://pypi.python.org/packages/35/76/e7e5c2ce7e9c7f31b54c1ff295a495886d1279a002557d74dd8957346a79/WebError-0.10.3.tar.gz";
285 md5 = "84b9990b0baae6fd440b1e60cdd06f9a";
272 md5 = "84b9990b0baae6fd440b1e60cdd06f9a";
286 };
273 };
287 meta = {
274 meta = {
288 license = [ pkgs.lib.licenses.mit ];
275 license = [ pkgs.lib.licenses.mit ];
289 };
276 };
290 };
277 };
291 WebHelpers = super.buildPythonPackage {
278 WebHelpers = super.buildPythonPackage {
292 name = "WebHelpers-1.3";
279 name = "WebHelpers-1.3";
293 buildInputs = with self; [];
280 buildInputs = with self; [];
294 doCheck = false;
281 doCheck = false;
295 propagatedBuildInputs = with self; [MarkupSafe];
282 propagatedBuildInputs = with self; [MarkupSafe];
296 src = fetchurl {
283 src = fetchurl {
297 url = "https://pypi.python.org/packages/ee/68/4d07672821d514184357f1552f2dad923324f597e722de3b016ca4f7844f/WebHelpers-1.3.tar.gz";
284 url = "https://pypi.python.org/packages/ee/68/4d07672821d514184357f1552f2dad923324f597e722de3b016ca4f7844f/WebHelpers-1.3.tar.gz";
298 md5 = "32749ffadfc40fea51075a7def32588b";
285 md5 = "32749ffadfc40fea51075a7def32588b";
299 };
286 };
300 meta = {
287 meta = {
301 license = [ pkgs.lib.licenses.bsdOriginal ];
288 license = [ pkgs.lib.licenses.bsdOriginal ];
302 };
289 };
303 };
290 };
304 WebHelpers2 = super.buildPythonPackage {
291 WebHelpers2 = super.buildPythonPackage {
305 name = "WebHelpers2-2.0";
292 name = "WebHelpers2-2.0";
306 buildInputs = with self; [];
293 buildInputs = with self; [];
307 doCheck = false;
294 doCheck = false;
308 propagatedBuildInputs = with self; [MarkupSafe six];
295 propagatedBuildInputs = with self; [MarkupSafe six];
309 src = fetchurl {
296 src = fetchurl {
310 url = "https://pypi.python.org/packages/ff/30/56342c6ea522439e3662427c8d7b5e5b390dff4ff2dc92d8afcb8ab68b75/WebHelpers2-2.0.tar.gz";
297 url = "https://pypi.python.org/packages/ff/30/56342c6ea522439e3662427c8d7b5e5b390dff4ff2dc92d8afcb8ab68b75/WebHelpers2-2.0.tar.gz";
311 md5 = "0f6b68d70c12ee0aed48c00b24da13d3";
298 md5 = "0f6b68d70c12ee0aed48c00b24da13d3";
312 };
299 };
313 meta = {
300 meta = {
314 license = [ pkgs.lib.licenses.mit ];
301 license = [ pkgs.lib.licenses.mit ];
315 };
302 };
316 };
303 };
317 WebOb = super.buildPythonPackage {
304 WebOb = super.buildPythonPackage {
318 name = "WebOb-1.3.1";
305 name = "WebOb-1.3.1";
319 buildInputs = with self; [];
306 buildInputs = with self; [];
320 doCheck = false;
307 doCheck = false;
321 propagatedBuildInputs = with self; [];
308 propagatedBuildInputs = with self; [];
322 src = fetchurl {
309 src = fetchurl {
323 url = "https://pypi.python.org/packages/16/78/adfc0380b8a0d75b2d543fa7085ba98a573b1ae486d9def88d172b81b9fa/WebOb-1.3.1.tar.gz";
310 url = "https://pypi.python.org/packages/16/78/adfc0380b8a0d75b2d543fa7085ba98a573b1ae486d9def88d172b81b9fa/WebOb-1.3.1.tar.gz";
324 md5 = "20918251c5726956ba8fef22d1556177";
311 md5 = "20918251c5726956ba8fef22d1556177";
325 };
312 };
326 meta = {
313 meta = {
327 license = [ pkgs.lib.licenses.mit ];
314 license = [ pkgs.lib.licenses.mit ];
328 };
315 };
329 };
316 };
330 WebTest = super.buildPythonPackage {
317 WebTest = super.buildPythonPackage {
331 name = "WebTest-1.4.3";
318 name = "WebTest-1.4.3";
332 buildInputs = with self; [];
319 buildInputs = with self; [];
333 doCheck = false;
320 doCheck = false;
334 propagatedBuildInputs = with self; [WebOb];
321 propagatedBuildInputs = with self; [WebOb];
335 src = fetchurl {
322 src = fetchurl {
336 url = "https://pypi.python.org/packages/51/3d/84fd0f628df10b30c7db87895f56d0158e5411206b721ca903cb51bfd948/WebTest-1.4.3.zip";
323 url = "https://pypi.python.org/packages/51/3d/84fd0f628df10b30c7db87895f56d0158e5411206b721ca903cb51bfd948/WebTest-1.4.3.zip";
337 md5 = "631ce728bed92c681a4020a36adbc353";
324 md5 = "631ce728bed92c681a4020a36adbc353";
338 };
325 };
339 meta = {
326 meta = {
340 license = [ pkgs.lib.licenses.mit ];
327 license = [ pkgs.lib.licenses.mit ];
341 };
328 };
342 };
329 };
343 Whoosh = super.buildPythonPackage {
330 Whoosh = super.buildPythonPackage {
344 name = "Whoosh-2.7.4";
331 name = "Whoosh-2.7.4";
345 buildInputs = with self; [];
332 buildInputs = with self; [];
346 doCheck = false;
333 doCheck = false;
347 propagatedBuildInputs = with self; [];
334 propagatedBuildInputs = with self; [];
348 src = fetchurl {
335 src = fetchurl {
349 url = "https://pypi.python.org/packages/25/2b/6beed2107b148edc1321da0d489afc4617b9ed317ef7b72d4993cad9b684/Whoosh-2.7.4.tar.gz";
336 url = "https://pypi.python.org/packages/25/2b/6beed2107b148edc1321da0d489afc4617b9ed317ef7b72d4993cad9b684/Whoosh-2.7.4.tar.gz";
350 md5 = "c2710105f20b3e29936bd2357383c325";
337 md5 = "c2710105f20b3e29936bd2357383c325";
351 };
338 };
352 meta = {
339 meta = {
353 license = [ pkgs.lib.licenses.bsdOriginal pkgs.lib.licenses.bsd2 ];
340 license = [ pkgs.lib.licenses.bsdOriginal pkgs.lib.licenses.bsd2 ];
354 };
341 };
355 };
342 };
356 alembic = super.buildPythonPackage {
343 alembic = super.buildPythonPackage {
357 name = "alembic-0.8.4";
344 name = "alembic-0.8.4";
358 buildInputs = with self; [];
345 buildInputs = with self; [];
359 doCheck = false;
346 doCheck = false;
360 propagatedBuildInputs = with self; [SQLAlchemy Mako python-editor];
347 propagatedBuildInputs = with self; [SQLAlchemy Mako python-editor];
361 src = fetchurl {
348 src = fetchurl {
362 url = "https://pypi.python.org/packages/ca/7e/299b4499b5c75e5a38c5845145ad24755bebfb8eec07a2e1c366b7181eeb/alembic-0.8.4.tar.gz";
349 url = "https://pypi.python.org/packages/ca/7e/299b4499b5c75e5a38c5845145ad24755bebfb8eec07a2e1c366b7181eeb/alembic-0.8.4.tar.gz";
363 md5 = "5f95d8ee62b443f9b37eb5bee76c582d";
350 md5 = "5f95d8ee62b443f9b37eb5bee76c582d";
364 };
351 };
365 meta = {
352 meta = {
366 license = [ pkgs.lib.licenses.mit ];
353 license = [ pkgs.lib.licenses.mit ];
367 };
354 };
368 };
355 };
369 amqplib = super.buildPythonPackage {
356 amqplib = super.buildPythonPackage {
370 name = "amqplib-1.0.2";
357 name = "amqplib-1.0.2";
371 buildInputs = with self; [];
358 buildInputs = with self; [];
372 doCheck = false;
359 doCheck = false;
373 propagatedBuildInputs = with self; [];
360 propagatedBuildInputs = with self; [];
374 src = fetchurl {
361 src = fetchurl {
375 url = "https://pypi.python.org/packages/75/b7/8c2429bf8d92354a0118614f9a4d15e53bc69ebedce534284111de5a0102/amqplib-1.0.2.tgz";
362 url = "https://pypi.python.org/packages/75/b7/8c2429bf8d92354a0118614f9a4d15e53bc69ebedce534284111de5a0102/amqplib-1.0.2.tgz";
376 md5 = "5c92f17fbedd99b2b4a836d4352d1e2f";
363 md5 = "5c92f17fbedd99b2b4a836d4352d1e2f";
377 };
364 };
378 meta = {
365 meta = {
379 license = [ { fullName = "LGPL"; } { fullName = "GNU Library or Lesser General Public License (LGPL)"; } ];
366 license = [ { fullName = "LGPL"; } { fullName = "GNU Library or Lesser General Public License (LGPL)"; } ];
380 };
367 };
381 };
368 };
382 anyjson = super.buildPythonPackage {
369 anyjson = super.buildPythonPackage {
383 name = "anyjson-0.3.3";
370 name = "anyjson-0.3.3";
384 buildInputs = with self; [];
371 buildInputs = with self; [];
385 doCheck = false;
372 doCheck = false;
386 propagatedBuildInputs = with self; [];
373 propagatedBuildInputs = with self; [];
387 src = fetchurl {
374 src = fetchurl {
388 url = "https://pypi.python.org/packages/c3/4d/d4089e1a3dd25b46bebdb55a992b0797cff657b4477bc32ce28038fdecbc/anyjson-0.3.3.tar.gz";
375 url = "https://pypi.python.org/packages/c3/4d/d4089e1a3dd25b46bebdb55a992b0797cff657b4477bc32ce28038fdecbc/anyjson-0.3.3.tar.gz";
389 md5 = "2ea28d6ec311aeeebaf993cb3008b27c";
376 md5 = "2ea28d6ec311aeeebaf993cb3008b27c";
390 };
377 };
391 meta = {
378 meta = {
392 license = [ pkgs.lib.licenses.bsdOriginal ];
379 license = [ pkgs.lib.licenses.bsdOriginal ];
393 };
380 };
394 };
381 };
395 appenlight-client = super.buildPythonPackage {
382 appenlight-client = super.buildPythonPackage {
396 name = "appenlight-client-0.6.14";
383 name = "appenlight-client-0.6.14";
397 buildInputs = with self; [];
384 buildInputs = with self; [];
398 doCheck = false;
385 doCheck = false;
399 propagatedBuildInputs = with self; [WebOb requests];
386 propagatedBuildInputs = with self; [WebOb requests];
400 src = fetchurl {
387 src = fetchurl {
401 url = "https://pypi.python.org/packages/4d/e0/23fee3ebada8143f707e65c06bcb82992040ee64ea8355e044ed55ebf0c1/appenlight_client-0.6.14.tar.gz";
388 url = "https://pypi.python.org/packages/4d/e0/23fee3ebada8143f707e65c06bcb82992040ee64ea8355e044ed55ebf0c1/appenlight_client-0.6.14.tar.gz";
402 md5 = "578c69b09f4356d898fff1199b98a95c";
389 md5 = "578c69b09f4356d898fff1199b98a95c";
403 };
390 };
404 meta = {
391 meta = {
405 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "DFSG approved"; } ];
392 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "DFSG approved"; } ];
406 };
393 };
407 };
394 };
408 authomatic = super.buildPythonPackage {
395 authomatic = super.buildPythonPackage {
409 name = "authomatic-0.1.0.post1";
396 name = "authomatic-0.1.0.post1";
410 buildInputs = with self; [];
397 buildInputs = with self; [];
411 doCheck = false;
398 doCheck = false;
412 propagatedBuildInputs = with self; [];
399 propagatedBuildInputs = with self; [];
413 src = fetchurl {
400 src = fetchurl {
414 url = "https://pypi.python.org/packages/08/1a/8a930461e604c2d5a7a871e1ac59fa82ccf994c32e807230c8d2fb07815a/Authomatic-0.1.0.post1.tar.gz";
401 url = "https://pypi.python.org/packages/08/1a/8a930461e604c2d5a7a871e1ac59fa82ccf994c32e807230c8d2fb07815a/Authomatic-0.1.0.post1.tar.gz";
415 md5 = "be3f3ce08747d776aae6d6cc8dcb49a9";
402 md5 = "be3f3ce08747d776aae6d6cc8dcb49a9";
416 };
403 };
417 meta = {
404 meta = {
418 license = [ pkgs.lib.licenses.mit ];
405 license = [ pkgs.lib.licenses.mit ];
419 };
406 };
420 };
407 };
421 backport-ipaddress = super.buildPythonPackage {
408 backport-ipaddress = super.buildPythonPackage {
422 name = "backport-ipaddress-0.1";
409 name = "backport-ipaddress-0.1";
423 buildInputs = with self; [];
410 buildInputs = with self; [];
424 doCheck = false;
411 doCheck = false;
425 propagatedBuildInputs = with self; [];
412 propagatedBuildInputs = with self; [];
426 src = fetchurl {
413 src = fetchurl {
427 url = "https://pypi.python.org/packages/d3/30/54c6dab05a4dec44db25ff309f1fbb6b7a8bde3f2bade38bb9da67bbab8f/backport_ipaddress-0.1.tar.gz";
414 url = "https://pypi.python.org/packages/d3/30/54c6dab05a4dec44db25ff309f1fbb6b7a8bde3f2bade38bb9da67bbab8f/backport_ipaddress-0.1.tar.gz";
428 md5 = "9c1f45f4361f71b124d7293a60006c05";
415 md5 = "9c1f45f4361f71b124d7293a60006c05";
429 };
416 };
430 meta = {
417 meta = {
431 license = [ pkgs.lib.licenses.psfl ];
418 license = [ pkgs.lib.licenses.psfl ];
432 };
419 };
433 };
420 };
434 backports.shutil-get-terminal-size = super.buildPythonPackage {
421 backports.shutil-get-terminal-size = super.buildPythonPackage {
435 name = "backports.shutil-get-terminal-size-1.0.0";
422 name = "backports.shutil-get-terminal-size-1.0.0";
436 buildInputs = with self; [];
423 buildInputs = with self; [];
437 doCheck = false;
424 doCheck = false;
438 propagatedBuildInputs = with self; [];
425 propagatedBuildInputs = with self; [];
439 src = fetchurl {
426 src = fetchurl {
440 url = "https://pypi.python.org/packages/ec/9c/368086faa9c016efce5da3e0e13ba392c9db79e3ab740b763fe28620b18b/backports.shutil_get_terminal_size-1.0.0.tar.gz";
427 url = "https://pypi.python.org/packages/ec/9c/368086faa9c016efce5da3e0e13ba392c9db79e3ab740b763fe28620b18b/backports.shutil_get_terminal_size-1.0.0.tar.gz";
441 md5 = "03267762480bd86b50580dc19dff3c66";
428 md5 = "03267762480bd86b50580dc19dff3c66";
442 };
429 };
443 meta = {
430 meta = {
444 license = [ pkgs.lib.licenses.mit ];
431 license = [ pkgs.lib.licenses.mit ];
445 };
432 };
446 };
433 };
447 bottle = super.buildPythonPackage {
434 bottle = super.buildPythonPackage {
448 name = "bottle-0.12.8";
435 name = "bottle-0.12.8";
449 buildInputs = with self; [];
436 buildInputs = with self; [];
450 doCheck = false;
437 doCheck = false;
451 propagatedBuildInputs = with self; [];
438 propagatedBuildInputs = with self; [];
452 src = fetchurl {
439 src = fetchurl {
453 url = "https://pypi.python.org/packages/52/df/e4a408f3a7af396d186d4ecd3b389dd764f0f943b4fa8d257bfe7b49d343/bottle-0.12.8.tar.gz";
440 url = "https://pypi.python.org/packages/52/df/e4a408f3a7af396d186d4ecd3b389dd764f0f943b4fa8d257bfe7b49d343/bottle-0.12.8.tar.gz";
454 md5 = "13132c0a8f607bf860810a6ee9064c5b";
441 md5 = "13132c0a8f607bf860810a6ee9064c5b";
455 };
442 };
456 meta = {
443 meta = {
457 license = [ pkgs.lib.licenses.mit ];
444 license = [ pkgs.lib.licenses.mit ];
458 };
445 };
459 };
446 };
460 bumpversion = super.buildPythonPackage {
447 bumpversion = super.buildPythonPackage {
461 name = "bumpversion-0.5.3";
448 name = "bumpversion-0.5.3";
462 buildInputs = with self; [];
449 buildInputs = with self; [];
463 doCheck = false;
450 doCheck = false;
464 propagatedBuildInputs = with self; [];
451 propagatedBuildInputs = with self; [];
465 src = fetchurl {
452 src = fetchurl {
466 url = "https://pypi.python.org/packages/14/41/8c9da3549f8e00c84f0432c3a8cf8ed6898374714676aab91501d48760db/bumpversion-0.5.3.tar.gz";
453 url = "https://pypi.python.org/packages/14/41/8c9da3549f8e00c84f0432c3a8cf8ed6898374714676aab91501d48760db/bumpversion-0.5.3.tar.gz";
467 md5 = "c66a3492eafcf5ad4b024be9fca29820";
454 md5 = "c66a3492eafcf5ad4b024be9fca29820";
468 };
455 };
469 meta = {
456 meta = {
470 license = [ pkgs.lib.licenses.mit ];
457 license = [ pkgs.lib.licenses.mit ];
471 };
458 };
472 };
459 };
473 celery = super.buildPythonPackage {
460 celery = super.buildPythonPackage {
474 name = "celery-2.2.10";
461 name = "celery-2.2.10";
475 buildInputs = with self; [];
462 buildInputs = with self; [];
476 doCheck = false;
463 doCheck = false;
477 propagatedBuildInputs = with self; [python-dateutil anyjson kombu pyparsing];
464 propagatedBuildInputs = with self; [python-dateutil anyjson kombu pyparsing];
478 src = fetchurl {
465 src = fetchurl {
479 url = "https://pypi.python.org/packages/b1/64/860fd50e45844c83442e7953effcddeff66b2851d90b2d784f7201c111b8/celery-2.2.10.tar.gz";
466 url = "https://pypi.python.org/packages/b1/64/860fd50e45844c83442e7953effcddeff66b2851d90b2d784f7201c111b8/celery-2.2.10.tar.gz";
480 md5 = "898bc87e54f278055b561316ba73e222";
467 md5 = "898bc87e54f278055b561316ba73e222";
481 };
468 };
482 meta = {
469 meta = {
483 license = [ pkgs.lib.licenses.bsdOriginal ];
470 license = [ pkgs.lib.licenses.bsdOriginal ];
484 };
471 };
485 };
472 };
486 channelstream = super.buildPythonPackage {
473 channelstream = super.buildPythonPackage {
487 name = "channelstream-0.5.2";
474 name = "channelstream-0.5.2";
488 buildInputs = with self; [];
475 buildInputs = with self; [];
489 doCheck = false;
476 doCheck = false;
490 propagatedBuildInputs = with self; [gevent ws4py pyramid pyramid-jinja2 itsdangerous requests six];
477 propagatedBuildInputs = with self; [gevent ws4py pyramid pyramid-jinja2 itsdangerous requests six];
491 src = fetchurl {
478 src = fetchurl {
492 url = "https://pypi.python.org/packages/2b/31/29a8e085cf5bf97fa88e7b947adabfc581a18a3463adf77fb6dada34a65f/channelstream-0.5.2.tar.gz";
479 url = "https://pypi.python.org/packages/2b/31/29a8e085cf5bf97fa88e7b947adabfc581a18a3463adf77fb6dada34a65f/channelstream-0.5.2.tar.gz";
493 md5 = "1c5eb2a8a405be6f1073da94da6d81d3";
480 md5 = "1c5eb2a8a405be6f1073da94da6d81d3";
494 };
481 };
495 meta = {
482 meta = {
496 license = [ pkgs.lib.licenses.bsdOriginal ];
483 license = [ pkgs.lib.licenses.bsdOriginal ];
497 };
484 };
498 };
485 };
499 click = super.buildPythonPackage {
486 click = super.buildPythonPackage {
500 name = "click-5.1";
487 name = "click-5.1";
501 buildInputs = with self; [];
488 buildInputs = with self; [];
502 doCheck = false;
489 doCheck = false;
503 propagatedBuildInputs = with self; [];
490 propagatedBuildInputs = with self; [];
504 src = fetchurl {
491 src = fetchurl {
505 url = "https://pypi.python.org/packages/b7/34/a496632c4fb6c1ee76efedf77bb8d28b29363d839953d95095b12defe791/click-5.1.tar.gz";
492 url = "https://pypi.python.org/packages/b7/34/a496632c4fb6c1ee76efedf77bb8d28b29363d839953d95095b12defe791/click-5.1.tar.gz";
506 md5 = "9c5323008cccfe232a8b161fc8196d41";
493 md5 = "9c5323008cccfe232a8b161fc8196d41";
507 };
494 };
508 meta = {
495 meta = {
509 license = [ pkgs.lib.licenses.bsdOriginal ];
496 license = [ pkgs.lib.licenses.bsdOriginal ];
510 };
497 };
511 };
498 };
512 colander = super.buildPythonPackage {
499 colander = super.buildPythonPackage {
513 name = "colander-1.2";
500 name = "colander-1.2";
514 buildInputs = with self; [];
501 buildInputs = with self; [];
515 doCheck = false;
502 doCheck = false;
516 propagatedBuildInputs = with self; [translationstring iso8601];
503 propagatedBuildInputs = with self; [translationstring iso8601];
517 src = fetchurl {
504 src = fetchurl {
518 url = "https://pypi.python.org/packages/14/23/c9ceba07a6a1dc0eefbb215fc0dc64aabc2b22ee756bc0f0c13278fa0887/colander-1.2.tar.gz";
505 url = "https://pypi.python.org/packages/14/23/c9ceba07a6a1dc0eefbb215fc0dc64aabc2b22ee756bc0f0c13278fa0887/colander-1.2.tar.gz";
519 md5 = "83db21b07936a0726e588dae1914b9ed";
506 md5 = "83db21b07936a0726e588dae1914b9ed";
520 };
507 };
521 meta = {
508 meta = {
522 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
509 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
523 };
510 };
524 };
511 };
525 configobj = super.buildPythonPackage {
512 configobj = super.buildPythonPackage {
526 name = "configobj-5.0.6";
513 name = "configobj-5.0.6";
527 buildInputs = with self; [];
514 buildInputs = with self; [];
528 doCheck = false;
515 doCheck = false;
529 propagatedBuildInputs = with self; [six];
516 propagatedBuildInputs = with self; [six];
530 src = fetchurl {
517 src = fetchurl {
531 url = "https://pypi.python.org/packages/64/61/079eb60459c44929e684fa7d9e2fdca403f67d64dd9dbac27296be2e0fab/configobj-5.0.6.tar.gz";
518 url = "https://pypi.python.org/packages/64/61/079eb60459c44929e684fa7d9e2fdca403f67d64dd9dbac27296be2e0fab/configobj-5.0.6.tar.gz";
532 md5 = "e472a3a1c2a67bb0ec9b5d54c13a47d6";
519 md5 = "e472a3a1c2a67bb0ec9b5d54c13a47d6";
533 };
520 };
534 meta = {
521 meta = {
535 license = [ pkgs.lib.licenses.bsdOriginal ];
522 license = [ pkgs.lib.licenses.bsdOriginal ];
536 };
523 };
537 };
524 };
538 cov-core = super.buildPythonPackage {
525 cov-core = super.buildPythonPackage {
539 name = "cov-core-1.15.0";
526 name = "cov-core-1.15.0";
540 buildInputs = with self; [];
527 buildInputs = with self; [];
541 doCheck = false;
528 doCheck = false;
542 propagatedBuildInputs = with self; [coverage];
529 propagatedBuildInputs = with self; [coverage];
543 src = fetchurl {
530 src = fetchurl {
544 url = "https://pypi.python.org/packages/4b/87/13e75a47b4ba1be06f29f6d807ca99638bedc6b57fa491cd3de891ca2923/cov-core-1.15.0.tar.gz";
531 url = "https://pypi.python.org/packages/4b/87/13e75a47b4ba1be06f29f6d807ca99638bedc6b57fa491cd3de891ca2923/cov-core-1.15.0.tar.gz";
545 md5 = "f519d4cb4c4e52856afb14af52919fe6";
532 md5 = "f519d4cb4c4e52856afb14af52919fe6";
546 };
533 };
547 meta = {
534 meta = {
548 license = [ pkgs.lib.licenses.mit ];
535 license = [ pkgs.lib.licenses.mit ];
549 };
536 };
550 };
537 };
551 coverage = super.buildPythonPackage {
538 coverage = super.buildPythonPackage {
552 name = "coverage-3.7.1";
539 name = "coverage-3.7.1";
553 buildInputs = with self; [];
540 buildInputs = with self; [];
554 doCheck = false;
541 doCheck = false;
555 propagatedBuildInputs = with self; [];
542 propagatedBuildInputs = with self; [];
556 src = fetchurl {
543 src = fetchurl {
557 url = "https://pypi.python.org/packages/09/4f/89b06c7fdc09687bca507dc411c342556ef9c5a3b26756137a4878ff19bf/coverage-3.7.1.tar.gz";
544 url = "https://pypi.python.org/packages/09/4f/89b06c7fdc09687bca507dc411c342556ef9c5a3b26756137a4878ff19bf/coverage-3.7.1.tar.gz";
558 md5 = "c47b36ceb17eaff3ecfab3bcd347d0df";
545 md5 = "c47b36ceb17eaff3ecfab3bcd347d0df";
559 };
546 };
560 meta = {
547 meta = {
561 license = [ pkgs.lib.licenses.bsdOriginal ];
548 license = [ pkgs.lib.licenses.bsdOriginal ];
562 };
549 };
563 };
550 };
564 cssselect = super.buildPythonPackage {
551 cssselect = super.buildPythonPackage {
565 name = "cssselect-0.9.1";
552 name = "cssselect-0.9.1";
566 buildInputs = with self; [];
553 buildInputs = with self; [];
567 doCheck = false;
554 doCheck = false;
568 propagatedBuildInputs = with self; [];
555 propagatedBuildInputs = with self; [];
569 src = fetchurl {
556 src = fetchurl {
570 url = "https://pypi.python.org/packages/aa/e5/9ee1460d485b94a6d55732eb7ad5b6c084caf73dd6f9cb0bb7d2a78fafe8/cssselect-0.9.1.tar.gz";
557 url = "https://pypi.python.org/packages/aa/e5/9ee1460d485b94a6d55732eb7ad5b6c084caf73dd6f9cb0bb7d2a78fafe8/cssselect-0.9.1.tar.gz";
571 md5 = "c74f45966277dc7a0f768b9b0f3522ac";
558 md5 = "c74f45966277dc7a0f768b9b0f3522ac";
572 };
559 };
573 meta = {
560 meta = {
574 license = [ pkgs.lib.licenses.bsdOriginal ];
561 license = [ pkgs.lib.licenses.bsdOriginal ];
575 };
562 };
576 };
563 };
577 decorator = super.buildPythonPackage {
564 decorator = super.buildPythonPackage {
578 name = "decorator-3.4.2";
565 name = "decorator-3.4.2";
579 buildInputs = with self; [];
566 buildInputs = with self; [];
580 doCheck = false;
567 doCheck = false;
581 propagatedBuildInputs = with self; [];
568 propagatedBuildInputs = with self; [];
582 src = fetchurl {
569 src = fetchurl {
583 url = "https://pypi.python.org/packages/35/3a/42566eb7a2cbac774399871af04e11d7ae3fc2579e7dae85213b8d1d1c57/decorator-3.4.2.tar.gz";
570 url = "https://pypi.python.org/packages/35/3a/42566eb7a2cbac774399871af04e11d7ae3fc2579e7dae85213b8d1d1c57/decorator-3.4.2.tar.gz";
584 md5 = "9e0536870d2b83ae27d58dbf22582f4d";
571 md5 = "9e0536870d2b83ae27d58dbf22582f4d";
585 };
572 };
586 meta = {
573 meta = {
587 license = [ pkgs.lib.licenses.bsdOriginal ];
574 license = [ pkgs.lib.licenses.bsdOriginal ];
588 };
575 };
589 };
576 };
590 deform = super.buildPythonPackage {
577 deform = super.buildPythonPackage {
591 name = "deform-2.0a2";
578 name = "deform-2.0a2";
592 buildInputs = with self; [];
579 buildInputs = with self; [];
593 doCheck = false;
580 doCheck = false;
594 propagatedBuildInputs = with self; [Chameleon colander peppercorn translationstring zope.deprecation];
581 propagatedBuildInputs = with self; [Chameleon colander peppercorn translationstring zope.deprecation];
595 src = fetchurl {
582 src = fetchurl {
596 url = "https://pypi.python.org/packages/8d/b3/aab57e81da974a806dc9c5fa024a6404720f890a6dcf2e80885e3cb4609a/deform-2.0a2.tar.gz";
583 url = "https://pypi.python.org/packages/8d/b3/aab57e81da974a806dc9c5fa024a6404720f890a6dcf2e80885e3cb4609a/deform-2.0a2.tar.gz";
597 md5 = "7a90d41f7fbc18002ce74f39bd90a5e4";
584 md5 = "7a90d41f7fbc18002ce74f39bd90a5e4";
598 };
585 };
599 meta = {
586 meta = {
600 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
587 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
601 };
588 };
602 };
589 };
603 docutils = super.buildPythonPackage {
590 docutils = super.buildPythonPackage {
604 name = "docutils-0.12";
591 name = "docutils-0.12";
605 buildInputs = with self; [];
592 buildInputs = with self; [];
606 doCheck = false;
593 doCheck = false;
607 propagatedBuildInputs = with self; [];
594 propagatedBuildInputs = with self; [];
608 src = fetchurl {
595 src = fetchurl {
609 url = "https://pypi.python.org/packages/37/38/ceda70135b9144d84884ae2fc5886c6baac4edea39550f28bcd144c1234d/docutils-0.12.tar.gz";
596 url = "https://pypi.python.org/packages/37/38/ceda70135b9144d84884ae2fc5886c6baac4edea39550f28bcd144c1234d/docutils-0.12.tar.gz";
610 md5 = "4622263b62c5c771c03502afa3157768";
597 md5 = "4622263b62c5c771c03502afa3157768";
611 };
598 };
612 meta = {
599 meta = {
613 license = [ pkgs.lib.licenses.bsdOriginal pkgs.lib.licenses.publicDomain pkgs.lib.licenses.gpl1 { fullName = "public domain, Python, 2-Clause BSD, GPL 3 (see COPYING.txt)"; } pkgs.lib.licenses.psfl ];
600 license = [ pkgs.lib.licenses.bsdOriginal pkgs.lib.licenses.publicDomain pkgs.lib.licenses.gpl1 { fullName = "public domain, Python, 2-Clause BSD, GPL 3 (see COPYING.txt)"; } pkgs.lib.licenses.psfl ];
614 };
601 };
615 };
602 };
616 dogpile.cache = super.buildPythonPackage {
603 dogpile.cache = super.buildPythonPackage {
617 name = "dogpile.cache-0.6.1";
604 name = "dogpile.cache-0.6.1";
618 buildInputs = with self; [];
605 buildInputs = with self; [];
619 doCheck = false;
606 doCheck = false;
620 propagatedBuildInputs = with self; [];
607 propagatedBuildInputs = with self; [];
621 src = fetchurl {
608 src = fetchurl {
622 url = "https://pypi.python.org/packages/f6/a0/6f2142c58c6588d17c734265b103ae1cd0741e1681dd9483a63f22033375/dogpile.cache-0.6.1.tar.gz";
609 url = "https://pypi.python.org/packages/f6/a0/6f2142c58c6588d17c734265b103ae1cd0741e1681dd9483a63f22033375/dogpile.cache-0.6.1.tar.gz";
623 md5 = "35d7fb30f22bbd0685763d894dd079a9";
610 md5 = "35d7fb30f22bbd0685763d894dd079a9";
624 };
611 };
625 meta = {
612 meta = {
626 license = [ pkgs.lib.licenses.bsdOriginal ];
613 license = [ pkgs.lib.licenses.bsdOriginal ];
627 };
614 };
628 };
615 };
629 dogpile.core = super.buildPythonPackage {
616 dogpile.core = super.buildPythonPackage {
630 name = "dogpile.core-0.4.1";
617 name = "dogpile.core-0.4.1";
631 buildInputs = with self; [];
618 buildInputs = with self; [];
632 doCheck = false;
619 doCheck = false;
633 propagatedBuildInputs = with self; [];
620 propagatedBuildInputs = with self; [];
634 src = fetchurl {
621 src = fetchurl {
635 url = "https://pypi.python.org/packages/0e/77/e72abc04c22aedf874301861e5c1e761231c288b5de369c18be8f4b5c9bb/dogpile.core-0.4.1.tar.gz";
622 url = "https://pypi.python.org/packages/0e/77/e72abc04c22aedf874301861e5c1e761231c288b5de369c18be8f4b5c9bb/dogpile.core-0.4.1.tar.gz";
636 md5 = "01cb19f52bba3e95c9b560f39341f045";
623 md5 = "01cb19f52bba3e95c9b560f39341f045";
637 };
624 };
638 meta = {
625 meta = {
639 license = [ pkgs.lib.licenses.bsdOriginal ];
626 license = [ pkgs.lib.licenses.bsdOriginal ];
640 };
627 };
641 };
628 };
642 ecdsa = super.buildPythonPackage {
629 ecdsa = super.buildPythonPackage {
643 name = "ecdsa-0.11";
630 name = "ecdsa-0.11";
644 buildInputs = with self; [];
631 buildInputs = with self; [];
645 doCheck = false;
632 doCheck = false;
646 propagatedBuildInputs = with self; [];
633 propagatedBuildInputs = with self; [];
647 src = fetchurl {
634 src = fetchurl {
648 url = "https://pypi.python.org/packages/6c/3f/92fe5dcdcaa7bd117be21e5520c9a54375112b66ec000d209e9e9519fad1/ecdsa-0.11.tar.gz";
635 url = "https://pypi.python.org/packages/6c/3f/92fe5dcdcaa7bd117be21e5520c9a54375112b66ec000d209e9e9519fad1/ecdsa-0.11.tar.gz";
649 md5 = "8ef586fe4dbb156697d756900cb41d7c";
636 md5 = "8ef586fe4dbb156697d756900cb41d7c";
650 };
637 };
651 meta = {
638 meta = {
652 license = [ pkgs.lib.licenses.mit ];
639 license = [ pkgs.lib.licenses.mit ];
653 };
640 };
654 };
641 };
655 elasticsearch = super.buildPythonPackage {
642 elasticsearch = super.buildPythonPackage {
656 name = "elasticsearch-2.3.0";
643 name = "elasticsearch-2.3.0";
657 buildInputs = with self; [];
644 buildInputs = with self; [];
658 doCheck = false;
645 doCheck = false;
659 propagatedBuildInputs = with self; [urllib3];
646 propagatedBuildInputs = with self; [urllib3];
660 src = fetchurl {
647 src = fetchurl {
661 url = "https://pypi.python.org/packages/10/35/5fd52c5f0b0ee405ed4b5195e8bce44c5e041787680dc7b94b8071cac600/elasticsearch-2.3.0.tar.gz";
648 url = "https://pypi.python.org/packages/10/35/5fd52c5f0b0ee405ed4b5195e8bce44c5e041787680dc7b94b8071cac600/elasticsearch-2.3.0.tar.gz";
662 md5 = "2550f3b51629cf1ef9636608af92c340";
649 md5 = "2550f3b51629cf1ef9636608af92c340";
663 };
650 };
664 meta = {
651 meta = {
665 license = [ pkgs.lib.licenses.asl20 ];
652 license = [ pkgs.lib.licenses.asl20 ];
666 };
653 };
667 };
654 };
668 elasticsearch-dsl = super.buildPythonPackage {
655 elasticsearch-dsl = super.buildPythonPackage {
669 name = "elasticsearch-dsl-2.2.0";
656 name = "elasticsearch-dsl-2.2.0";
670 buildInputs = with self; [];
657 buildInputs = with self; [];
671 doCheck = false;
658 doCheck = false;
672 propagatedBuildInputs = with self; [six python-dateutil elasticsearch];
659 propagatedBuildInputs = with self; [six python-dateutil elasticsearch];
673 src = fetchurl {
660 src = fetchurl {
674 url = "https://pypi.python.org/packages/66/2f/52a086968788e58461641570f45c3207a52d46ebbe9b77dc22b6a8ffda66/elasticsearch-dsl-2.2.0.tar.gz";
661 url = "https://pypi.python.org/packages/66/2f/52a086968788e58461641570f45c3207a52d46ebbe9b77dc22b6a8ffda66/elasticsearch-dsl-2.2.0.tar.gz";
675 md5 = "fa6bd3c87ea3caa8f0f051bc37c53221";
662 md5 = "fa6bd3c87ea3caa8f0f051bc37c53221";
676 };
663 };
677 meta = {
664 meta = {
678 license = [ pkgs.lib.licenses.asl20 ];
665 license = [ pkgs.lib.licenses.asl20 ];
679 };
666 };
680 };
667 };
681 enum34 = super.buildPythonPackage {
668 enum34 = super.buildPythonPackage {
682 name = "enum34-1.1.6";
669 name = "enum34-1.1.6";
683 buildInputs = with self; [];
670 buildInputs = with self; [];
684 doCheck = false;
671 doCheck = false;
685 propagatedBuildInputs = with self; [];
672 propagatedBuildInputs = with self; [];
686 src = fetchurl {
673 src = fetchurl {
687 url = "https://pypi.python.org/packages/bf/3e/31d502c25302814a7c2f1d3959d2a3b3f78e509002ba91aea64993936876/enum34-1.1.6.tar.gz";
674 url = "https://pypi.python.org/packages/bf/3e/31d502c25302814a7c2f1d3959d2a3b3f78e509002ba91aea64993936876/enum34-1.1.6.tar.gz";
688 md5 = "5f13a0841a61f7fc295c514490d120d0";
675 md5 = "5f13a0841a61f7fc295c514490d120d0";
689 };
676 };
690 meta = {
677 meta = {
691 license = [ pkgs.lib.licenses.bsdOriginal ];
678 license = [ pkgs.lib.licenses.bsdOriginal ];
692 };
679 };
693 };
680 };
694 future = super.buildPythonPackage {
681 future = super.buildPythonPackage {
695 name = "future-0.14.3";
682 name = "future-0.14.3";
696 buildInputs = with self; [];
683 buildInputs = with self; [];
697 doCheck = false;
684 doCheck = false;
698 propagatedBuildInputs = with self; [];
685 propagatedBuildInputs = with self; [];
699 src = fetchurl {
686 src = fetchurl {
700 url = "https://pypi.python.org/packages/83/80/8ef3a11a15f8eaafafa0937b20c1b3f73527e69ab6b3fa1cf94a5a96aabb/future-0.14.3.tar.gz";
687 url = "https://pypi.python.org/packages/83/80/8ef3a11a15f8eaafafa0937b20c1b3f73527e69ab6b3fa1cf94a5a96aabb/future-0.14.3.tar.gz";
701 md5 = "e94079b0bd1fc054929e8769fc0f6083";
688 md5 = "e94079b0bd1fc054929e8769fc0f6083";
702 };
689 };
703 meta = {
690 meta = {
704 license = [ { fullName = "OSI Approved"; } pkgs.lib.licenses.mit ];
691 license = [ { fullName = "OSI Approved"; } pkgs.lib.licenses.mit ];
705 };
692 };
706 };
693 };
707 futures = super.buildPythonPackage {
694 futures = super.buildPythonPackage {
708 name = "futures-3.0.2";
695 name = "futures-3.0.2";
709 buildInputs = with self; [];
696 buildInputs = with self; [];
710 doCheck = false;
697 doCheck = false;
711 propagatedBuildInputs = with self; [];
698 propagatedBuildInputs = with self; [];
712 src = fetchurl {
699 src = fetchurl {
713 url = "https://pypi.python.org/packages/f8/e7/fc0fcbeb9193ba2d4de00b065e7fd5aecd0679e93ce95a07322b2b1434f4/futures-3.0.2.tar.gz";
700 url = "https://pypi.python.org/packages/f8/e7/fc0fcbeb9193ba2d4de00b065e7fd5aecd0679e93ce95a07322b2b1434f4/futures-3.0.2.tar.gz";
714 md5 = "42aaf1e4de48d6e871d77dc1f9d96d5a";
701 md5 = "42aaf1e4de48d6e871d77dc1f9d96d5a";
715 };
702 };
716 meta = {
703 meta = {
717 license = [ pkgs.lib.licenses.bsdOriginal ];
704 license = [ pkgs.lib.licenses.bsdOriginal ];
718 };
705 };
719 };
706 };
720 gevent = super.buildPythonPackage {
707 gevent = super.buildPythonPackage {
721 name = "gevent-1.1.2";
708 name = "gevent-1.1.2";
722 buildInputs = with self; [];
709 buildInputs = with self; [];
723 doCheck = false;
710 doCheck = false;
724 propagatedBuildInputs = with self; [greenlet];
711 propagatedBuildInputs = with self; [greenlet];
725 src = fetchurl {
712 src = fetchurl {
726 url = "https://pypi.python.org/packages/43/8f/cb3224a0e6ab663547f45c10d0651cfd52633fde4283bf68d627084df8cc/gevent-1.1.2.tar.gz";
713 url = "https://pypi.python.org/packages/43/8f/cb3224a0e6ab663547f45c10d0651cfd52633fde4283bf68d627084df8cc/gevent-1.1.2.tar.gz";
727 md5 = "bb32a2f852a4997138014d5007215c6e";
714 md5 = "bb32a2f852a4997138014d5007215c6e";
728 };
715 };
729 meta = {
716 meta = {
730 license = [ pkgs.lib.licenses.mit ];
717 license = [ pkgs.lib.licenses.mit ];
731 };
718 };
732 };
719 };
733 gnureadline = super.buildPythonPackage {
720 gnureadline = super.buildPythonPackage {
734 name = "gnureadline-6.3.3";
721 name = "gnureadline-6.3.3";
735 buildInputs = with self; [];
722 buildInputs = with self; [];
736 doCheck = false;
723 doCheck = false;
737 propagatedBuildInputs = with self; [];
724 propagatedBuildInputs = with self; [];
738 src = fetchurl {
725 src = fetchurl {
739 url = "https://pypi.python.org/packages/3a/ee/2c3f568b0a74974791ac590ec742ef6133e2fbd287a074ba72a53fa5e97c/gnureadline-6.3.3.tar.gz";
726 url = "https://pypi.python.org/packages/3a/ee/2c3f568b0a74974791ac590ec742ef6133e2fbd287a074ba72a53fa5e97c/gnureadline-6.3.3.tar.gz";
740 md5 = "c4af83c9a3fbeac8f2da9b5a7c60e51c";
727 md5 = "c4af83c9a3fbeac8f2da9b5a7c60e51c";
741 };
728 };
742 meta = {
729 meta = {
743 license = [ pkgs.lib.licenses.gpl1 ];
730 license = [ pkgs.lib.licenses.gpl1 ];
744 };
731 };
745 };
732 };
746 gprof2dot = super.buildPythonPackage {
733 gprof2dot = super.buildPythonPackage {
747 name = "gprof2dot-2016.10.13";
734 name = "gprof2dot-2016.10.13";
748 buildInputs = with self; [];
735 buildInputs = with self; [];
749 doCheck = false;
736 doCheck = false;
750 propagatedBuildInputs = with self; [];
737 propagatedBuildInputs = with self; [];
751 src = fetchurl {
738 src = fetchurl {
752 url = "https://pypi.python.org/packages/a0/e0/73c71baed306f0402a00a94ffc7b2be94ad1296dfcb8b46912655b93154c/gprof2dot-2016.10.13.tar.gz";
739 url = "https://pypi.python.org/packages/a0/e0/73c71baed306f0402a00a94ffc7b2be94ad1296dfcb8b46912655b93154c/gprof2dot-2016.10.13.tar.gz";
753 md5 = "0125401f15fd2afe1df686a76c64a4fd";
740 md5 = "0125401f15fd2afe1df686a76c64a4fd";
754 };
741 };
755 meta = {
742 meta = {
756 license = [ { fullName = "LGPL"; } ];
743 license = [ { fullName = "LGPL"; } ];
757 };
744 };
758 };
745 };
759 greenlet = super.buildPythonPackage {
746 greenlet = super.buildPythonPackage {
760 name = "greenlet-0.4.10";
747 name = "greenlet-0.4.10";
761 buildInputs = with self; [];
748 buildInputs = with self; [];
762 doCheck = false;
749 doCheck = false;
763 propagatedBuildInputs = with self; [];
750 propagatedBuildInputs = with self; [];
764 src = fetchurl {
751 src = fetchurl {
765 url = "https://pypi.python.org/packages/67/62/ca2a95648666eaa2ffeb6a9b3964f21d419ae27f82f2e66b53da5b943fc4/greenlet-0.4.10.zip";
752 url = "https://pypi.python.org/packages/67/62/ca2a95648666eaa2ffeb6a9b3964f21d419ae27f82f2e66b53da5b943fc4/greenlet-0.4.10.zip";
766 md5 = "bed0c4b3b896702131f4d5c72f87c41d";
753 md5 = "bed0c4b3b896702131f4d5c72f87c41d";
767 };
754 };
768 meta = {
755 meta = {
769 license = [ pkgs.lib.licenses.mit ];
756 license = [ pkgs.lib.licenses.mit ];
770 };
757 };
771 };
758 };
772 gunicorn = super.buildPythonPackage {
759 gunicorn = super.buildPythonPackage {
773 name = "gunicorn-19.6.0";
760 name = "gunicorn-19.6.0";
774 buildInputs = with self; [];
761 buildInputs = with self; [];
775 doCheck = false;
762 doCheck = false;
776 propagatedBuildInputs = with self; [];
763 propagatedBuildInputs = with self; [];
777 src = fetchurl {
764 src = fetchurl {
778 url = "https://pypi.python.org/packages/84/ce/7ea5396efad1cef682bbc4068e72a0276341d9d9d0f501da609fab9fcb80/gunicorn-19.6.0.tar.gz";
765 url = "https://pypi.python.org/packages/84/ce/7ea5396efad1cef682bbc4068e72a0276341d9d9d0f501da609fab9fcb80/gunicorn-19.6.0.tar.gz";
779 md5 = "338e5e8a83ea0f0625f768dba4597530";
766 md5 = "338e5e8a83ea0f0625f768dba4597530";
780 };
767 };
781 meta = {
768 meta = {
782 license = [ pkgs.lib.licenses.mit ];
769 license = [ pkgs.lib.licenses.mit ];
783 };
770 };
784 };
771 };
785 infrae.cache = super.buildPythonPackage {
772 infrae.cache = super.buildPythonPackage {
786 name = "infrae.cache-1.0.1";
773 name = "infrae.cache-1.0.1";
787 buildInputs = with self; [];
774 buildInputs = with self; [];
788 doCheck = false;
775 doCheck = false;
789 propagatedBuildInputs = with self; [Beaker repoze.lru];
776 propagatedBuildInputs = with self; [Beaker repoze.lru];
790 src = fetchurl {
777 src = fetchurl {
791 url = "https://pypi.python.org/packages/bb/f0/e7d5e984cf6592fd2807dc7bc44a93f9d18e04e6a61f87fdfb2622422d74/infrae.cache-1.0.1.tar.gz";
778 url = "https://pypi.python.org/packages/bb/f0/e7d5e984cf6592fd2807dc7bc44a93f9d18e04e6a61f87fdfb2622422d74/infrae.cache-1.0.1.tar.gz";
792 md5 = "b09076a766747e6ed2a755cc62088e32";
779 md5 = "b09076a766747e6ed2a755cc62088e32";
793 };
780 };
794 meta = {
781 meta = {
795 license = [ pkgs.lib.licenses.zpt21 ];
782 license = [ pkgs.lib.licenses.zpt21 ];
796 };
783 };
797 };
784 };
798 invoke = super.buildPythonPackage {
785 invoke = super.buildPythonPackage {
799 name = "invoke-0.13.0";
786 name = "invoke-0.13.0";
800 buildInputs = with self; [];
787 buildInputs = with self; [];
801 doCheck = false;
788 doCheck = false;
802 propagatedBuildInputs = with self; [];
789 propagatedBuildInputs = with self; [];
803 src = fetchurl {
790 src = fetchurl {
804 url = "https://pypi.python.org/packages/47/bf/d07ef52fa1ac645468858bbac7cb95b246a972a045e821493d17d89c81be/invoke-0.13.0.tar.gz";
791 url = "https://pypi.python.org/packages/47/bf/d07ef52fa1ac645468858bbac7cb95b246a972a045e821493d17d89c81be/invoke-0.13.0.tar.gz";
805 md5 = "c0d1ed4bfb34eaab551662d8cfee6540";
792 md5 = "c0d1ed4bfb34eaab551662d8cfee6540";
806 };
793 };
807 meta = {
794 meta = {
808 license = [ pkgs.lib.licenses.bsdOriginal ];
795 license = [ pkgs.lib.licenses.bsdOriginal ];
809 };
796 };
810 };
797 };
811 ipdb = super.buildPythonPackage {
798 ipdb = super.buildPythonPackage {
812 name = "ipdb-0.10.1";
799 name = "ipdb-0.10.1";
813 buildInputs = with self; [];
800 buildInputs = with self; [];
814 doCheck = false;
801 doCheck = false;
815 propagatedBuildInputs = with self; [ipython setuptools];
802 propagatedBuildInputs = with self; [ipython setuptools];
816 src = fetchurl {
803 src = fetchurl {
817 url = "https://pypi.python.org/packages/eb/0a/0a37dc19572580336ad3813792c0d18c8d7117c2d66fc63c501f13a7a8f8/ipdb-0.10.1.tar.gz";
804 url = "https://pypi.python.org/packages/eb/0a/0a37dc19572580336ad3813792c0d18c8d7117c2d66fc63c501f13a7a8f8/ipdb-0.10.1.tar.gz";
818 md5 = "4aeab65f633ddc98ebdb5eebf08dc713";
805 md5 = "4aeab65f633ddc98ebdb5eebf08dc713";
819 };
806 };
820 meta = {
807 meta = {
821 license = [ pkgs.lib.licenses.bsdOriginal ];
808 license = [ pkgs.lib.licenses.bsdOriginal ];
822 };
809 };
823 };
810 };
824 ipython = super.buildPythonPackage {
811 ipython = super.buildPythonPackage {
825 name = "ipython-5.1.0";
812 name = "ipython-5.1.0";
826 buildInputs = with self; [];
813 buildInputs = with self; [];
827 doCheck = false;
814 doCheck = false;
828 propagatedBuildInputs = with self; [setuptools decorator pickleshare simplegeneric traitlets prompt-toolkit Pygments pexpect backports.shutil-get-terminal-size pathlib2 pexpect];
815 propagatedBuildInputs = with self; [setuptools decorator pickleshare simplegeneric traitlets prompt-toolkit Pygments pexpect backports.shutil-get-terminal-size pathlib2 pexpect];
829 src = fetchurl {
816 src = fetchurl {
830 url = "https://pypi.python.org/packages/89/63/a9292f7cd9d0090a0f995e1167f3f17d5889dcbc9a175261719c513b9848/ipython-5.1.0.tar.gz";
817 url = "https://pypi.python.org/packages/89/63/a9292f7cd9d0090a0f995e1167f3f17d5889dcbc9a175261719c513b9848/ipython-5.1.0.tar.gz";
831 md5 = "47c8122420f65b58784cb4b9b4af35e3";
818 md5 = "47c8122420f65b58784cb4b9b4af35e3";
832 };
819 };
833 meta = {
820 meta = {
834 license = [ pkgs.lib.licenses.bsdOriginal ];
821 license = [ pkgs.lib.licenses.bsdOriginal ];
835 };
822 };
836 };
823 };
837 ipython-genutils = super.buildPythonPackage {
824 ipython-genutils = super.buildPythonPackage {
838 name = "ipython-genutils-0.1.0";
825 name = "ipython-genutils-0.1.0";
839 buildInputs = with self; [];
826 buildInputs = with self; [];
840 doCheck = false;
827 doCheck = false;
841 propagatedBuildInputs = with self; [];
828 propagatedBuildInputs = with self; [];
842 src = fetchurl {
829 src = fetchurl {
843 url = "https://pypi.python.org/packages/71/b7/a64c71578521606edbbce15151358598f3dfb72a3431763edc2baf19e71f/ipython_genutils-0.1.0.tar.gz";
830 url = "https://pypi.python.org/packages/71/b7/a64c71578521606edbbce15151358598f3dfb72a3431763edc2baf19e71f/ipython_genutils-0.1.0.tar.gz";
844 md5 = "9a8afbe0978adbcbfcb3b35b2d015a56";
831 md5 = "9a8afbe0978adbcbfcb3b35b2d015a56";
845 };
832 };
846 meta = {
833 meta = {
847 license = [ pkgs.lib.licenses.bsdOriginal ];
834 license = [ pkgs.lib.licenses.bsdOriginal ];
848 };
835 };
849 };
836 };
850 iso8601 = super.buildPythonPackage {
837 iso8601 = super.buildPythonPackage {
851 name = "iso8601-0.1.11";
838 name = "iso8601-0.1.11";
852 buildInputs = with self; [];
839 buildInputs = with self; [];
853 doCheck = false;
840 doCheck = false;
854 propagatedBuildInputs = with self; [];
841 propagatedBuildInputs = with self; [];
855 src = fetchurl {
842 src = fetchurl {
856 url = "https://pypi.python.org/packages/c0/75/c9209ee4d1b5975eb8c2cba4428bde6b61bd55664a98290dd015cdb18e98/iso8601-0.1.11.tar.gz";
843 url = "https://pypi.python.org/packages/c0/75/c9209ee4d1b5975eb8c2cba4428bde6b61bd55664a98290dd015cdb18e98/iso8601-0.1.11.tar.gz";
857 md5 = "b06d11cd14a64096f907086044f0fe38";
844 md5 = "b06d11cd14a64096f907086044f0fe38";
858 };
845 };
859 meta = {
846 meta = {
860 license = [ pkgs.lib.licenses.mit ];
847 license = [ pkgs.lib.licenses.mit ];
861 };
848 };
862 };
849 };
863 itsdangerous = super.buildPythonPackage {
850 itsdangerous = super.buildPythonPackage {
864 name = "itsdangerous-0.24";
851 name = "itsdangerous-0.24";
865 buildInputs = with self; [];
852 buildInputs = with self; [];
866 doCheck = false;
853 doCheck = false;
867 propagatedBuildInputs = with self; [];
854 propagatedBuildInputs = with self; [];
868 src = fetchurl {
855 src = fetchurl {
869 url = "https://pypi.python.org/packages/dc/b4/a60bcdba945c00f6d608d8975131ab3f25b22f2bcfe1dab221165194b2d4/itsdangerous-0.24.tar.gz";
856 url = "https://pypi.python.org/packages/dc/b4/a60bcdba945c00f6d608d8975131ab3f25b22f2bcfe1dab221165194b2d4/itsdangerous-0.24.tar.gz";
870 md5 = "a3d55aa79369aef5345c036a8a26307f";
857 md5 = "a3d55aa79369aef5345c036a8a26307f";
871 };
858 };
872 meta = {
859 meta = {
873 license = [ pkgs.lib.licenses.bsdOriginal ];
860 license = [ pkgs.lib.licenses.bsdOriginal ];
874 };
861 };
875 };
862 };
876 kombu = super.buildPythonPackage {
863 kombu = super.buildPythonPackage {
877 name = "kombu-1.5.1";
864 name = "kombu-1.5.1";
878 buildInputs = with self; [];
865 buildInputs = with self; [];
879 doCheck = false;
866 doCheck = false;
880 propagatedBuildInputs = with self; [anyjson amqplib];
867 propagatedBuildInputs = with self; [anyjson amqplib];
881 src = fetchurl {
868 src = fetchurl {
882 url = "https://pypi.python.org/packages/19/53/74bf2a624644b45f0850a638752514fc10a8e1cbd738f10804951a6df3f5/kombu-1.5.1.tar.gz";
869 url = "https://pypi.python.org/packages/19/53/74bf2a624644b45f0850a638752514fc10a8e1cbd738f10804951a6df3f5/kombu-1.5.1.tar.gz";
883 md5 = "50662f3c7e9395b3d0721fb75d100b63";
870 md5 = "50662f3c7e9395b3d0721fb75d100b63";
884 };
871 };
885 meta = {
872 meta = {
886 license = [ pkgs.lib.licenses.bsdOriginal ];
873 license = [ pkgs.lib.licenses.bsdOriginal ];
887 };
874 };
888 };
875 };
889 lxml = super.buildPythonPackage {
876 lxml = super.buildPythonPackage {
890 name = "lxml-3.4.4";
877 name = "lxml-3.4.4";
891 buildInputs = with self; [];
878 buildInputs = with self; [];
892 doCheck = false;
879 doCheck = false;
893 propagatedBuildInputs = with self; [];
880 propagatedBuildInputs = with self; [];
894 src = fetchurl {
881 src = fetchurl {
895 url = "https://pypi.python.org/packages/63/c7/4f2a2a4ad6c6fa99b14be6b3c1cece9142e2d915aa7c43c908677afc8fa4/lxml-3.4.4.tar.gz";
882 url = "https://pypi.python.org/packages/63/c7/4f2a2a4ad6c6fa99b14be6b3c1cece9142e2d915aa7c43c908677afc8fa4/lxml-3.4.4.tar.gz";
896 md5 = "a9a65972afc173ec7a39c585f4eea69c";
883 md5 = "a9a65972afc173ec7a39c585f4eea69c";
897 };
884 };
898 meta = {
885 meta = {
899 license = [ pkgs.lib.licenses.bsdOriginal ];
886 license = [ pkgs.lib.licenses.bsdOriginal ];
900 };
887 };
901 };
888 };
902 meld3 = super.buildPythonPackage {
889 meld3 = super.buildPythonPackage {
903 name = "meld3-1.0.2";
890 name = "meld3-1.0.2";
904 buildInputs = with self; [];
891 buildInputs = with self; [];
905 doCheck = false;
892 doCheck = false;
906 propagatedBuildInputs = with self; [];
893 propagatedBuildInputs = with self; [];
907 src = fetchurl {
894 src = fetchurl {
908 url = "https://pypi.python.org/packages/45/a0/317c6422b26c12fe0161e936fc35f36552069ba8e6f7ecbd99bbffe32a5f/meld3-1.0.2.tar.gz";
895 url = "https://pypi.python.org/packages/45/a0/317c6422b26c12fe0161e936fc35f36552069ba8e6f7ecbd99bbffe32a5f/meld3-1.0.2.tar.gz";
909 md5 = "3ccc78cd79cffd63a751ad7684c02c91";
896 md5 = "3ccc78cd79cffd63a751ad7684c02c91";
910 };
897 };
911 meta = {
898 meta = {
912 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
899 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
913 };
900 };
914 };
901 };
915 mock = super.buildPythonPackage {
902 mock = super.buildPythonPackage {
916 name = "mock-1.0.1";
903 name = "mock-1.0.1";
917 buildInputs = with self; [];
904 buildInputs = with self; [];
918 doCheck = false;
905 doCheck = false;
919 propagatedBuildInputs = with self; [];
906 propagatedBuildInputs = with self; [];
920 src = fetchurl {
907 src = fetchurl {
921 url = "https://pypi.python.org/packages/15/45/30273ee91feb60dabb8fbb2da7868520525f02cf910279b3047182feed80/mock-1.0.1.zip";
908 url = "https://pypi.python.org/packages/15/45/30273ee91feb60dabb8fbb2da7868520525f02cf910279b3047182feed80/mock-1.0.1.zip";
922 md5 = "869f08d003c289a97c1a6610faf5e913";
909 md5 = "869f08d003c289a97c1a6610faf5e913";
923 };
910 };
924 meta = {
911 meta = {
925 license = [ pkgs.lib.licenses.bsdOriginal ];
912 license = [ pkgs.lib.licenses.bsdOriginal ];
926 };
913 };
927 };
914 };
928 msgpack-python = super.buildPythonPackage {
915 msgpack-python = super.buildPythonPackage {
929 name = "msgpack-python-0.4.8";
916 name = "msgpack-python-0.4.8";
930 buildInputs = with self; [];
917 buildInputs = with self; [];
931 doCheck = false;
918 doCheck = false;
932 propagatedBuildInputs = with self; [];
919 propagatedBuildInputs = with self; [];
933 src = fetchurl {
920 src = fetchurl {
934 url = "https://pypi.python.org/packages/21/27/8a1d82041c7a2a51fcc73675875a5f9ea06c2663e02fcfeb708be1d081a0/msgpack-python-0.4.8.tar.gz";
921 url = "https://pypi.python.org/packages/21/27/8a1d82041c7a2a51fcc73675875a5f9ea06c2663e02fcfeb708be1d081a0/msgpack-python-0.4.8.tar.gz";
935 md5 = "dcd854fb41ee7584ebbf35e049e6be98";
922 md5 = "dcd854fb41ee7584ebbf35e049e6be98";
936 };
923 };
937 meta = {
924 meta = {
938 license = [ pkgs.lib.licenses.asl20 ];
925 license = [ pkgs.lib.licenses.asl20 ];
939 };
926 };
940 };
927 };
941 nose = super.buildPythonPackage {
928 nose = super.buildPythonPackage {
942 name = "nose-1.3.6";
929 name = "nose-1.3.6";
943 buildInputs = with self; [];
930 buildInputs = with self; [];
944 doCheck = false;
931 doCheck = false;
945 propagatedBuildInputs = with self; [];
932 propagatedBuildInputs = with self; [];
946 src = fetchurl {
933 src = fetchurl {
947 url = "https://pypi.python.org/packages/70/c7/469e68148d17a0d3db5ed49150242fd70a74a8147b8f3f8b87776e028d99/nose-1.3.6.tar.gz";
934 url = "https://pypi.python.org/packages/70/c7/469e68148d17a0d3db5ed49150242fd70a74a8147b8f3f8b87776e028d99/nose-1.3.6.tar.gz";
948 md5 = "0ca546d81ca8309080fc80cb389e7a16";
935 md5 = "0ca546d81ca8309080fc80cb389e7a16";
949 };
936 };
950 meta = {
937 meta = {
951 license = [ { fullName = "GNU Library or Lesser General Public License (LGPL)"; } { fullName = "GNU LGPL"; } ];
938 license = [ { fullName = "GNU Library or Lesser General Public License (LGPL)"; } { fullName = "GNU LGPL"; } ];
952 };
939 };
953 };
940 };
954 objgraph = super.buildPythonPackage {
941 objgraph = super.buildPythonPackage {
955 name = "objgraph-2.0.0";
942 name = "objgraph-2.0.0";
956 buildInputs = with self; [];
943 buildInputs = with self; [];
957 doCheck = false;
944 doCheck = false;
958 propagatedBuildInputs = with self; [];
945 propagatedBuildInputs = with self; [];
959 src = fetchurl {
946 src = fetchurl {
960 url = "https://pypi.python.org/packages/d7/33/ace750b59247496ed769b170586c5def7202683f3d98e737b75b767ff29e/objgraph-2.0.0.tar.gz";
947 url = "https://pypi.python.org/packages/d7/33/ace750b59247496ed769b170586c5def7202683f3d98e737b75b767ff29e/objgraph-2.0.0.tar.gz";
961 md5 = "25b0d5e5adc74aa63ead15699614159c";
948 md5 = "25b0d5e5adc74aa63ead15699614159c";
962 };
949 };
963 meta = {
950 meta = {
964 license = [ pkgs.lib.licenses.mit ];
951 license = [ pkgs.lib.licenses.mit ];
965 };
952 };
966 };
953 };
967 packaging = super.buildPythonPackage {
954 packaging = super.buildPythonPackage {
968 name = "packaging-15.2";
955 name = "packaging-15.2";
969 buildInputs = with self; [];
956 buildInputs = with self; [];
970 doCheck = false;
957 doCheck = false;
971 propagatedBuildInputs = with self; [];
958 propagatedBuildInputs = with self; [];
972 src = fetchurl {
959 src = fetchurl {
973 url = "https://pypi.python.org/packages/24/c4/185da1304f07047dc9e0c46c31db75c0351bd73458ac3efad7da3dbcfbe1/packaging-15.2.tar.gz";
960 url = "https://pypi.python.org/packages/24/c4/185da1304f07047dc9e0c46c31db75c0351bd73458ac3efad7da3dbcfbe1/packaging-15.2.tar.gz";
974 md5 = "c16093476f6ced42128bf610e5db3784";
961 md5 = "c16093476f6ced42128bf610e5db3784";
975 };
962 };
976 meta = {
963 meta = {
977 license = [ pkgs.lib.licenses.asl20 ];
964 license = [ pkgs.lib.licenses.asl20 ];
978 };
965 };
979 };
966 };
980 paramiko = super.buildPythonPackage {
967 paramiko = super.buildPythonPackage {
981 name = "paramiko-1.15.1";
968 name = "paramiko-1.15.1";
982 buildInputs = with self; [];
969 buildInputs = with self; [];
983 doCheck = false;
970 doCheck = false;
984 propagatedBuildInputs = with self; [pycrypto ecdsa];
971 propagatedBuildInputs = with self; [pycrypto ecdsa];
985 src = fetchurl {
972 src = fetchurl {
986 url = "https://pypi.python.org/packages/04/2b/a22d2a560c1951abbbf95a0628e245945565f70dc082d9e784666887222c/paramiko-1.15.1.tar.gz";
973 url = "https://pypi.python.org/packages/04/2b/a22d2a560c1951abbbf95a0628e245945565f70dc082d9e784666887222c/paramiko-1.15.1.tar.gz";
987 md5 = "48c274c3f9b1282932567b21f6acf3b5";
974 md5 = "48c274c3f9b1282932567b21f6acf3b5";
988 };
975 };
989 meta = {
976 meta = {
990 license = [ { fullName = "LGPL"; } { fullName = "GNU Library or Lesser General Public License (LGPL)"; } ];
977 license = [ { fullName = "LGPL"; } { fullName = "GNU Library or Lesser General Public License (LGPL)"; } ];
991 };
978 };
992 };
979 };
993 pathlib2 = super.buildPythonPackage {
980 pathlib2 = super.buildPythonPackage {
994 name = "pathlib2-2.1.0";
981 name = "pathlib2-2.1.0";
995 buildInputs = with self; [];
982 buildInputs = with self; [];
996 doCheck = false;
983 doCheck = false;
997 propagatedBuildInputs = with self; [six];
984 propagatedBuildInputs = with self; [six];
998 src = fetchurl {
985 src = fetchurl {
999 url = "https://pypi.python.org/packages/c9/27/8448b10d8440c08efeff0794adf7d0ed27adb98372c70c7b38f3947d4749/pathlib2-2.1.0.tar.gz";
986 url = "https://pypi.python.org/packages/c9/27/8448b10d8440c08efeff0794adf7d0ed27adb98372c70c7b38f3947d4749/pathlib2-2.1.0.tar.gz";
1000 md5 = "38e4f58b4d69dfcb9edb49a54a8b28d2";
987 md5 = "38e4f58b4d69dfcb9edb49a54a8b28d2";
1001 };
988 };
1002 meta = {
989 meta = {
1003 license = [ pkgs.lib.licenses.mit ];
990 license = [ pkgs.lib.licenses.mit ];
1004 };
991 };
1005 };
992 };
1006 peppercorn = super.buildPythonPackage {
993 peppercorn = super.buildPythonPackage {
1007 name = "peppercorn-0.5";
994 name = "peppercorn-0.5";
1008 buildInputs = with self; [];
995 buildInputs = with self; [];
1009 doCheck = false;
996 doCheck = false;
1010 propagatedBuildInputs = with self; [];
997 propagatedBuildInputs = with self; [];
1011 src = fetchurl {
998 src = fetchurl {
1012 url = "https://pypi.python.org/packages/45/ec/a62ec317d1324a01567c5221b420742f094f05ee48097e5157d32be3755c/peppercorn-0.5.tar.gz";
999 url = "https://pypi.python.org/packages/45/ec/a62ec317d1324a01567c5221b420742f094f05ee48097e5157d32be3755c/peppercorn-0.5.tar.gz";
1013 md5 = "f08efbca5790019ab45d76b7244abd40";
1000 md5 = "f08efbca5790019ab45d76b7244abd40";
1014 };
1001 };
1015 meta = {
1002 meta = {
1016 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1003 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1017 };
1004 };
1018 };
1005 };
1019 pexpect = super.buildPythonPackage {
1006 pexpect = super.buildPythonPackage {
1020 name = "pexpect-4.2.1";
1007 name = "pexpect-4.2.1";
1021 buildInputs = with self; [];
1008 buildInputs = with self; [];
1022 doCheck = false;
1009 doCheck = false;
1023 propagatedBuildInputs = with self; [ptyprocess];
1010 propagatedBuildInputs = with self; [ptyprocess];
1024 src = fetchurl {
1011 src = fetchurl {
1025 url = "https://pypi.python.org/packages/e8/13/d0b0599099d6cd23663043a2a0bb7c61e58c6ba359b2656e6fb000ef5b98/pexpect-4.2.1.tar.gz";
1012 url = "https://pypi.python.org/packages/e8/13/d0b0599099d6cd23663043a2a0bb7c61e58c6ba359b2656e6fb000ef5b98/pexpect-4.2.1.tar.gz";
1026 md5 = "3694410001a99dff83f0b500a1ca1c95";
1013 md5 = "3694410001a99dff83f0b500a1ca1c95";
1027 };
1014 };
1028 meta = {
1015 meta = {
1029 license = [ pkgs.lib.licenses.isc { fullName = "ISC License (ISCL)"; } ];
1016 license = [ pkgs.lib.licenses.isc { fullName = "ISC License (ISCL)"; } ];
1030 };
1017 };
1031 };
1018 };
1032 pickleshare = super.buildPythonPackage {
1019 pickleshare = super.buildPythonPackage {
1033 name = "pickleshare-0.7.4";
1020 name = "pickleshare-0.7.4";
1034 buildInputs = with self; [];
1021 buildInputs = with self; [];
1035 doCheck = false;
1022 doCheck = false;
1036 propagatedBuildInputs = with self; [pathlib2];
1023 propagatedBuildInputs = with self; [pathlib2];
1037 src = fetchurl {
1024 src = fetchurl {
1038 url = "https://pypi.python.org/packages/69/fe/dd137d84daa0fd13a709e448138e310d9ea93070620c9db5454e234af525/pickleshare-0.7.4.tar.gz";
1025 url = "https://pypi.python.org/packages/69/fe/dd137d84daa0fd13a709e448138e310d9ea93070620c9db5454e234af525/pickleshare-0.7.4.tar.gz";
1039 md5 = "6a9e5dd8dfc023031f6b7b3f824cab12";
1026 md5 = "6a9e5dd8dfc023031f6b7b3f824cab12";
1040 };
1027 };
1041 meta = {
1028 meta = {
1042 license = [ pkgs.lib.licenses.mit ];
1029 license = [ pkgs.lib.licenses.mit ];
1043 };
1030 };
1044 };
1031 };
1045 prompt-toolkit = super.buildPythonPackage {
1032 prompt-toolkit = super.buildPythonPackage {
1046 name = "prompt-toolkit-1.0.9";
1033 name = "prompt-toolkit-1.0.9";
1047 buildInputs = with self; [];
1034 buildInputs = with self; [];
1048 doCheck = false;
1035 doCheck = false;
1049 propagatedBuildInputs = with self; [six wcwidth];
1036 propagatedBuildInputs = with self; [six wcwidth];
1050 src = fetchurl {
1037 src = fetchurl {
1051 url = "https://pypi.python.org/packages/83/14/5ac258da6c530eca02852ee25c7a9ff3ca78287bb4c198d0d0055845d856/prompt_toolkit-1.0.9.tar.gz";
1038 url = "https://pypi.python.org/packages/83/14/5ac258da6c530eca02852ee25c7a9ff3ca78287bb4c198d0d0055845d856/prompt_toolkit-1.0.9.tar.gz";
1052 md5 = "a39f91a54308fb7446b1a421c11f227c";
1039 md5 = "a39f91a54308fb7446b1a421c11f227c";
1053 };
1040 };
1054 meta = {
1041 meta = {
1055 license = [ pkgs.lib.licenses.bsdOriginal ];
1042 license = [ pkgs.lib.licenses.bsdOriginal ];
1056 };
1043 };
1057 };
1044 };
1058 psutil = super.buildPythonPackage {
1045 psutil = super.buildPythonPackage {
1059 name = "psutil-4.3.1";
1046 name = "psutil-4.3.1";
1060 buildInputs = with self; [];
1047 buildInputs = with self; [];
1061 doCheck = false;
1048 doCheck = false;
1062 propagatedBuildInputs = with self; [];
1049 propagatedBuildInputs = with self; [];
1063 src = fetchurl {
1050 src = fetchurl {
1064 url = "https://pypi.python.org/packages/78/cc/f267a1371f229bf16db6a4e604428c3b032b823b83155bd33cef45e49a53/psutil-4.3.1.tar.gz";
1051 url = "https://pypi.python.org/packages/78/cc/f267a1371f229bf16db6a4e604428c3b032b823b83155bd33cef45e49a53/psutil-4.3.1.tar.gz";
1065 md5 = "199a366dba829c88bddaf5b41d19ddc0";
1052 md5 = "199a366dba829c88bddaf5b41d19ddc0";
1066 };
1053 };
1067 meta = {
1054 meta = {
1068 license = [ pkgs.lib.licenses.bsdOriginal ];
1055 license = [ pkgs.lib.licenses.bsdOriginal ];
1069 };
1056 };
1070 };
1057 };
1071 psycopg2 = super.buildPythonPackage {
1058 psycopg2 = super.buildPythonPackage {
1072 name = "psycopg2-2.6.1";
1059 name = "psycopg2-2.6.1";
1073 buildInputs = with self; [];
1060 buildInputs = with self; [];
1074 doCheck = false;
1061 doCheck = false;
1075 propagatedBuildInputs = with self; [];
1062 propagatedBuildInputs = with self; [];
1076 src = fetchurl {
1063 src = fetchurl {
1077 url = "https://pypi.python.org/packages/86/fd/cc8315be63a41fe000cce20482a917e874cdc1151e62cb0141f5e55f711e/psycopg2-2.6.1.tar.gz";
1064 url = "https://pypi.python.org/packages/86/fd/cc8315be63a41fe000cce20482a917e874cdc1151e62cb0141f5e55f711e/psycopg2-2.6.1.tar.gz";
1078 md5 = "842b44f8c95517ed5b792081a2370da1";
1065 md5 = "842b44f8c95517ed5b792081a2370da1";
1079 };
1066 };
1080 meta = {
1067 meta = {
1081 license = [ pkgs.lib.licenses.zpt21 { fullName = "GNU Library or Lesser General Public License (LGPL)"; } { fullName = "LGPL with exceptions or ZPL"; } ];
1068 license = [ pkgs.lib.licenses.zpt21 { fullName = "GNU Library or Lesser General Public License (LGPL)"; } { fullName = "LGPL with exceptions or ZPL"; } ];
1082 };
1069 };
1083 };
1070 };
1084 ptyprocess = super.buildPythonPackage {
1071 ptyprocess = super.buildPythonPackage {
1085 name = "ptyprocess-0.5.1";
1072 name = "ptyprocess-0.5.1";
1086 buildInputs = with self; [];
1073 buildInputs = with self; [];
1087 doCheck = false;
1074 doCheck = false;
1088 propagatedBuildInputs = with self; [];
1075 propagatedBuildInputs = with self; [];
1089 src = fetchurl {
1076 src = fetchurl {
1090 url = "https://pypi.python.org/packages/db/d7/b465161910f3d1cef593c5e002bff67e0384898f597f1a7fdc8db4c02bf6/ptyprocess-0.5.1.tar.gz";
1077 url = "https://pypi.python.org/packages/db/d7/b465161910f3d1cef593c5e002bff67e0384898f597f1a7fdc8db4c02bf6/ptyprocess-0.5.1.tar.gz";
1091 md5 = "94e537122914cc9ec9c1eadcd36e73a1";
1078 md5 = "94e537122914cc9ec9c1eadcd36e73a1";
1092 };
1079 };
1093 meta = {
1080 meta = {
1094 license = [ ];
1081 license = [ ];
1095 };
1082 };
1096 };
1083 };
1097 py = super.buildPythonPackage {
1084 py = super.buildPythonPackage {
1098 name = "py-1.4.31";
1085 name = "py-1.4.31";
1099 buildInputs = with self; [];
1086 buildInputs = with self; [];
1100 doCheck = false;
1087 doCheck = false;
1101 propagatedBuildInputs = with self; [];
1088 propagatedBuildInputs = with self; [];
1102 src = fetchurl {
1089 src = fetchurl {
1103 url = "https://pypi.python.org/packages/f4/9a/8dfda23f36600dd701c6722316ba8a3ab4b990261f83e7d3ffc6dfedf7ef/py-1.4.31.tar.gz";
1090 url = "https://pypi.python.org/packages/f4/9a/8dfda23f36600dd701c6722316ba8a3ab4b990261f83e7d3ffc6dfedf7ef/py-1.4.31.tar.gz";
1104 md5 = "5d2c63c56dc3f2115ec35c066ecd582b";
1091 md5 = "5d2c63c56dc3f2115ec35c066ecd582b";
1105 };
1092 };
1106 meta = {
1093 meta = {
1107 license = [ pkgs.lib.licenses.mit ];
1094 license = [ pkgs.lib.licenses.mit ];
1108 };
1095 };
1109 };
1096 };
1110 py-bcrypt = super.buildPythonPackage {
1097 py-bcrypt = super.buildPythonPackage {
1111 name = "py-bcrypt-0.4";
1098 name = "py-bcrypt-0.4";
1112 buildInputs = with self; [];
1099 buildInputs = with self; [];
1113 doCheck = false;
1100 doCheck = false;
1114 propagatedBuildInputs = with self; [];
1101 propagatedBuildInputs = with self; [];
1115 src = fetchurl {
1102 src = fetchurl {
1116 url = "https://pypi.python.org/packages/68/b1/1c3068c5c4d2e35c48b38dcc865301ebfdf45f54507086ac65ced1fd3b3d/py-bcrypt-0.4.tar.gz";
1103 url = "https://pypi.python.org/packages/68/b1/1c3068c5c4d2e35c48b38dcc865301ebfdf45f54507086ac65ced1fd3b3d/py-bcrypt-0.4.tar.gz";
1117 md5 = "dd8b367d6b716a2ea2e72392525f4e36";
1104 md5 = "dd8b367d6b716a2ea2e72392525f4e36";
1118 };
1105 };
1119 meta = {
1106 meta = {
1120 license = [ pkgs.lib.licenses.bsdOriginal ];
1107 license = [ pkgs.lib.licenses.bsdOriginal ];
1121 };
1108 };
1122 };
1109 };
1123 py-gfm = super.buildPythonPackage {
1110 py-gfm = super.buildPythonPackage {
1124 name = "py-gfm-0.1.3";
1111 name = "py-gfm-0.1.3";
1125 buildInputs = with self; [];
1112 buildInputs = with self; [];
1126 doCheck = false;
1113 doCheck = false;
1127 propagatedBuildInputs = with self; [setuptools Markdown];
1114 propagatedBuildInputs = with self; [setuptools Markdown];
1128 src = fetchurl {
1115 src = fetchurl {
1129 url = "https://code.rhodecode.com/upstream/py-gfm/archive/0d66a19bc16e3d49de273c0f797d4e4781e8c0f2.tar.gz?md5=0d0d5385bfb629eea636a80b9c2bfd16";
1116 url = "https://code.rhodecode.com/upstream/py-gfm/archive/0d66a19bc16e3d49de273c0f797d4e4781e8c0f2.tar.gz?md5=0d0d5385bfb629eea636a80b9c2bfd16";
1130 md5 = "0d0d5385bfb629eea636a80b9c2bfd16";
1117 md5 = "0d0d5385bfb629eea636a80b9c2bfd16";
1131 };
1118 };
1132 meta = {
1119 meta = {
1133 license = [ pkgs.lib.licenses.bsdOriginal ];
1120 license = [ pkgs.lib.licenses.bsdOriginal ];
1134 };
1121 };
1135 };
1122 };
1136 pycrypto = super.buildPythonPackage {
1123 pycrypto = super.buildPythonPackage {
1137 name = "pycrypto-2.6.1";
1124 name = "pycrypto-2.6.1";
1138 buildInputs = with self; [];
1125 buildInputs = with self; [];
1139 doCheck = false;
1126 doCheck = false;
1140 propagatedBuildInputs = with self; [];
1127 propagatedBuildInputs = with self; [];
1141 src = fetchurl {
1128 src = fetchurl {
1142 url = "https://pypi.python.org/packages/60/db/645aa9af249f059cc3a368b118de33889219e0362141e75d4eaf6f80f163/pycrypto-2.6.1.tar.gz";
1129 url = "https://pypi.python.org/packages/60/db/645aa9af249f059cc3a368b118de33889219e0362141e75d4eaf6f80f163/pycrypto-2.6.1.tar.gz";
1143 md5 = "55a61a054aa66812daf5161a0d5d7eda";
1130 md5 = "55a61a054aa66812daf5161a0d5d7eda";
1144 };
1131 };
1145 meta = {
1132 meta = {
1146 license = [ pkgs.lib.licenses.publicDomain ];
1133 license = [ pkgs.lib.licenses.publicDomain ];
1147 };
1134 };
1148 };
1135 };
1149 pycurl = super.buildPythonPackage {
1136 pycurl = super.buildPythonPackage {
1150 name = "pycurl-7.19.5";
1137 name = "pycurl-7.19.5";
1151 buildInputs = with self; [];
1138 buildInputs = with self; [];
1152 doCheck = false;
1139 doCheck = false;
1153 propagatedBuildInputs = with self; [];
1140 propagatedBuildInputs = with self; [];
1154 src = fetchurl {
1141 src = fetchurl {
1155 url = "https://pypi.python.org/packages/6c/48/13bad289ef6f4869b1d8fc11ae54de8cfb3cc4a2eb9f7419c506f763be46/pycurl-7.19.5.tar.gz";
1142 url = "https://pypi.python.org/packages/6c/48/13bad289ef6f4869b1d8fc11ae54de8cfb3cc4a2eb9f7419c506f763be46/pycurl-7.19.5.tar.gz";
1156 md5 = "47b4eac84118e2606658122104e62072";
1143 md5 = "47b4eac84118e2606658122104e62072";
1157 };
1144 };
1158 meta = {
1145 meta = {
1159 license = [ pkgs.lib.licenses.mit { fullName = "LGPL/MIT"; } { fullName = "GNU Library or Lesser General Public License (LGPL)"; } ];
1146 license = [ pkgs.lib.licenses.mit { fullName = "LGPL/MIT"; } { fullName = "GNU Library or Lesser General Public License (LGPL)"; } ];
1160 };
1147 };
1161 };
1148 };
1162 pyflakes = super.buildPythonPackage {
1149 pyflakes = super.buildPythonPackage {
1163 name = "pyflakes-0.8.1";
1150 name = "pyflakes-0.8.1";
1164 buildInputs = with self; [];
1151 buildInputs = with self; [];
1165 doCheck = false;
1152 doCheck = false;
1166 propagatedBuildInputs = with self; [];
1153 propagatedBuildInputs = with self; [];
1167 src = fetchurl {
1154 src = fetchurl {
1168 url = "https://pypi.python.org/packages/75/22/a90ec0252f4f87f3ffb6336504de71fe16a49d69c4538dae2f12b9360a38/pyflakes-0.8.1.tar.gz";
1155 url = "https://pypi.python.org/packages/75/22/a90ec0252f4f87f3ffb6336504de71fe16a49d69c4538dae2f12b9360a38/pyflakes-0.8.1.tar.gz";
1169 md5 = "905fe91ad14b912807e8fdc2ac2e2c23";
1156 md5 = "905fe91ad14b912807e8fdc2ac2e2c23";
1170 };
1157 };
1171 meta = {
1158 meta = {
1172 license = [ pkgs.lib.licenses.mit ];
1159 license = [ pkgs.lib.licenses.mit ];
1173 };
1160 };
1174 };
1161 };
1175 pygments-markdown-lexer = super.buildPythonPackage {
1162 pygments-markdown-lexer = super.buildPythonPackage {
1176 name = "pygments-markdown-lexer-0.1.0.dev39";
1163 name = "pygments-markdown-lexer-0.1.0.dev39";
1177 buildInputs = with self; [];
1164 buildInputs = with self; [];
1178 doCheck = false;
1165 doCheck = false;
1179 propagatedBuildInputs = with self; [Pygments];
1166 propagatedBuildInputs = with self; [Pygments];
1180 src = fetchurl {
1167 src = fetchurl {
1181 url = "https://pypi.python.org/packages/c3/12/674cdee66635d638cedb2c5d9c85ce507b7b2f91bdba29e482f1b1160ff6/pygments-markdown-lexer-0.1.0.dev39.zip";
1168 url = "https://pypi.python.org/packages/c3/12/674cdee66635d638cedb2c5d9c85ce507b7b2f91bdba29e482f1b1160ff6/pygments-markdown-lexer-0.1.0.dev39.zip";
1182 md5 = "6360fe0f6d1f896e35b7a0142ce6459c";
1169 md5 = "6360fe0f6d1f896e35b7a0142ce6459c";
1183 };
1170 };
1184 meta = {
1171 meta = {
1185 license = [ pkgs.lib.licenses.asl20 ];
1172 license = [ pkgs.lib.licenses.asl20 ];
1186 };
1173 };
1187 };
1174 };
1188 pyparsing = super.buildPythonPackage {
1175 pyparsing = super.buildPythonPackage {
1189 name = "pyparsing-1.5.7";
1176 name = "pyparsing-1.5.7";
1190 buildInputs = with self; [];
1177 buildInputs = with self; [];
1191 doCheck = false;
1178 doCheck = false;
1192 propagatedBuildInputs = with self; [];
1179 propagatedBuildInputs = with self; [];
1193 src = fetchurl {
1180 src = fetchurl {
1194 url = "https://pypi.python.org/packages/2e/26/e8fb5b4256a5f5036be7ce115ef8db8d06bc537becfbdc46c6af008314ee/pyparsing-1.5.7.zip";
1181 url = "https://pypi.python.org/packages/2e/26/e8fb5b4256a5f5036be7ce115ef8db8d06bc537becfbdc46c6af008314ee/pyparsing-1.5.7.zip";
1195 md5 = "b86854857a368d6ccb4d5b6e76d0637f";
1182 md5 = "b86854857a368d6ccb4d5b6e76d0637f";
1196 };
1183 };
1197 meta = {
1184 meta = {
1198 license = [ pkgs.lib.licenses.mit ];
1185 license = [ pkgs.lib.licenses.mit ];
1199 };
1186 };
1200 };
1187 };
1201 pyramid = super.buildPythonPackage {
1188 pyramid = super.buildPythonPackage {
1202 name = "pyramid-1.7.4";
1189 name = "pyramid-1.7.4";
1203 buildInputs = with self; [];
1190 buildInputs = with self; [];
1204 doCheck = false;
1191 doCheck = false;
1205 propagatedBuildInputs = with self; [setuptools WebOb repoze.lru zope.interface zope.deprecation venusian translationstring PasteDeploy];
1192 propagatedBuildInputs = with self; [setuptools WebOb repoze.lru zope.interface zope.deprecation venusian translationstring PasteDeploy];
1206 src = fetchurl {
1193 src = fetchurl {
1207 url = "https://pypi.python.org/packages/33/91/55f5c661f8923902cd1f68d75f2b937c45e7682857356cf18f0be5493899/pyramid-1.7.4.tar.gz";
1194 url = "https://pypi.python.org/packages/33/91/55f5c661f8923902cd1f68d75f2b937c45e7682857356cf18f0be5493899/pyramid-1.7.4.tar.gz";
1208 md5 = "6ef1dfdcff9136d04490410757c4c446";
1195 md5 = "6ef1dfdcff9136d04490410757c4c446";
1209 };
1196 };
1210 meta = {
1197 meta = {
1211 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1198 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1212 };
1199 };
1213 };
1200 };
1214 pyramid-beaker = super.buildPythonPackage {
1201 pyramid-beaker = super.buildPythonPackage {
1215 name = "pyramid-beaker-0.8";
1202 name = "pyramid-beaker-0.8";
1216 buildInputs = with self; [];
1203 buildInputs = with self; [];
1217 doCheck = false;
1204 doCheck = false;
1218 propagatedBuildInputs = with self; [pyramid Beaker];
1205 propagatedBuildInputs = with self; [pyramid Beaker];
1219 src = fetchurl {
1206 src = fetchurl {
1220 url = "https://pypi.python.org/packages/d9/6e/b85426e00fd3d57f4545f74e1c3828552d8700f13ededeef9233f7bca8be/pyramid_beaker-0.8.tar.gz";
1207 url = "https://pypi.python.org/packages/d9/6e/b85426e00fd3d57f4545f74e1c3828552d8700f13ededeef9233f7bca8be/pyramid_beaker-0.8.tar.gz";
1221 md5 = "22f14be31b06549f80890e2c63a93834";
1208 md5 = "22f14be31b06549f80890e2c63a93834";
1222 };
1209 };
1223 meta = {
1210 meta = {
1224 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1211 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1225 };
1212 };
1226 };
1213 };
1227 pyramid-debugtoolbar = super.buildPythonPackage {
1214 pyramid-debugtoolbar = super.buildPythonPackage {
1228 name = "pyramid-debugtoolbar-3.0.5";
1215 name = "pyramid-debugtoolbar-3.0.5";
1229 buildInputs = with self; [];
1216 buildInputs = with self; [];
1230 doCheck = false;
1217 doCheck = false;
1231 propagatedBuildInputs = with self; [pyramid pyramid-mako repoze.lru Pygments];
1218 propagatedBuildInputs = with self; [pyramid pyramid-mako repoze.lru Pygments];
1232 src = fetchurl {
1219 src = fetchurl {
1233 url = "https://pypi.python.org/packages/64/0e/df00bfb55605900e7a2f7e4a18dd83575a6651688e297d5a0aa4c208fd7d/pyramid_debugtoolbar-3.0.5.tar.gz";
1220 url = "https://pypi.python.org/packages/64/0e/df00bfb55605900e7a2f7e4a18dd83575a6651688e297d5a0aa4c208fd7d/pyramid_debugtoolbar-3.0.5.tar.gz";
1234 md5 = "aebab8c3bfdc6f89e4d3adc1d126538e";
1221 md5 = "aebab8c3bfdc6f89e4d3adc1d126538e";
1235 };
1222 };
1236 meta = {
1223 meta = {
1237 license = [ { fullName = "Repoze Public License"; } pkgs.lib.licenses.bsdOriginal ];
1224 license = [ { fullName = "Repoze Public License"; } pkgs.lib.licenses.bsdOriginal ];
1238 };
1225 };
1239 };
1226 };
1240 pyramid-jinja2 = super.buildPythonPackage {
1227 pyramid-jinja2 = super.buildPythonPackage {
1241 name = "pyramid-jinja2-2.5";
1228 name = "pyramid-jinja2-2.5";
1242 buildInputs = with self; [];
1229 buildInputs = with self; [];
1243 doCheck = false;
1230 doCheck = false;
1244 propagatedBuildInputs = with self; [pyramid zope.deprecation Jinja2 MarkupSafe];
1231 propagatedBuildInputs = with self; [pyramid zope.deprecation Jinja2 MarkupSafe];
1245 src = fetchurl {
1232 src = fetchurl {
1246 url = "https://pypi.python.org/packages/a1/80/595e26ffab7deba7208676b6936b7e5a721875710f982e59899013cae1ed/pyramid_jinja2-2.5.tar.gz";
1233 url = "https://pypi.python.org/packages/a1/80/595e26ffab7deba7208676b6936b7e5a721875710f982e59899013cae1ed/pyramid_jinja2-2.5.tar.gz";
1247 md5 = "07cb6547204ac5e6f0b22a954ccee928";
1234 md5 = "07cb6547204ac5e6f0b22a954ccee928";
1248 };
1235 };
1249 meta = {
1236 meta = {
1250 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1237 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1251 };
1238 };
1252 };
1239 };
1253 pyramid-mako = super.buildPythonPackage {
1240 pyramid-mako = super.buildPythonPackage {
1254 name = "pyramid-mako-1.0.2";
1241 name = "pyramid-mako-1.0.2";
1255 buildInputs = with self; [];
1242 buildInputs = with self; [];
1256 doCheck = false;
1243 doCheck = false;
1257 propagatedBuildInputs = with self; [pyramid Mako];
1244 propagatedBuildInputs = with self; [pyramid Mako];
1258 src = fetchurl {
1245 src = fetchurl {
1259 url = "https://pypi.python.org/packages/f1/92/7e69bcf09676d286a71cb3bbb887b16595b96f9ba7adbdc239ffdd4b1eb9/pyramid_mako-1.0.2.tar.gz";
1246 url = "https://pypi.python.org/packages/f1/92/7e69bcf09676d286a71cb3bbb887b16595b96f9ba7adbdc239ffdd4b1eb9/pyramid_mako-1.0.2.tar.gz";
1260 md5 = "ee25343a97eb76bd90abdc2a774eb48a";
1247 md5 = "ee25343a97eb76bd90abdc2a774eb48a";
1261 };
1248 };
1262 meta = {
1249 meta = {
1263 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1250 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1264 };
1251 };
1265 };
1252 };
1266 pysqlite = super.buildPythonPackage {
1253 pysqlite = super.buildPythonPackage {
1267 name = "pysqlite-2.6.3";
1254 name = "pysqlite-2.6.3";
1268 buildInputs = with self; [];
1255 buildInputs = with self; [];
1269 doCheck = false;
1256 doCheck = false;
1270 propagatedBuildInputs = with self; [];
1257 propagatedBuildInputs = with self; [];
1271 src = fetchurl {
1258 src = fetchurl {
1272 url = "https://pypi.python.org/packages/5c/a6/1c429cd4c8069cf4bfbd0eb4d592b3f4042155a8202df83d7e9b93aa3dc2/pysqlite-2.6.3.tar.gz";
1259 url = "https://pypi.python.org/packages/5c/a6/1c429cd4c8069cf4bfbd0eb4d592b3f4042155a8202df83d7e9b93aa3dc2/pysqlite-2.6.3.tar.gz";
1273 md5 = "7ff1cedee74646b50117acff87aa1cfa";
1260 md5 = "7ff1cedee74646b50117acff87aa1cfa";
1274 };
1261 };
1275 meta = {
1262 meta = {
1276 license = [ { fullName = "zlib/libpng License"; } { fullName = "zlib/libpng license"; } ];
1263 license = [ { fullName = "zlib/libpng License"; } { fullName = "zlib/libpng license"; } ];
1277 };
1264 };
1278 };
1265 };
1279 pytest = super.buildPythonPackage {
1266 pytest = super.buildPythonPackage {
1280 name = "pytest-3.0.5";
1267 name = "pytest-3.0.5";
1281 buildInputs = with self; [];
1268 buildInputs = with self; [];
1282 doCheck = false;
1269 doCheck = false;
1283 propagatedBuildInputs = with self; [py];
1270 propagatedBuildInputs = with self; [py];
1284 src = fetchurl {
1271 src = fetchurl {
1285 url = "https://pypi.python.org/packages/a8/87/b7ca49efe52d2b4169f2bfc49aa5e384173c4619ea8e635f123a0dac5b75/pytest-3.0.5.tar.gz";
1272 url = "https://pypi.python.org/packages/a8/87/b7ca49efe52d2b4169f2bfc49aa5e384173c4619ea8e635f123a0dac5b75/pytest-3.0.5.tar.gz";
1286 md5 = "cefd527b59332688bf5db4a10aa8a7cb";
1273 md5 = "cefd527b59332688bf5db4a10aa8a7cb";
1287 };
1274 };
1288 meta = {
1275 meta = {
1289 license = [ pkgs.lib.licenses.mit ];
1276 license = [ pkgs.lib.licenses.mit ];
1290 };
1277 };
1291 };
1278 };
1292 pytest-catchlog = super.buildPythonPackage {
1279 pytest-catchlog = super.buildPythonPackage {
1293 name = "pytest-catchlog-1.2.2";
1280 name = "pytest-catchlog-1.2.2";
1294 buildInputs = with self; [];
1281 buildInputs = with self; [];
1295 doCheck = false;
1282 doCheck = false;
1296 propagatedBuildInputs = with self; [py pytest];
1283 propagatedBuildInputs = with self; [py pytest];
1297 src = fetchurl {
1284 src = fetchurl {
1298 url = "https://pypi.python.org/packages/f2/2b/2faccdb1a978fab9dd0bf31cca9f6847fbe9184a0bdcc3011ac41dd44191/pytest-catchlog-1.2.2.zip";
1285 url = "https://pypi.python.org/packages/f2/2b/2faccdb1a978fab9dd0bf31cca9f6847fbe9184a0bdcc3011ac41dd44191/pytest-catchlog-1.2.2.zip";
1299 md5 = "09d890c54c7456c818102b7ff8c182c8";
1286 md5 = "09d890c54c7456c818102b7ff8c182c8";
1300 };
1287 };
1301 meta = {
1288 meta = {
1302 license = [ pkgs.lib.licenses.mit ];
1289 license = [ pkgs.lib.licenses.mit ];
1303 };
1290 };
1304 };
1291 };
1305 pytest-cov = super.buildPythonPackage {
1292 pytest-cov = super.buildPythonPackage {
1306 name = "pytest-cov-2.4.0";
1293 name = "pytest-cov-2.4.0";
1307 buildInputs = with self; [];
1294 buildInputs = with self; [];
1308 doCheck = false;
1295 doCheck = false;
1309 propagatedBuildInputs = with self; [pytest coverage];
1296 propagatedBuildInputs = with self; [pytest coverage];
1310 src = fetchurl {
1297 src = fetchurl {
1311 url = "https://pypi.python.org/packages/00/c0/2bfd1fcdb9d407b8ac8185b1cb5ff458105c6b207a9a7f0e13032de9828f/pytest-cov-2.4.0.tar.gz";
1298 url = "https://pypi.python.org/packages/00/c0/2bfd1fcdb9d407b8ac8185b1cb5ff458105c6b207a9a7f0e13032de9828f/pytest-cov-2.4.0.tar.gz";
1312 md5 = "2fda09677d232acc99ec1b3c5831e33f";
1299 md5 = "2fda09677d232acc99ec1b3c5831e33f";
1313 };
1300 };
1314 meta = {
1301 meta = {
1315 license = [ pkgs.lib.licenses.bsdOriginal pkgs.lib.licenses.mit ];
1302 license = [ pkgs.lib.licenses.bsdOriginal pkgs.lib.licenses.mit ];
1316 };
1303 };
1317 };
1304 };
1318 pytest-profiling = super.buildPythonPackage {
1305 pytest-profiling = super.buildPythonPackage {
1319 name = "pytest-profiling-1.2.2";
1306 name = "pytest-profiling-1.2.2";
1320 buildInputs = with self; [];
1307 buildInputs = with self; [];
1321 doCheck = false;
1308 doCheck = false;
1322 propagatedBuildInputs = with self; [six pytest gprof2dot];
1309 propagatedBuildInputs = with self; [six pytest gprof2dot];
1323 src = fetchurl {
1310 src = fetchurl {
1324 url = "https://pypi.python.org/packages/73/e8/804681323bac0bc45c520ec34185ba8469008942266d0074699b204835c1/pytest-profiling-1.2.2.tar.gz";
1311 url = "https://pypi.python.org/packages/73/e8/804681323bac0bc45c520ec34185ba8469008942266d0074699b204835c1/pytest-profiling-1.2.2.tar.gz";
1325 md5 = "0a16d7dda2d23b91e9730fa4558cf728";
1312 md5 = "0a16d7dda2d23b91e9730fa4558cf728";
1326 };
1313 };
1327 meta = {
1314 meta = {
1328 license = [ pkgs.lib.licenses.mit ];
1315 license = [ pkgs.lib.licenses.mit ];
1329 };
1316 };
1330 };
1317 };
1331 pytest-runner = super.buildPythonPackage {
1318 pytest-runner = super.buildPythonPackage {
1332 name = "pytest-runner-2.9";
1319 name = "pytest-runner-2.9";
1333 buildInputs = with self; [];
1320 buildInputs = with self; [];
1334 doCheck = false;
1321 doCheck = false;
1335 propagatedBuildInputs = with self; [];
1322 propagatedBuildInputs = with self; [];
1336 src = fetchurl {
1323 src = fetchurl {
1337 url = "https://pypi.python.org/packages/11/d4/c335ddf94463e451109e3494e909765c3e5205787b772e3b25ee8601b86a/pytest-runner-2.9.tar.gz";
1324 url = "https://pypi.python.org/packages/11/d4/c335ddf94463e451109e3494e909765c3e5205787b772e3b25ee8601b86a/pytest-runner-2.9.tar.gz";
1338 md5 = "2212a2e34404b0960b2fdc2c469247b2";
1325 md5 = "2212a2e34404b0960b2fdc2c469247b2";
1339 };
1326 };
1340 meta = {
1327 meta = {
1341 license = [ pkgs.lib.licenses.mit ];
1328 license = [ pkgs.lib.licenses.mit ];
1342 };
1329 };
1343 };
1330 };
1344 pytest-sugar = super.buildPythonPackage {
1331 pytest-sugar = super.buildPythonPackage {
1345 name = "pytest-sugar-0.7.1";
1332 name = "pytest-sugar-0.7.1";
1346 buildInputs = with self; [];
1333 buildInputs = with self; [];
1347 doCheck = false;
1334 doCheck = false;
1348 propagatedBuildInputs = with self; [pytest termcolor];
1335 propagatedBuildInputs = with self; [pytest termcolor];
1349 src = fetchurl {
1336 src = fetchurl {
1350 url = "https://pypi.python.org/packages/03/97/05d988b4fa870e7373e8ee4582408543b9ca2bd35c3c67b569369c6f9c49/pytest-sugar-0.7.1.tar.gz";
1337 url = "https://pypi.python.org/packages/03/97/05d988b4fa870e7373e8ee4582408543b9ca2bd35c3c67b569369c6f9c49/pytest-sugar-0.7.1.tar.gz";
1351 md5 = "7400f7c11f3d572b2c2a3b60352d35fe";
1338 md5 = "7400f7c11f3d572b2c2a3b60352d35fe";
1352 };
1339 };
1353 meta = {
1340 meta = {
1354 license = [ pkgs.lib.licenses.bsdOriginal ];
1341 license = [ pkgs.lib.licenses.bsdOriginal ];
1355 };
1342 };
1356 };
1343 };
1357 pytest-timeout = super.buildPythonPackage {
1344 pytest-timeout = super.buildPythonPackage {
1358 name = "pytest-timeout-1.2.0";
1345 name = "pytest-timeout-1.2.0";
1359 buildInputs = with self; [];
1346 buildInputs = with self; [];
1360 doCheck = false;
1347 doCheck = false;
1361 propagatedBuildInputs = with self; [pytest];
1348 propagatedBuildInputs = with self; [pytest];
1362 src = fetchurl {
1349 src = fetchurl {
1363 url = "https://pypi.python.org/packages/cc/b7/b2a61365ea6b6d2e8881360ae7ed8dad0327ad2df89f2f0be4a02304deb2/pytest-timeout-1.2.0.tar.gz";
1350 url = "https://pypi.python.org/packages/cc/b7/b2a61365ea6b6d2e8881360ae7ed8dad0327ad2df89f2f0be4a02304deb2/pytest-timeout-1.2.0.tar.gz";
1364 md5 = "83607d91aa163562c7ee835da57d061d";
1351 md5 = "83607d91aa163562c7ee835da57d061d";
1365 };
1352 };
1366 meta = {
1353 meta = {
1367 license = [ pkgs.lib.licenses.mit { fullName = "DFSG approved"; } ];
1354 license = [ pkgs.lib.licenses.mit { fullName = "DFSG approved"; } ];
1368 };
1355 };
1369 };
1356 };
1370 python-dateutil = super.buildPythonPackage {
1357 python-dateutil = super.buildPythonPackage {
1371 name = "python-dateutil-1.5";
1358 name = "python-dateutil-1.5";
1372 buildInputs = with self; [];
1359 buildInputs = with self; [];
1373 doCheck = false;
1360 doCheck = false;
1374 propagatedBuildInputs = with self; [];
1361 propagatedBuildInputs = with self; [];
1375 src = fetchurl {
1362 src = fetchurl {
1376 url = "https://pypi.python.org/packages/b4/7c/df59c89a753eb33c7c44e1dd42de0e9bc2ccdd5a4d576e0bfad97cc280cb/python-dateutil-1.5.tar.gz";
1363 url = "https://pypi.python.org/packages/b4/7c/df59c89a753eb33c7c44e1dd42de0e9bc2ccdd5a4d576e0bfad97cc280cb/python-dateutil-1.5.tar.gz";
1377 md5 = "0dcb1de5e5cad69490a3b6ab63f0cfa5";
1364 md5 = "0dcb1de5e5cad69490a3b6ab63f0cfa5";
1378 };
1365 };
1379 meta = {
1366 meta = {
1380 license = [ pkgs.lib.licenses.psfl ];
1367 license = [ pkgs.lib.licenses.psfl ];
1381 };
1368 };
1382 };
1369 };
1383 python-editor = super.buildPythonPackage {
1370 python-editor = super.buildPythonPackage {
1384 name = "python-editor-1.0.3";
1371 name = "python-editor-1.0.3";
1385 buildInputs = with self; [];
1372 buildInputs = with self; [];
1386 doCheck = false;
1373 doCheck = false;
1387 propagatedBuildInputs = with self; [];
1374 propagatedBuildInputs = with self; [];
1388 src = fetchurl {
1375 src = fetchurl {
1389 url = "https://pypi.python.org/packages/65/1e/adf6e000ea5dc909aa420352d6ba37f16434c8a3c2fa030445411a1ed545/python-editor-1.0.3.tar.gz";
1376 url = "https://pypi.python.org/packages/65/1e/adf6e000ea5dc909aa420352d6ba37f16434c8a3c2fa030445411a1ed545/python-editor-1.0.3.tar.gz";
1390 md5 = "0aca5f2ef176ce68e98a5b7e31372835";
1377 md5 = "0aca5f2ef176ce68e98a5b7e31372835";
1391 };
1378 };
1392 meta = {
1379 meta = {
1393 license = [ pkgs.lib.licenses.asl20 { fullName = "Apache"; } ];
1380 license = [ pkgs.lib.licenses.asl20 { fullName = "Apache"; } ];
1394 };
1381 };
1395 };
1382 };
1396 python-ldap = super.buildPythonPackage {
1383 python-ldap = super.buildPythonPackage {
1397 name = "python-ldap-2.4.19";
1384 name = "python-ldap-2.4.19";
1398 buildInputs = with self; [];
1385 buildInputs = with self; [];
1399 doCheck = false;
1386 doCheck = false;
1400 propagatedBuildInputs = with self; [setuptools];
1387 propagatedBuildInputs = with self; [setuptools];
1401 src = fetchurl {
1388 src = fetchurl {
1402 url = "https://pypi.python.org/packages/42/81/1b64838c82e64f14d4e246ff00b52e650a35c012551b891ada2b85d40737/python-ldap-2.4.19.tar.gz";
1389 url = "https://pypi.python.org/packages/42/81/1b64838c82e64f14d4e246ff00b52e650a35c012551b891ada2b85d40737/python-ldap-2.4.19.tar.gz";
1403 md5 = "b941bf31d09739492aa19ef679e94ae3";
1390 md5 = "b941bf31d09739492aa19ef679e94ae3";
1404 };
1391 };
1405 meta = {
1392 meta = {
1406 license = [ pkgs.lib.licenses.psfl ];
1393 license = [ pkgs.lib.licenses.psfl ];
1407 };
1394 };
1408 };
1395 };
1409 python-memcached = super.buildPythonPackage {
1396 python-memcached = super.buildPythonPackage {
1410 name = "python-memcached-1.57";
1397 name = "python-memcached-1.57";
1411 buildInputs = with self; [];
1398 buildInputs = with self; [];
1412 doCheck = false;
1399 doCheck = false;
1413 propagatedBuildInputs = with self; [six];
1400 propagatedBuildInputs = with self; [six];
1414 src = fetchurl {
1401 src = fetchurl {
1415 url = "https://pypi.python.org/packages/52/9d/eebc0dcbc5c7c66840ad207dfc1baa376dadb74912484bff73819cce01e6/python-memcached-1.57.tar.gz";
1402 url = "https://pypi.python.org/packages/52/9d/eebc0dcbc5c7c66840ad207dfc1baa376dadb74912484bff73819cce01e6/python-memcached-1.57.tar.gz";
1416 md5 = "de21f64b42b2d961f3d4ad7beb5468a1";
1403 md5 = "de21f64b42b2d961f3d4ad7beb5468a1";
1417 };
1404 };
1418 meta = {
1405 meta = {
1419 license = [ pkgs.lib.licenses.psfl ];
1406 license = [ pkgs.lib.licenses.psfl ];
1420 };
1407 };
1421 };
1408 };
1422 python-pam = super.buildPythonPackage {
1409 python-pam = super.buildPythonPackage {
1423 name = "python-pam-1.8.2";
1410 name = "python-pam-1.8.2";
1424 buildInputs = with self; [];
1411 buildInputs = with self; [];
1425 doCheck = false;
1412 doCheck = false;
1426 propagatedBuildInputs = with self; [];
1413 propagatedBuildInputs = with self; [];
1427 src = fetchurl {
1414 src = fetchurl {
1428 url = "https://pypi.python.org/packages/de/8c/f8f5d38b4f26893af267ea0b39023d4951705ab0413a39e0cf7cf4900505/python-pam-1.8.2.tar.gz";
1415 url = "https://pypi.python.org/packages/de/8c/f8f5d38b4f26893af267ea0b39023d4951705ab0413a39e0cf7cf4900505/python-pam-1.8.2.tar.gz";
1429 md5 = "db71b6b999246fb05d78ecfbe166629d";
1416 md5 = "db71b6b999246fb05d78ecfbe166629d";
1430 };
1417 };
1431 meta = {
1418 meta = {
1432 license = [ { fullName = "License :: OSI Approved :: MIT License"; } pkgs.lib.licenses.mit ];
1419 license = [ { fullName = "License :: OSI Approved :: MIT License"; } pkgs.lib.licenses.mit ];
1433 };
1420 };
1434 };
1421 };
1435 pytz = super.buildPythonPackage {
1422 pytz = super.buildPythonPackage {
1436 name = "pytz-2015.4";
1423 name = "pytz-2015.4";
1437 buildInputs = with self; [];
1424 buildInputs = with self; [];
1438 doCheck = false;
1425 doCheck = false;
1439 propagatedBuildInputs = with self; [];
1426 propagatedBuildInputs = with self; [];
1440 src = fetchurl {
1427 src = fetchurl {
1441 url = "https://pypi.python.org/packages/7e/1a/f43b5c92df7b156822030fed151327ea096bcf417e45acc23bd1df43472f/pytz-2015.4.zip";
1428 url = "https://pypi.python.org/packages/7e/1a/f43b5c92df7b156822030fed151327ea096bcf417e45acc23bd1df43472f/pytz-2015.4.zip";
1442 md5 = "233f2a2b370d03f9b5911700cc9ebf3c";
1429 md5 = "233f2a2b370d03f9b5911700cc9ebf3c";
1443 };
1430 };
1444 meta = {
1431 meta = {
1445 license = [ pkgs.lib.licenses.mit ];
1432 license = [ pkgs.lib.licenses.mit ];
1446 };
1433 };
1447 };
1434 };
1448 pyzmq = super.buildPythonPackage {
1435 pyzmq = super.buildPythonPackage {
1449 name = "pyzmq-14.6.0";
1436 name = "pyzmq-14.6.0";
1450 buildInputs = with self; [];
1437 buildInputs = with self; [];
1451 doCheck = false;
1438 doCheck = false;
1452 propagatedBuildInputs = with self; [];
1439 propagatedBuildInputs = with self; [];
1453 src = fetchurl {
1440 src = fetchurl {
1454 url = "https://pypi.python.org/packages/8a/3b/5463d5a9d712cd8bbdac335daece0d69f6a6792da4e3dd89956c0db4e4e6/pyzmq-14.6.0.tar.gz";
1441 url = "https://pypi.python.org/packages/8a/3b/5463d5a9d712cd8bbdac335daece0d69f6a6792da4e3dd89956c0db4e4e6/pyzmq-14.6.0.tar.gz";
1455 md5 = "395b5de95a931afa5b14c9349a5b8024";
1442 md5 = "395b5de95a931afa5b14c9349a5b8024";
1456 };
1443 };
1457 meta = {
1444 meta = {
1458 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "LGPL+BSD"; } { fullName = "GNU Library or Lesser General Public License (LGPL)"; } ];
1445 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "LGPL+BSD"; } { fullName = "GNU Library or Lesser General Public License (LGPL)"; } ];
1459 };
1446 };
1460 };
1447 };
1461 recaptcha-client = super.buildPythonPackage {
1448 recaptcha-client = super.buildPythonPackage {
1462 name = "recaptcha-client-1.0.6";
1449 name = "recaptcha-client-1.0.6";
1463 buildInputs = with self; [];
1450 buildInputs = with self; [];
1464 doCheck = false;
1451 doCheck = false;
1465 propagatedBuildInputs = with self; [];
1452 propagatedBuildInputs = with self; [];
1466 src = fetchurl {
1453 src = fetchurl {
1467 url = "https://pypi.python.org/packages/0a/ea/5f2fbbfd894bdac1c68ef8d92019066cfcf9fbff5fe3d728d2b5c25c8db4/recaptcha-client-1.0.6.tar.gz";
1454 url = "https://pypi.python.org/packages/0a/ea/5f2fbbfd894bdac1c68ef8d92019066cfcf9fbff5fe3d728d2b5c25c8db4/recaptcha-client-1.0.6.tar.gz";
1468 md5 = "74228180f7e1fb76c4d7089160b0d919";
1455 md5 = "74228180f7e1fb76c4d7089160b0d919";
1469 };
1456 };
1470 meta = {
1457 meta = {
1471 license = [ { fullName = "MIT/X11"; } ];
1458 license = [ { fullName = "MIT/X11"; } ];
1472 };
1459 };
1473 };
1460 };
1474 repoze.lru = super.buildPythonPackage {
1461 repoze.lru = super.buildPythonPackage {
1475 name = "repoze.lru-0.6";
1462 name = "repoze.lru-0.6";
1476 buildInputs = with self; [];
1463 buildInputs = with self; [];
1477 doCheck = false;
1464 doCheck = false;
1478 propagatedBuildInputs = with self; [];
1465 propagatedBuildInputs = with self; [];
1479 src = fetchurl {
1466 src = fetchurl {
1480 url = "https://pypi.python.org/packages/6e/1e/aa15cc90217e086dc8769872c8778b409812ff036bf021b15795638939e4/repoze.lru-0.6.tar.gz";
1467 url = "https://pypi.python.org/packages/6e/1e/aa15cc90217e086dc8769872c8778b409812ff036bf021b15795638939e4/repoze.lru-0.6.tar.gz";
1481 md5 = "2c3b64b17a8e18b405f55d46173e14dd";
1468 md5 = "2c3b64b17a8e18b405f55d46173e14dd";
1482 };
1469 };
1483 meta = {
1470 meta = {
1484 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1471 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1485 };
1472 };
1486 };
1473 };
1487 requests = super.buildPythonPackage {
1474 requests = super.buildPythonPackage {
1488 name = "requests-2.9.1";
1475 name = "requests-2.9.1";
1489 buildInputs = with self; [];
1476 buildInputs = with self; [];
1490 doCheck = false;
1477 doCheck = false;
1491 propagatedBuildInputs = with self; [];
1478 propagatedBuildInputs = with self; [];
1492 src = fetchurl {
1479 src = fetchurl {
1493 url = "https://pypi.python.org/packages/f9/6d/07c44fb1ebe04d069459a189e7dab9e4abfe9432adcd4477367c25332748/requests-2.9.1.tar.gz";
1480 url = "https://pypi.python.org/packages/f9/6d/07c44fb1ebe04d069459a189e7dab9e4abfe9432adcd4477367c25332748/requests-2.9.1.tar.gz";
1494 md5 = "0b7f480d19012ec52bab78292efd976d";
1481 md5 = "0b7f480d19012ec52bab78292efd976d";
1495 };
1482 };
1496 meta = {
1483 meta = {
1497 license = [ pkgs.lib.licenses.asl20 ];
1484 license = [ pkgs.lib.licenses.asl20 ];
1498 };
1485 };
1499 };
1486 };
1500 rhodecode-enterprise-ce = super.buildPythonPackage {
1487 rhodecode-enterprise-ce = super.buildPythonPackage {
1501 name = "rhodecode-enterprise-ce-4.7.0";
1488 name = "rhodecode-enterprise-ce-4.7.0";
1502 buildInputs = with self; [pytest py pytest-cov pytest-sugar pytest-runner pytest-catchlog pytest-profiling gprof2dot pytest-timeout mock WebTest cov-core coverage cssselect lxml configobj];
1489 buildInputs = with self; [pytest py pytest-cov pytest-sugar pytest-runner pytest-catchlog pytest-profiling gprof2dot pytest-timeout mock WebTest cov-core coverage cssselect lxml configobj];
1503 doCheck = true;
1490 doCheck = true;
1504 propagatedBuildInputs = with self; [Babel Beaker FormEncode Mako Markdown MarkupSafe MySQL-python Paste PasteDeploy PasteScript Pygments pygments-markdown-lexer Pylons Pyro4 Routes SQLAlchemy Tempita URLObject WebError WebHelpers WebHelpers2 WebOb WebTest Whoosh alembic amqplib anyjson appenlight-client authomatic backport-ipaddress celery channelstream colander decorator deform docutils gevent gunicorn infrae.cache ipython iso8601 kombu msgpack-python packaging psycopg2 py-gfm pycrypto pycurl pyparsing pyramid pyramid-debugtoolbar pyramid-mako pyramid-beaker pysqlite python-dateutil python-ldap python-memcached python-pam recaptcha-client repoze.lru requests simplejson subprocess32 waitress zope.cachedescriptors dogpile.cache dogpile.core psutil py-bcrypt];
1491 propagatedBuildInputs = with self; [Babel Beaker FormEncode Mako Markdown MarkupSafe MySQL-python Paste PasteDeploy PasteScript Pygments pygments-markdown-lexer Pylons Routes SQLAlchemy Tempita URLObject WebError WebHelpers WebHelpers2 WebOb WebTest Whoosh alembic amqplib anyjson appenlight-client authomatic backport-ipaddress celery channelstream colander decorator deform docutils gevent gunicorn infrae.cache ipython iso8601 kombu msgpack-python packaging psycopg2 py-gfm pycrypto pycurl pyparsing pyramid pyramid-debugtoolbar pyramid-mako pyramid-beaker pysqlite python-dateutil python-ldap python-memcached python-pam recaptcha-client repoze.lru requests simplejson subprocess32 waitress zope.cachedescriptors dogpile.cache dogpile.core psutil py-bcrypt];
1505 src = ./.;
1492 src = ./.;
1506 meta = {
1493 meta = {
1507 license = [ { fullName = "Affero GNU General Public License v3 or later (AGPLv3+)"; } { fullName = "AGPLv3, and Commercial License"; } ];
1494 license = [ { fullName = "Affero GNU General Public License v3 or later (AGPLv3+)"; } { fullName = "AGPLv3, and Commercial License"; } ];
1508 };
1495 };
1509 };
1496 };
1510 rhodecode-tools = super.buildPythonPackage {
1497 rhodecode-tools = super.buildPythonPackage {
1511 name = "rhodecode-tools-0.11.0";
1498 name = "rhodecode-tools-0.11.0";
1512 buildInputs = with self; [];
1499 buildInputs = with self; [];
1513 doCheck = false;
1500 doCheck = false;
1514 propagatedBuildInputs = with self; [click future six Mako MarkupSafe requests elasticsearch elasticsearch-dsl urllib3 Whoosh];
1501 propagatedBuildInputs = with self; [click future six Mako MarkupSafe requests elasticsearch elasticsearch-dsl urllib3 Whoosh];
1515 src = fetchurl {
1502 src = fetchurl {
1516 url = "https://code.rhodecode.com/rhodecode-tools-ce/archive/v0.11.0.tar.gz?md5=e5fd0a8363af08a0ced71b50ca9cce15";
1503 url = "https://code.rhodecode.com/rhodecode-tools-ce/archive/v0.11.0.tar.gz?md5=e5fd0a8363af08a0ced71b50ca9cce15";
1517 md5 = "e5fd0a8363af08a0ced71b50ca9cce15";
1504 md5 = "e5fd0a8363af08a0ced71b50ca9cce15";
1518 };
1505 };
1519 meta = {
1506 meta = {
1520 license = [ { fullName = "AGPLv3 and Proprietary"; } ];
1507 license = [ { fullName = "AGPLv3 and Proprietary"; } ];
1521 };
1508 };
1522 };
1509 };
1523 serpent = super.buildPythonPackage {
1524 name = "serpent-1.15";
1525 buildInputs = with self; [];
1526 doCheck = false;
1527 propagatedBuildInputs = with self; [];
1528 src = fetchurl {
1529 url = "https://pypi.python.org/packages/7b/38/b2b27673a882ff2ea5871bb3e3e6b496ebbaafd1612e51990ffb158b9254/serpent-1.15.tar.gz";
1530 md5 = "e27b1aad5c218e16442f52abb7c7053a";
1531 };
1532 meta = {
1533 license = [ pkgs.lib.licenses.mit ];
1534 };
1535 };
1536 setproctitle = super.buildPythonPackage {
1510 setproctitle = super.buildPythonPackage {
1537 name = "setproctitle-1.1.8";
1511 name = "setproctitle-1.1.8";
1538 buildInputs = with self; [];
1512 buildInputs = with self; [];
1539 doCheck = false;
1513 doCheck = false;
1540 propagatedBuildInputs = with self; [];
1514 propagatedBuildInputs = with self; [];
1541 src = fetchurl {
1515 src = fetchurl {
1542 url = "https://pypi.python.org/packages/33/c3/ad367a4f4f1ca90468863ae727ac62f6edb558fc09a003d344a02cfc6ea6/setproctitle-1.1.8.tar.gz";
1516 url = "https://pypi.python.org/packages/33/c3/ad367a4f4f1ca90468863ae727ac62f6edb558fc09a003d344a02cfc6ea6/setproctitle-1.1.8.tar.gz";
1543 md5 = "728f4c8c6031bbe56083a48594027edd";
1517 md5 = "728f4c8c6031bbe56083a48594027edd";
1544 };
1518 };
1545 meta = {
1519 meta = {
1546 license = [ pkgs.lib.licenses.bsdOriginal ];
1520 license = [ pkgs.lib.licenses.bsdOriginal ];
1547 };
1521 };
1548 };
1522 };
1549 setuptools = super.buildPythonPackage {
1523 setuptools = super.buildPythonPackage {
1550 name = "setuptools-30.1.0";
1524 name = "setuptools-30.1.0";
1551 buildInputs = with self; [];
1525 buildInputs = with self; [];
1552 doCheck = false;
1526 doCheck = false;
1553 propagatedBuildInputs = with self; [];
1527 propagatedBuildInputs = with self; [];
1554 src = fetchurl {
1528 src = fetchurl {
1555 url = "https://pypi.python.org/packages/1e/43/002c8616db9a3e7be23c2556e39b90a32bb40ba0dc652de1999d5334d372/setuptools-30.1.0.tar.gz";
1529 url = "https://pypi.python.org/packages/1e/43/002c8616db9a3e7be23c2556e39b90a32bb40ba0dc652de1999d5334d372/setuptools-30.1.0.tar.gz";
1556 md5 = "cac497f42e5096ac8df29e38d3f81c3e";
1530 md5 = "cac497f42e5096ac8df29e38d3f81c3e";
1557 };
1531 };
1558 meta = {
1532 meta = {
1559 license = [ pkgs.lib.licenses.mit ];
1533 license = [ pkgs.lib.licenses.mit ];
1560 };
1534 };
1561 };
1535 };
1562 setuptools-scm = super.buildPythonPackage {
1536 setuptools-scm = super.buildPythonPackage {
1563 name = "setuptools-scm-1.15.0";
1537 name = "setuptools-scm-1.15.0";
1564 buildInputs = with self; [];
1538 buildInputs = with self; [];
1565 doCheck = false;
1539 doCheck = false;
1566 propagatedBuildInputs = with self; [];
1540 propagatedBuildInputs = with self; [];
1567 src = fetchurl {
1541 src = fetchurl {
1568 url = "https://pypi.python.org/packages/80/b7/31b6ae5fcb188e37f7e31abe75f9be90490a5456a72860fa6e643f8a3cbc/setuptools_scm-1.15.0.tar.gz";
1542 url = "https://pypi.python.org/packages/80/b7/31b6ae5fcb188e37f7e31abe75f9be90490a5456a72860fa6e643f8a3cbc/setuptools_scm-1.15.0.tar.gz";
1569 md5 = "b6916c78ed6253d6602444fad4279c5b";
1543 md5 = "b6916c78ed6253d6602444fad4279c5b";
1570 };
1544 };
1571 meta = {
1545 meta = {
1572 license = [ pkgs.lib.licenses.mit ];
1546 license = [ pkgs.lib.licenses.mit ];
1573 };
1547 };
1574 };
1548 };
1575 simplegeneric = super.buildPythonPackage {
1549 simplegeneric = super.buildPythonPackage {
1576 name = "simplegeneric-0.8.1";
1550 name = "simplegeneric-0.8.1";
1577 buildInputs = with self; [];
1551 buildInputs = with self; [];
1578 doCheck = false;
1552 doCheck = false;
1579 propagatedBuildInputs = with self; [];
1553 propagatedBuildInputs = with self; [];
1580 src = fetchurl {
1554 src = fetchurl {
1581 url = "https://pypi.python.org/packages/3d/57/4d9c9e3ae9a255cd4e1106bb57e24056d3d0709fc01b2e3e345898e49d5b/simplegeneric-0.8.1.zip";
1555 url = "https://pypi.python.org/packages/3d/57/4d9c9e3ae9a255cd4e1106bb57e24056d3d0709fc01b2e3e345898e49d5b/simplegeneric-0.8.1.zip";
1582 md5 = "f9c1fab00fd981be588fc32759f474e3";
1556 md5 = "f9c1fab00fd981be588fc32759f474e3";
1583 };
1557 };
1584 meta = {
1558 meta = {
1585 license = [ pkgs.lib.licenses.zpt21 ];
1559 license = [ pkgs.lib.licenses.zpt21 ];
1586 };
1560 };
1587 };
1561 };
1588 simplejson = super.buildPythonPackage {
1562 simplejson = super.buildPythonPackage {
1589 name = "simplejson-3.7.2";
1563 name = "simplejson-3.7.2";
1590 buildInputs = with self; [];
1564 buildInputs = with self; [];
1591 doCheck = false;
1565 doCheck = false;
1592 propagatedBuildInputs = with self; [];
1566 propagatedBuildInputs = with self; [];
1593 src = fetchurl {
1567 src = fetchurl {
1594 url = "https://pypi.python.org/packages/6d/89/7f13f099344eea9d6722779a1f165087cb559598107844b1ac5dbd831fb1/simplejson-3.7.2.tar.gz";
1568 url = "https://pypi.python.org/packages/6d/89/7f13f099344eea9d6722779a1f165087cb559598107844b1ac5dbd831fb1/simplejson-3.7.2.tar.gz";
1595 md5 = "a5fc7d05d4cb38492285553def5d4b46";
1569 md5 = "a5fc7d05d4cb38492285553def5d4b46";
1596 };
1570 };
1597 meta = {
1571 meta = {
1598 license = [ { fullName = "Academic Free License (AFL)"; } pkgs.lib.licenses.mit ];
1572 license = [ { fullName = "Academic Free License (AFL)"; } pkgs.lib.licenses.mit ];
1599 };
1573 };
1600 };
1574 };
1601 six = super.buildPythonPackage {
1575 six = super.buildPythonPackage {
1602 name = "six-1.9.0";
1576 name = "six-1.9.0";
1603 buildInputs = with self; [];
1577 buildInputs = with self; [];
1604 doCheck = false;
1578 doCheck = false;
1605 propagatedBuildInputs = with self; [];
1579 propagatedBuildInputs = with self; [];
1606 src = fetchurl {
1580 src = fetchurl {
1607 url = "https://pypi.python.org/packages/16/64/1dc5e5976b17466fd7d712e59cbe9fb1e18bec153109e5ba3ed6c9102f1a/six-1.9.0.tar.gz";
1581 url = "https://pypi.python.org/packages/16/64/1dc5e5976b17466fd7d712e59cbe9fb1e18bec153109e5ba3ed6c9102f1a/six-1.9.0.tar.gz";
1608 md5 = "476881ef4012262dfc8adc645ee786c4";
1582 md5 = "476881ef4012262dfc8adc645ee786c4";
1609 };
1583 };
1610 meta = {
1584 meta = {
1611 license = [ pkgs.lib.licenses.mit ];
1585 license = [ pkgs.lib.licenses.mit ];
1612 };
1586 };
1613 };
1587 };
1614 subprocess32 = super.buildPythonPackage {
1588 subprocess32 = super.buildPythonPackage {
1615 name = "subprocess32-3.2.6";
1589 name = "subprocess32-3.2.6";
1616 buildInputs = with self; [];
1590 buildInputs = with self; [];
1617 doCheck = false;
1591 doCheck = false;
1618 propagatedBuildInputs = with self; [];
1592 propagatedBuildInputs = with self; [];
1619 src = fetchurl {
1593 src = fetchurl {
1620 url = "https://pypi.python.org/packages/28/8d/33ccbff51053f59ae6c357310cac0e79246bbed1d345ecc6188b176d72c3/subprocess32-3.2.6.tar.gz";
1594 url = "https://pypi.python.org/packages/28/8d/33ccbff51053f59ae6c357310cac0e79246bbed1d345ecc6188b176d72c3/subprocess32-3.2.6.tar.gz";
1621 md5 = "754c5ab9f533e764f931136974b618f1";
1595 md5 = "754c5ab9f533e764f931136974b618f1";
1622 };
1596 };
1623 meta = {
1597 meta = {
1624 license = [ pkgs.lib.licenses.psfl ];
1598 license = [ pkgs.lib.licenses.psfl ];
1625 };
1599 };
1626 };
1600 };
1627 supervisor = super.buildPythonPackage {
1601 supervisor = super.buildPythonPackage {
1628 name = "supervisor-3.3.1";
1602 name = "supervisor-3.3.1";
1629 buildInputs = with self; [];
1603 buildInputs = with self; [];
1630 doCheck = false;
1604 doCheck = false;
1631 propagatedBuildInputs = with self; [meld3];
1605 propagatedBuildInputs = with self; [meld3];
1632 src = fetchurl {
1606 src = fetchurl {
1633 url = "https://pypi.python.org/packages/80/37/964c0d53cbd328796b1aeb7abea4c0f7b0e8c7197ea9b0b9967b7d004def/supervisor-3.3.1.tar.gz";
1607 url = "https://pypi.python.org/packages/80/37/964c0d53cbd328796b1aeb7abea4c0f7b0e8c7197ea9b0b9967b7d004def/supervisor-3.3.1.tar.gz";
1634 md5 = "202f760f9bf4930ec06557bac73e5cf2";
1608 md5 = "202f760f9bf4930ec06557bac73e5cf2";
1635 };
1609 };
1636 meta = {
1610 meta = {
1637 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1611 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1638 };
1612 };
1639 };
1613 };
1640 termcolor = super.buildPythonPackage {
1614 termcolor = super.buildPythonPackage {
1641 name = "termcolor-1.1.0";
1615 name = "termcolor-1.1.0";
1642 buildInputs = with self; [];
1616 buildInputs = with self; [];
1643 doCheck = false;
1617 doCheck = false;
1644 propagatedBuildInputs = with self; [];
1618 propagatedBuildInputs = with self; [];
1645 src = fetchurl {
1619 src = fetchurl {
1646 url = "https://pypi.python.org/packages/8a/48/a76be51647d0eb9f10e2a4511bf3ffb8cc1e6b14e9e4fab46173aa79f981/termcolor-1.1.0.tar.gz";
1620 url = "https://pypi.python.org/packages/8a/48/a76be51647d0eb9f10e2a4511bf3ffb8cc1e6b14e9e4fab46173aa79f981/termcolor-1.1.0.tar.gz";
1647 md5 = "043e89644f8909d462fbbfa511c768df";
1621 md5 = "043e89644f8909d462fbbfa511c768df";
1648 };
1622 };
1649 meta = {
1623 meta = {
1650 license = [ pkgs.lib.licenses.mit ];
1624 license = [ pkgs.lib.licenses.mit ];
1651 };
1625 };
1652 };
1626 };
1653 traitlets = super.buildPythonPackage {
1627 traitlets = super.buildPythonPackage {
1654 name = "traitlets-4.3.1";
1628 name = "traitlets-4.3.1";
1655 buildInputs = with self; [];
1629 buildInputs = with self; [];
1656 doCheck = false;
1630 doCheck = false;
1657 propagatedBuildInputs = with self; [ipython-genutils six decorator enum34];
1631 propagatedBuildInputs = with self; [ipython-genutils six decorator enum34];
1658 src = fetchurl {
1632 src = fetchurl {
1659 url = "https://pypi.python.org/packages/b1/d6/5b5aa6d5c474691909b91493da1e8972e309c9f01ecfe4aeafd272eb3234/traitlets-4.3.1.tar.gz";
1633 url = "https://pypi.python.org/packages/b1/d6/5b5aa6d5c474691909b91493da1e8972e309c9f01ecfe4aeafd272eb3234/traitlets-4.3.1.tar.gz";
1660 md5 = "dd0b1b6e5d31ce446d55a4b5e5083c98";
1634 md5 = "dd0b1b6e5d31ce446d55a4b5e5083c98";
1661 };
1635 };
1662 meta = {
1636 meta = {
1663 license = [ pkgs.lib.licenses.bsdOriginal ];
1637 license = [ pkgs.lib.licenses.bsdOriginal ];
1664 };
1638 };
1665 };
1639 };
1666 transifex-client = super.buildPythonPackage {
1640 transifex-client = super.buildPythonPackage {
1667 name = "transifex-client-0.10";
1641 name = "transifex-client-0.10";
1668 buildInputs = with self; [];
1642 buildInputs = with self; [];
1669 doCheck = false;
1643 doCheck = false;
1670 propagatedBuildInputs = with self; [];
1644 propagatedBuildInputs = with self; [];
1671 src = fetchurl {
1645 src = fetchurl {
1672 url = "https://pypi.python.org/packages/f3/4e/7b925192aee656fb3e04fa6381c8b3dc40198047c3b4a356f6cfd642c809/transifex-client-0.10.tar.gz";
1646 url = "https://pypi.python.org/packages/f3/4e/7b925192aee656fb3e04fa6381c8b3dc40198047c3b4a356f6cfd642c809/transifex-client-0.10.tar.gz";
1673 md5 = "5549538d84b8eede6b254cd81ae024fa";
1647 md5 = "5549538d84b8eede6b254cd81ae024fa";
1674 };
1648 };
1675 meta = {
1649 meta = {
1676 license = [ pkgs.lib.licenses.gpl2 ];
1650 license = [ pkgs.lib.licenses.gpl2 ];
1677 };
1651 };
1678 };
1652 };
1679 translationstring = super.buildPythonPackage {
1653 translationstring = super.buildPythonPackage {
1680 name = "translationstring-1.3";
1654 name = "translationstring-1.3";
1681 buildInputs = with self; [];
1655 buildInputs = with self; [];
1682 doCheck = false;
1656 doCheck = false;
1683 propagatedBuildInputs = with self; [];
1657 propagatedBuildInputs = with self; [];
1684 src = fetchurl {
1658 src = fetchurl {
1685 url = "https://pypi.python.org/packages/5e/eb/bee578cc150b44c653b63f5ebe258b5d0d812ddac12497e5f80fcad5d0b4/translationstring-1.3.tar.gz";
1659 url = "https://pypi.python.org/packages/5e/eb/bee578cc150b44c653b63f5ebe258b5d0d812ddac12497e5f80fcad5d0b4/translationstring-1.3.tar.gz";
1686 md5 = "a4b62e0f3c189c783a1685b3027f7c90";
1660 md5 = "a4b62e0f3c189c783a1685b3027f7c90";
1687 };
1661 };
1688 meta = {
1662 meta = {
1689 license = [ { fullName = "BSD-like (http://repoze.org/license.html)"; } ];
1663 license = [ { fullName = "BSD-like (http://repoze.org/license.html)"; } ];
1690 };
1664 };
1691 };
1665 };
1692 trollius = super.buildPythonPackage {
1666 trollius = super.buildPythonPackage {
1693 name = "trollius-1.0.4";
1667 name = "trollius-1.0.4";
1694 buildInputs = with self; [];
1668 buildInputs = with self; [];
1695 doCheck = false;
1669 doCheck = false;
1696 propagatedBuildInputs = with self; [futures];
1670 propagatedBuildInputs = with self; [futures];
1697 src = fetchurl {
1671 src = fetchurl {
1698 url = "https://pypi.python.org/packages/aa/e6/4141db437f55e6ee7a3fb69663239e3fde7841a811b4bef293145ad6c836/trollius-1.0.4.tar.gz";
1672 url = "https://pypi.python.org/packages/aa/e6/4141db437f55e6ee7a3fb69663239e3fde7841a811b4bef293145ad6c836/trollius-1.0.4.tar.gz";
1699 md5 = "3631a464d49d0cbfd30ab2918ef2b783";
1673 md5 = "3631a464d49d0cbfd30ab2918ef2b783";
1700 };
1674 };
1701 meta = {
1675 meta = {
1702 license = [ pkgs.lib.licenses.asl20 ];
1676 license = [ pkgs.lib.licenses.asl20 ];
1703 };
1677 };
1704 };
1678 };
1705 uWSGI = super.buildPythonPackage {
1679 uWSGI = super.buildPythonPackage {
1706 name = "uWSGI-2.0.11.2";
1680 name = "uWSGI-2.0.11.2";
1707 buildInputs = with self; [];
1681 buildInputs = with self; [];
1708 doCheck = false;
1682 doCheck = false;
1709 propagatedBuildInputs = with self; [];
1683 propagatedBuildInputs = with self; [];
1710 src = fetchurl {
1684 src = fetchurl {
1711 url = "https://pypi.python.org/packages/9b/78/918db0cfab0546afa580c1e565209c49aaf1476bbfe491314eadbe47c556/uwsgi-2.0.11.2.tar.gz";
1685 url = "https://pypi.python.org/packages/9b/78/918db0cfab0546afa580c1e565209c49aaf1476bbfe491314eadbe47c556/uwsgi-2.0.11.2.tar.gz";
1712 md5 = "1f02dcbee7f6f61de4b1fd68350cf16f";
1686 md5 = "1f02dcbee7f6f61de4b1fd68350cf16f";
1713 };
1687 };
1714 meta = {
1688 meta = {
1715 license = [ pkgs.lib.licenses.gpl2 ];
1689 license = [ pkgs.lib.licenses.gpl2 ];
1716 };
1690 };
1717 };
1691 };
1718 urllib3 = super.buildPythonPackage {
1692 urllib3 = super.buildPythonPackage {
1719 name = "urllib3-1.16";
1693 name = "urllib3-1.16";
1720 buildInputs = with self; [];
1694 buildInputs = with self; [];
1721 doCheck = false;
1695 doCheck = false;
1722 propagatedBuildInputs = with self; [];
1696 propagatedBuildInputs = with self; [];
1723 src = fetchurl {
1697 src = fetchurl {
1724 url = "https://pypi.python.org/packages/3b/f0/e763169124e3f5db0926bc3dbfcd580a105f9ca44cf5d8e6c7a803c9f6b5/urllib3-1.16.tar.gz";
1698 url = "https://pypi.python.org/packages/3b/f0/e763169124e3f5db0926bc3dbfcd580a105f9ca44cf5d8e6c7a803c9f6b5/urllib3-1.16.tar.gz";
1725 md5 = "fcaab1c5385c57deeb7053d3d7d81d59";
1699 md5 = "fcaab1c5385c57deeb7053d3d7d81d59";
1726 };
1700 };
1727 meta = {
1701 meta = {
1728 license = [ pkgs.lib.licenses.mit ];
1702 license = [ pkgs.lib.licenses.mit ];
1729 };
1703 };
1730 };
1704 };
1731 venusian = super.buildPythonPackage {
1705 venusian = super.buildPythonPackage {
1732 name = "venusian-1.0";
1706 name = "venusian-1.0";
1733 buildInputs = with self; [];
1707 buildInputs = with self; [];
1734 doCheck = false;
1708 doCheck = false;
1735 propagatedBuildInputs = with self; [];
1709 propagatedBuildInputs = with self; [];
1736 src = fetchurl {
1710 src = fetchurl {
1737 url = "https://pypi.python.org/packages/86/20/1948e0dfc4930ddde3da8c33612f6a5717c0b4bc28f591a5c5cf014dd390/venusian-1.0.tar.gz";
1711 url = "https://pypi.python.org/packages/86/20/1948e0dfc4930ddde3da8c33612f6a5717c0b4bc28f591a5c5cf014dd390/venusian-1.0.tar.gz";
1738 md5 = "dccf2eafb7113759d60c86faf5538756";
1712 md5 = "dccf2eafb7113759d60c86faf5538756";
1739 };
1713 };
1740 meta = {
1714 meta = {
1741 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1715 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1742 };
1716 };
1743 };
1717 };
1744 waitress = super.buildPythonPackage {
1718 waitress = super.buildPythonPackage {
1745 name = "waitress-1.0.1";
1719 name = "waitress-1.0.1";
1746 buildInputs = with self; [];
1720 buildInputs = with self; [];
1747 doCheck = false;
1721 doCheck = false;
1748 propagatedBuildInputs = with self; [];
1722 propagatedBuildInputs = with self; [];
1749 src = fetchurl {
1723 src = fetchurl {
1750 url = "https://pypi.python.org/packages/78/7d/84d11b96c3f60164dec3bef4a859a03aeae0231aa93f57fbe0d05fa4ff36/waitress-1.0.1.tar.gz";
1724 url = "https://pypi.python.org/packages/78/7d/84d11b96c3f60164dec3bef4a859a03aeae0231aa93f57fbe0d05fa4ff36/waitress-1.0.1.tar.gz";
1751 md5 = "dda92358a7569669086155923a46e57c";
1725 md5 = "dda92358a7569669086155923a46e57c";
1752 };
1726 };
1753 meta = {
1727 meta = {
1754 license = [ pkgs.lib.licenses.zpt21 ];
1728 license = [ pkgs.lib.licenses.zpt21 ];
1755 };
1729 };
1756 };
1730 };
1757 wcwidth = super.buildPythonPackage {
1731 wcwidth = super.buildPythonPackage {
1758 name = "wcwidth-0.1.7";
1732 name = "wcwidth-0.1.7";
1759 buildInputs = with self; [];
1733 buildInputs = with self; [];
1760 doCheck = false;
1734 doCheck = false;
1761 propagatedBuildInputs = with self; [];
1735 propagatedBuildInputs = with self; [];
1762 src = fetchurl {
1736 src = fetchurl {
1763 url = "https://pypi.python.org/packages/55/11/e4a2bb08bb450fdbd42cc709dd40de4ed2c472cf0ccb9e64af22279c5495/wcwidth-0.1.7.tar.gz";
1737 url = "https://pypi.python.org/packages/55/11/e4a2bb08bb450fdbd42cc709dd40de4ed2c472cf0ccb9e64af22279c5495/wcwidth-0.1.7.tar.gz";
1764 md5 = "b3b6a0a08f0c8a34d1de8cf44150a4ad";
1738 md5 = "b3b6a0a08f0c8a34d1de8cf44150a4ad";
1765 };
1739 };
1766 meta = {
1740 meta = {
1767 license = [ pkgs.lib.licenses.mit ];
1741 license = [ pkgs.lib.licenses.mit ];
1768 };
1742 };
1769 };
1743 };
1770 ws4py = super.buildPythonPackage {
1744 ws4py = super.buildPythonPackage {
1771 name = "ws4py-0.3.5";
1745 name = "ws4py-0.3.5";
1772 buildInputs = with self; [];
1746 buildInputs = with self; [];
1773 doCheck = false;
1747 doCheck = false;
1774 propagatedBuildInputs = with self; [];
1748 propagatedBuildInputs = with self; [];
1775 src = fetchurl {
1749 src = fetchurl {
1776 url = "https://pypi.python.org/packages/b6/4f/34af703be86939629479e74d6e650e39f3bd73b3b09212c34e5125764cbc/ws4py-0.3.5.zip";
1750 url = "https://pypi.python.org/packages/b6/4f/34af703be86939629479e74d6e650e39f3bd73b3b09212c34e5125764cbc/ws4py-0.3.5.zip";
1777 md5 = "a261b75c20b980e55ce7451a3576a867";
1751 md5 = "a261b75c20b980e55ce7451a3576a867";
1778 };
1752 };
1779 meta = {
1753 meta = {
1780 license = [ pkgs.lib.licenses.bsdOriginal ];
1754 license = [ pkgs.lib.licenses.bsdOriginal ];
1781 };
1755 };
1782 };
1756 };
1783 wsgiref = super.buildPythonPackage {
1757 wsgiref = super.buildPythonPackage {
1784 name = "wsgiref-0.1.2";
1758 name = "wsgiref-0.1.2";
1785 buildInputs = with self; [];
1759 buildInputs = with self; [];
1786 doCheck = false;
1760 doCheck = false;
1787 propagatedBuildInputs = with self; [];
1761 propagatedBuildInputs = with self; [];
1788 src = fetchurl {
1762 src = fetchurl {
1789 url = "https://pypi.python.org/packages/41/9e/309259ce8dff8c596e8c26df86dbc4e848b9249fd36797fd60be456f03fc/wsgiref-0.1.2.zip";
1763 url = "https://pypi.python.org/packages/41/9e/309259ce8dff8c596e8c26df86dbc4e848b9249fd36797fd60be456f03fc/wsgiref-0.1.2.zip";
1790 md5 = "29b146e6ebd0f9fb119fe321f7bcf6cb";
1764 md5 = "29b146e6ebd0f9fb119fe321f7bcf6cb";
1791 };
1765 };
1792 meta = {
1766 meta = {
1793 license = [ { fullName = "PSF or ZPL"; } ];
1767 license = [ { fullName = "PSF or ZPL"; } ];
1794 };
1768 };
1795 };
1769 };
1796 zope.cachedescriptors = super.buildPythonPackage {
1770 zope.cachedescriptors = super.buildPythonPackage {
1797 name = "zope.cachedescriptors-4.0.0";
1771 name = "zope.cachedescriptors-4.0.0";
1798 buildInputs = with self; [];
1772 buildInputs = with self; [];
1799 doCheck = false;
1773 doCheck = false;
1800 propagatedBuildInputs = with self; [setuptools];
1774 propagatedBuildInputs = with self; [setuptools];
1801 src = fetchurl {
1775 src = fetchurl {
1802 url = "https://pypi.python.org/packages/40/33/694b6644c37f28553f4b9f20b3c3a20fb709a22574dff20b5bdffb09ecd5/zope.cachedescriptors-4.0.0.tar.gz";
1776 url = "https://pypi.python.org/packages/40/33/694b6644c37f28553f4b9f20b3c3a20fb709a22574dff20b5bdffb09ecd5/zope.cachedescriptors-4.0.0.tar.gz";
1803 md5 = "8d308de8c936792c8e758058fcb7d0f0";
1777 md5 = "8d308de8c936792c8e758058fcb7d0f0";
1804 };
1778 };
1805 meta = {
1779 meta = {
1806 license = [ pkgs.lib.licenses.zpt21 ];
1780 license = [ pkgs.lib.licenses.zpt21 ];
1807 };
1781 };
1808 };
1782 };
1809 zope.deprecation = super.buildPythonPackage {
1783 zope.deprecation = super.buildPythonPackage {
1810 name = "zope.deprecation-4.1.2";
1784 name = "zope.deprecation-4.1.2";
1811 buildInputs = with self; [];
1785 buildInputs = with self; [];
1812 doCheck = false;
1786 doCheck = false;
1813 propagatedBuildInputs = with self; [setuptools];
1787 propagatedBuildInputs = with self; [setuptools];
1814 src = fetchurl {
1788 src = fetchurl {
1815 url = "https://pypi.python.org/packages/c1/d3/3919492d5e57d8dd01b36f30b34fc8404a30577392b1eb817c303499ad20/zope.deprecation-4.1.2.tar.gz";
1789 url = "https://pypi.python.org/packages/c1/d3/3919492d5e57d8dd01b36f30b34fc8404a30577392b1eb817c303499ad20/zope.deprecation-4.1.2.tar.gz";
1816 md5 = "e9a663ded58f4f9f7881beb56cae2782";
1790 md5 = "e9a663ded58f4f9f7881beb56cae2782";
1817 };
1791 };
1818 meta = {
1792 meta = {
1819 license = [ pkgs.lib.licenses.zpt21 ];
1793 license = [ pkgs.lib.licenses.zpt21 ];
1820 };
1794 };
1821 };
1795 };
1822 zope.event = super.buildPythonPackage {
1796 zope.event = super.buildPythonPackage {
1823 name = "zope.event-4.0.3";
1797 name = "zope.event-4.0.3";
1824 buildInputs = with self; [];
1798 buildInputs = with self; [];
1825 doCheck = false;
1799 doCheck = false;
1826 propagatedBuildInputs = with self; [setuptools];
1800 propagatedBuildInputs = with self; [setuptools];
1827 src = fetchurl {
1801 src = fetchurl {
1828 url = "https://pypi.python.org/packages/c1/29/91ba884d7d6d96691df592e9e9c2bfa57a47040ec1ff47eff18c85137152/zope.event-4.0.3.tar.gz";
1802 url = "https://pypi.python.org/packages/c1/29/91ba884d7d6d96691df592e9e9c2bfa57a47040ec1ff47eff18c85137152/zope.event-4.0.3.tar.gz";
1829 md5 = "9a3780916332b18b8b85f522bcc3e249";
1803 md5 = "9a3780916332b18b8b85f522bcc3e249";
1830 };
1804 };
1831 meta = {
1805 meta = {
1832 license = [ pkgs.lib.licenses.zpt21 ];
1806 license = [ pkgs.lib.licenses.zpt21 ];
1833 };
1807 };
1834 };
1808 };
1835 zope.interface = super.buildPythonPackage {
1809 zope.interface = super.buildPythonPackage {
1836 name = "zope.interface-4.1.3";
1810 name = "zope.interface-4.1.3";
1837 buildInputs = with self; [];
1811 buildInputs = with self; [];
1838 doCheck = false;
1812 doCheck = false;
1839 propagatedBuildInputs = with self; [setuptools];
1813 propagatedBuildInputs = with self; [setuptools];
1840 src = fetchurl {
1814 src = fetchurl {
1841 url = "https://pypi.python.org/packages/9d/81/2509ca3c6f59080123c1a8a97125eb48414022618cec0e64eb1313727bfe/zope.interface-4.1.3.tar.gz";
1815 url = "https://pypi.python.org/packages/9d/81/2509ca3c6f59080123c1a8a97125eb48414022618cec0e64eb1313727bfe/zope.interface-4.1.3.tar.gz";
1842 md5 = "9ae3d24c0c7415deb249dd1a132f0f79";
1816 md5 = "9ae3d24c0c7415deb249dd1a132f0f79";
1843 };
1817 };
1844 meta = {
1818 meta = {
1845 license = [ pkgs.lib.licenses.zpt21 ];
1819 license = [ pkgs.lib.licenses.zpt21 ];
1846 };
1820 };
1847 };
1821 };
1848
1822
1849 ### Test requirements
1823 ### Test requirements
1850
1824
1851
1825
1852 }
1826 }
@@ -1,12 +1,11 b''
1 [pytest]
1 [pytest]
2 testpaths = ./rhodecode
2 testpaths = ./rhodecode
3 pylons_config = rhodecode/tests/rhodecode.ini
3 pylons_config = rhodecode/tests/rhodecode.ini
4 vcsserver_protocol = http
4 vcsserver_protocol = http
5 vcsserver_config_pyro4 = rhodecode/tests/vcsserver_pyro4.ini
6 vcsserver_config_http = rhodecode/tests/vcsserver_http.ini
5 vcsserver_config_http = rhodecode/tests/vcsserver_http.ini
7 norecursedirs = tests/scripts
6 norecursedirs = tests/scripts
8 addopts = -k "not _BaseTest"
7 addopts = -k "not _BaseTest"
9 markers =
8 markers =
10 vcs_operations: Mark tests depending on a running RhodeCode instance.
9 vcs_operations: Mark tests depending on a running RhodeCode instance.
11 xfail_backends: Mark tests as xfail for given backends.
10 xfail_backends: Mark tests as xfail for given backends.
12 skip_backends: Mark tests as skipped for given backends.
11 skip_backends: Mark tests as skipped for given backends.
@@ -1,131 +1,127 b''
1 ## core
1 ## core
2 setuptools==30.1.0
2 setuptools==30.1.0
3 setuptools-scm==1.15.0
3 setuptools-scm==1.15.0
4
4
5 amqplib==1.0.2
5 amqplib==1.0.2
6 anyjson==0.3.3
6 anyjson==0.3.3
7 authomatic==0.1.0.post1
7 authomatic==0.1.0.post1
8 Babel==1.3
8 Babel==1.3
9 backport-ipaddress==0.1
9 backport-ipaddress==0.1
10 Beaker==1.7.0
10 Beaker==1.7.0
11 celery==2.2.10
11 celery==2.2.10
12 Chameleon==2.24
12 Chameleon==2.24
13 channelstream==0.5.2
13 channelstream==0.5.2
14 click==5.1
14 click==5.1
15 colander==1.2
15 colander==1.2
16 configobj==5.0.6
16 configobj==5.0.6
17 decorator==3.4.2
17 decorator==3.4.2
18 deform==2.0a2
18 deform==2.0a2
19 docutils==0.12
19 docutils==0.12
20 dogpile.cache==0.6.1
20 dogpile.cache==0.6.1
21 dogpile.core==0.4.1
21 dogpile.core==0.4.1
22 ecdsa==0.11
22 ecdsa==0.11
23 FormEncode==1.2.4
23 FormEncode==1.2.4
24 future==0.14.3
24 future==0.14.3
25 futures==3.0.2
25 futures==3.0.2
26 gnureadline==6.3.3
26 gnureadline==6.3.3
27 infrae.cache==1.0.1
27 infrae.cache==1.0.1
28 iso8601==0.1.11
28 iso8601==0.1.11
29 itsdangerous==0.24
29 itsdangerous==0.24
30 Jinja2==2.7.3
30 Jinja2==2.7.3
31 kombu==1.5.1
31 kombu==1.5.1
32 Mako==1.0.6
32 Mako==1.0.6
33 Markdown==2.6.7
33 Markdown==2.6.7
34 MarkupSafe==0.23
34 MarkupSafe==0.23
35 meld3==1.0.2
35 meld3==1.0.2
36 msgpack-python==0.4.8
36 msgpack-python==0.4.8
37 MySQL-python==1.2.5
37 MySQL-python==1.2.5
38 nose==1.3.6
38 nose==1.3.6
39 objgraph==2.0.0
39 objgraph==2.0.0
40 packaging==15.2
40 packaging==15.2
41 paramiko==1.15.1
41 paramiko==1.15.1
42 Paste==2.0.3
42 Paste==2.0.3
43 PasteDeploy==1.5.2
43 PasteDeploy==1.5.2
44 PasteScript==1.7.5
44 PasteScript==1.7.5
45 psutil==4.3.1
45 psutil==4.3.1
46 psycopg2==2.6.1
46 psycopg2==2.6.1
47 py-bcrypt==0.4
47 py-bcrypt==0.4
48 pycrypto==2.6.1
48 pycrypto==2.6.1
49 pycurl==7.19.5
49 pycurl==7.19.5
50 pyflakes==0.8.1
50 pyflakes==0.8.1
51 pygments-markdown-lexer==0.1.0.dev39
51 pygments-markdown-lexer==0.1.0.dev39
52 Pygments==2.2.0
52 Pygments==2.2.0
53 pyparsing==1.5.7
53 pyparsing==1.5.7
54 pyramid-beaker==0.8
54 pyramid-beaker==0.8
55 pyramid-debugtoolbar==3.0.5
55 pyramid-debugtoolbar==3.0.5
56 pyramid-jinja2==2.5
56 pyramid-jinja2==2.5
57 pyramid-mako==1.0.2
57 pyramid-mako==1.0.2
58 pyramid==1.7.4
58 pyramid==1.7.4
59 pysqlite==2.6.3
59 pysqlite==2.6.3
60 python-dateutil==1.5
60 python-dateutil==1.5
61 python-ldap==2.4.19
61 python-ldap==2.4.19
62 python-memcached==1.57
62 python-memcached==1.57
63 python-pam==1.8.2
63 python-pam==1.8.2
64 pytz==2015.4
64 pytz==2015.4
65 pyzmq==14.6.0
65 pyzmq==14.6.0
66 recaptcha-client==1.0.6
66 recaptcha-client==1.0.6
67 repoze.lru==0.6
67 repoze.lru==0.6
68 requests==2.9.1
68 requests==2.9.1
69 Routes==1.13
69 Routes==1.13
70 setproctitle==1.1.8
70 setproctitle==1.1.8
71 simplejson==3.7.2
71 simplejson==3.7.2
72 six==1.9.0
72 six==1.9.0
73 Sphinx==1.2.2
73 Sphinx==1.2.2
74 SQLAlchemy==0.9.9
74 SQLAlchemy==0.9.9
75 subprocess32==3.2.6
75 subprocess32==3.2.6
76 supervisor==3.3.1
76 supervisor==3.3.1
77 Tempita==0.5.2
77 Tempita==0.5.2
78 translationstring==1.3
78 translationstring==1.3
79 trollius==1.0.4
79 trollius==1.0.4
80 urllib3==1.16
80 urllib3==1.16
81 URLObject==2.4.0
81 URLObject==2.4.0
82 venusian==1.0
82 venusian==1.0
83 WebError==0.10.3
83 WebError==0.10.3
84 WebHelpers2==2.0
84 WebHelpers2==2.0
85 WebHelpers==1.3
85 WebHelpers==1.3
86 WebOb==1.3.1
86 WebOb==1.3.1
87 Whoosh==2.7.4
87 Whoosh==2.7.4
88 wsgiref==0.1.2
88 wsgiref==0.1.2
89 zope.cachedescriptors==4.0.0
89 zope.cachedescriptors==4.0.0
90 zope.deprecation==4.1.2
90 zope.deprecation==4.1.2
91 zope.event==4.0.3
91 zope.event==4.0.3
92 zope.interface==4.1.3
92 zope.interface==4.1.3
93
93
94 ## customized/patched libs
94 ## customized/patched libs
95 # our patched version of Pylons==1.0.2
95 # our patched version of Pylons==1.0.2
96 https://code.rhodecode.com/upstream/pylons/archive/707354ee4261b9c10450404fc9852ccea4fd667d.tar.gz?md5=f26633726fa2cd3a340316ee6a5d218f#egg=Pylons==1.0.2.rhodecode-patch-1
96 https://code.rhodecode.com/upstream/pylons/archive/707354ee4261b9c10450404fc9852ccea4fd667d.tar.gz?md5=f26633726fa2cd3a340316ee6a5d218f#egg=Pylons==1.0.2.rhodecode-patch-1
97 # not released py-gfm==0.1.3
97 # not released py-gfm==0.1.3
98 https://code.rhodecode.com/upstream/py-gfm/archive/0d66a19bc16e3d49de273c0f797d4e4781e8c0f2.tar.gz?md5=0d0d5385bfb629eea636a80b9c2bfd16#egg=py-gfm==0.1.3.rhodecode-upstream1
98 https://code.rhodecode.com/upstream/py-gfm/archive/0d66a19bc16e3d49de273c0f797d4e4781e8c0f2.tar.gz?md5=0d0d5385bfb629eea636a80b9c2bfd16#egg=py-gfm==0.1.3.rhodecode-upstream1
99
99
100
100
101 ## cli tools
101 ## cli tools
102 alembic==0.8.4
102 alembic==0.8.4
103 invoke==0.13.0
103 invoke==0.13.0
104 bumpversion==0.5.3
104 bumpversion==0.5.3
105 transifex-client==0.10
105 transifex-client==0.10
106
106
107 ## http servers
107 ## http servers
108 gevent==1.1.2
108 gevent==1.1.2
109 greenlet==0.4.10
109 greenlet==0.4.10
110 gunicorn==19.6.0
110 gunicorn==19.6.0
111 waitress==1.0.1
111 waitress==1.0.1
112 uWSGI==2.0.11.2
112 uWSGI==2.0.11.2
113
113
114 ## debug
114 ## debug
115 ipdb==0.10.1
115 ipdb==0.10.1
116 ipython==5.1.0
116 ipython==5.1.0
117 CProfileV==1.0.6
117 CProfileV==1.0.6
118 bottle==0.12.8
118 bottle==0.12.8
119
119
120 ## rhodecode-tools, special case
120 ## rhodecode-tools, special case
121 https://code.rhodecode.com/rhodecode-tools-ce/archive/v0.11.0.tar.gz?md5=e5fd0a8363af08a0ced71b50ca9cce15#egg=rhodecode-tools==0.11.0
121 https://code.rhodecode.com/rhodecode-tools-ce/archive/v0.11.0.tar.gz?md5=e5fd0a8363af08a0ced71b50ca9cce15#egg=rhodecode-tools==0.11.0
122
122
123 ## appenlight
123 ## appenlight
124 appenlight-client==0.6.14
124 appenlight-client==0.6.14
125
125
126 # Pyro/Deprecated TODO(Marcink): remove in 4.7 release.
127 Pyro4==4.41
128 serpent==1.15
129
130 ## test related requirements
126 ## test related requirements
131 -r requirements_test.txt
127 -r requirements_test.txt
@@ -1,190 +1,189 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2017 RhodeCode GmbH
3 # Copyright (C) 2010-2017 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 Pylons environment configuration
22 Pylons environment configuration
23 """
23 """
24
24
25 import os
25 import os
26 import logging
26 import logging
27 import rhodecode
27 import rhodecode
28 import platform
28 import platform
29 import re
29 import re
30 import io
30 import io
31
31
32 from mako.lookup import TemplateLookup
32 from mako.lookup import TemplateLookup
33 from pylons.configuration import PylonsConfig
33 from pylons.configuration import PylonsConfig
34 from pylons.error import handle_mako_error
34 from pylons.error import handle_mako_error
35 from pyramid.settings import asbool
35 from pyramid.settings import asbool
36
36
37 # ------------------------------------------------------------------------------
37 # ------------------------------------------------------------------------------
38 # CELERY magic until refactor - issue #4163 - import order matters here:
38 # CELERY magic until refactor - issue #4163 - import order matters here:
39 from rhodecode.lib import celerypylons # this must be first, celerypylons
39 from rhodecode.lib import celerypylons # this must be first, celerypylons
40 # sets config settings upon import
40 # sets config settings upon import
41
41
42 import rhodecode.integrations # any modules using celery task
42 import rhodecode.integrations # any modules using celery task
43 # decorators should be added afterwards:
43 # decorators should be added afterwards:
44 # ------------------------------------------------------------------------------
44 # ------------------------------------------------------------------------------
45
45
46 from rhodecode.lib import app_globals
46 from rhodecode.lib import app_globals
47 from rhodecode.config import utils
47 from rhodecode.config import utils
48 from rhodecode.config.routing import make_map
48 from rhodecode.config.routing import make_map
49 from rhodecode.config.jsroutes import generate_jsroutes_content
49 from rhodecode.config.jsroutes import generate_jsroutes_content
50
50
51 from rhodecode.lib import helpers
51 from rhodecode.lib import helpers
52 from rhodecode.lib.auth import set_available_permissions
52 from rhodecode.lib.auth import set_available_permissions
53 from rhodecode.lib.utils import (
53 from rhodecode.lib.utils import (
54 repo2db_mapper, make_db_config, set_rhodecode_config,
54 repo2db_mapper, make_db_config, set_rhodecode_config,
55 load_rcextensions)
55 load_rcextensions)
56 from rhodecode.lib.utils2 import str2bool, aslist
56 from rhodecode.lib.utils2 import str2bool, aslist
57 from rhodecode.lib.vcs import connect_vcs, start_vcs_server
57 from rhodecode.lib.vcs import connect_vcs, start_vcs_server
58 from rhodecode.model.scm import ScmModel
58 from rhodecode.model.scm import ScmModel
59
59
60 log = logging.getLogger(__name__)
60 log = logging.getLogger(__name__)
61
61
62 def load_environment(global_conf, app_conf, initial=False,
62 def load_environment(global_conf, app_conf, initial=False,
63 test_env=None, test_index=None):
63 test_env=None, test_index=None):
64 """
64 """
65 Configure the Pylons environment via the ``pylons.config``
65 Configure the Pylons environment via the ``pylons.config``
66 object
66 object
67 """
67 """
68 config = PylonsConfig()
68 config = PylonsConfig()
69
69
70
70
71 # Pylons paths
71 # Pylons paths
72 root = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
72 root = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
73 paths = {
73 paths = {
74 'root': root,
74 'root': root,
75 'controllers': os.path.join(root, 'controllers'),
75 'controllers': os.path.join(root, 'controllers'),
76 'static_files': os.path.join(root, 'public'),
76 'static_files': os.path.join(root, 'public'),
77 'templates': [os.path.join(root, 'templates')],
77 'templates': [os.path.join(root, 'templates')],
78 }
78 }
79
79
80 # Initialize config with the basic options
80 # Initialize config with the basic options
81 config.init_app(global_conf, app_conf, package='rhodecode', paths=paths)
81 config.init_app(global_conf, app_conf, package='rhodecode', paths=paths)
82
82
83 # store some globals into rhodecode
83 # store some globals into rhodecode
84 rhodecode.CELERY_ENABLED = str2bool(config['app_conf'].get('use_celery'))
84 rhodecode.CELERY_ENABLED = str2bool(config['app_conf'].get('use_celery'))
85 rhodecode.CELERY_EAGER = str2bool(
85 rhodecode.CELERY_EAGER = str2bool(
86 config['app_conf'].get('celery.always.eager'))
86 config['app_conf'].get('celery.always.eager'))
87
87
88 config['routes.map'] = make_map(config)
88 config['routes.map'] = make_map(config)
89
89
90 if asbool(config.get('generate_js_files', 'false')):
90 if asbool(config.get('generate_js_files', 'false')):
91 jsroutes = config['routes.map'].jsroutes()
91 jsroutes = config['routes.map'].jsroutes()
92 jsroutes_file_content = generate_jsroutes_content(jsroutes)
92 jsroutes_file_content = generate_jsroutes_content(jsroutes)
93 jsroutes_file_path = os.path.join(
93 jsroutes_file_path = os.path.join(
94 paths['static_files'], 'js', 'rhodecode', 'routes.js')
94 paths['static_files'], 'js', 'rhodecode', 'routes.js')
95
95
96 with io.open(jsroutes_file_path, 'w', encoding='utf-8') as f:
96 with io.open(jsroutes_file_path, 'w', encoding='utf-8') as f:
97 f.write(jsroutes_file_content)
97 f.write(jsroutes_file_content)
98
98
99 config['pylons.app_globals'] = app_globals.Globals(config)
99 config['pylons.app_globals'] = app_globals.Globals(config)
100 config['pylons.h'] = helpers
100 config['pylons.h'] = helpers
101 rhodecode.CONFIG = config
101 rhodecode.CONFIG = config
102
102
103 load_rcextensions(root_path=config['here'])
103 load_rcextensions(root_path=config['here'])
104
104
105 # Setup cache object as early as possible
105 # Setup cache object as early as possible
106 import pylons
106 import pylons
107 pylons.cache._push_object(config['pylons.app_globals'].cache)
107 pylons.cache._push_object(config['pylons.app_globals'].cache)
108
108
109 # Create the Mako TemplateLookup, with the default auto-escaping
109 # Create the Mako TemplateLookup, with the default auto-escaping
110 config['pylons.app_globals'].mako_lookup = TemplateLookup(
110 config['pylons.app_globals'].mako_lookup = TemplateLookup(
111 directories=paths['templates'],
111 directories=paths['templates'],
112 error_handler=handle_mako_error,
112 error_handler=handle_mako_error,
113 module_directory=os.path.join(app_conf['cache_dir'], 'templates'),
113 module_directory=os.path.join(app_conf['cache_dir'], 'templates'),
114 input_encoding='utf-8', default_filters=['escape'],
114 input_encoding='utf-8', default_filters=['escape'],
115 imports=['from webhelpers.html import escape'])
115 imports=['from webhelpers.html import escape'])
116
116
117 # sets the c attribute access when don't existing attribute are accessed
117 # sets the c attribute access when don't existing attribute are accessed
118 config['pylons.strict_tmpl_context'] = True
118 config['pylons.strict_tmpl_context'] = True
119
119
120 # configure channelstream
120 # configure channelstream
121 config['channelstream_config'] = {
121 config['channelstream_config'] = {
122 'enabled': asbool(config.get('channelstream.enabled', False)),
122 'enabled': asbool(config.get('channelstream.enabled', False)),
123 'server': config.get('channelstream.server'),
123 'server': config.get('channelstream.server'),
124 'secret': config.get('channelstream.secret')
124 'secret': config.get('channelstream.secret')
125 }
125 }
126
126
127 set_available_permissions(config)
127 set_available_permissions(config)
128 db_cfg = make_db_config(clear_session=True)
128 db_cfg = make_db_config(clear_session=True)
129
129
130 repos_path = list(db_cfg.items('paths'))[0][1]
130 repos_path = list(db_cfg.items('paths'))[0][1]
131 config['base_path'] = repos_path
131 config['base_path'] = repos_path
132
132
133 # store db config also in main global CONFIG
133 # store db config also in main global CONFIG
134 set_rhodecode_config(config)
134 set_rhodecode_config(config)
135
135
136 # configure instance id
136 # configure instance id
137 utils.set_instance_id(config)
137 utils.set_instance_id(config)
138
138
139 # CONFIGURATION OPTIONS HERE (note: all config options will override
139 # CONFIGURATION OPTIONS HERE (note: all config options will override
140 # any Pylons config options)
140 # any Pylons config options)
141
141
142 # store config reference into our module to skip import magic of pylons
142 # store config reference into our module to skip import magic of pylons
143 rhodecode.CONFIG.update(config)
143 rhodecode.CONFIG.update(config)
144
144
145 return config
145 return config
146
146
147
147
148 def load_pyramid_environment(global_config, settings):
148 def load_pyramid_environment(global_config, settings):
149 # Some parts of the code expect a merge of global and app settings.
149 # Some parts of the code expect a merge of global and app settings.
150 settings_merged = global_config.copy()
150 settings_merged = global_config.copy()
151 settings_merged.update(settings)
151 settings_merged.update(settings)
152
152
153 # Store the settings to make them available to other modules.
153 # Store the settings to make them available to other modules.
154 rhodecode.PYRAMID_SETTINGS = settings_merged
154 rhodecode.PYRAMID_SETTINGS = settings_merged
155
155
156 # If this is a test run we prepare the test environment like
156 # If this is a test run we prepare the test environment like
157 # creating a test database, test search index and test repositories.
157 # creating a test database, test search index and test repositories.
158 # This has to be done before the database connection is initialized.
158 # This has to be done before the database connection is initialized.
159 if settings['is_test']:
159 if settings['is_test']:
160 rhodecode.is_test = True
160 rhodecode.is_test = True
161 rhodecode.disable_error_handler = True
161 rhodecode.disable_error_handler = True
162
162
163 utils.initialize_test_environment(settings_merged)
163 utils.initialize_test_environment(settings_merged)
164
164
165 # Initialize the database connection.
165 # Initialize the database connection.
166 utils.initialize_database(settings_merged)
166 utils.initialize_database(settings_merged)
167
167
168 # Limit backends to `vcs.backends` from configuration
168 # Limit backends to `vcs.backends` from configuration
169 for alias in rhodecode.BACKENDS.keys():
169 for alias in rhodecode.BACKENDS.keys():
170 if alias not in settings['vcs.backends']:
170 if alias not in settings['vcs.backends']:
171 del rhodecode.BACKENDS[alias]
171 del rhodecode.BACKENDS[alias]
172 log.info('Enabled VCS backends: %s', rhodecode.BACKENDS.keys())
172 log.info('Enabled VCS backends: %s', rhodecode.BACKENDS.keys())
173
173
174 # initialize vcs client and optionally run the server if enabled
174 # initialize vcs client and optionally run the server if enabled
175 vcs_server_uri = settings['vcs.server']
175 vcs_server_uri = settings['vcs.server']
176 vcs_server_enabled = settings['vcs.server.enable']
176 vcs_server_enabled = settings['vcs.server.enable']
177 start_server = (
177 start_server = (
178 settings['vcs.start_server'] and
178 settings['vcs.start_server'] and
179 not int(os.environ.get('RC_VCSSERVER_TEST_DISABLE', '0')))
179 not int(os.environ.get('RC_VCSSERVER_TEST_DISABLE', '0')))
180
180
181 if vcs_server_enabled and start_server:
181 if vcs_server_enabled and start_server:
182 log.info("Starting vcsserver")
182 log.info("Starting vcsserver")
183 start_vcs_server(server_and_port=vcs_server_uri,
183 start_vcs_server(server_and_port=vcs_server_uri,
184 protocol=utils.get_vcs_server_protocol(settings),
184 protocol=utils.get_vcs_server_protocol(settings),
185 log_level=settings['vcs.server.log_level'])
185 log_level=settings['vcs.server.log_level'])
186
186
187 utils.configure_pyro4(settings)
188 utils.configure_vcs(settings)
187 utils.configure_vcs(settings)
189 if vcs_server_enabled:
188 if vcs_server_enabled:
190 connect_vcs(vcs_server_uri, utils.get_vcs_server_protocol(settings))
189 connect_vcs(vcs_server_uri, utils.get_vcs_server_protocol(settings))
@@ -1,256 +1,250 b''
1 {
1 {
2 "nodejs-4.3.1": {
2 "nodejs-4.3.1": {
3 "MIT License": "http://spdx.org/licenses/MIT"
3 "MIT License": "http://spdx.org/licenses/MIT"
4 },
4 },
5 "postgresql-9.5.1": {
5 "postgresql-9.5.1": {
6 "PostgreSQL License": "http://spdx.org/licenses/PostgreSQL"
6 "PostgreSQL License": "http://spdx.org/licenses/PostgreSQL"
7 },
7 },
8 "python-2.7.11": {
8 "python-2.7.11": {
9 "Python Software Foundation License version 2": "http://spdx.org/licenses/Python-2.0"
9 "Python Software Foundation License version 2": "http://spdx.org/licenses/Python-2.0"
10 },
10 },
11 "python2.7-Babel-1.3": {
11 "python2.7-Babel-1.3": {
12 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
12 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
13 },
13 },
14 "python2.7-Beaker-1.7.0": {
14 "python2.7-Beaker-1.7.0": {
15 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
15 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
16 },
16 },
17 "python2.7-FormEncode-1.2.4": {
17 "python2.7-FormEncode-1.2.4": {
18 "Python Software Foundation License version 2": "http://spdx.org/licenses/Python-2.0"
18 "Python Software Foundation License version 2": "http://spdx.org/licenses/Python-2.0"
19 },
19 },
20 "python2.7-Mako-1.0.1": {
20 "python2.7-Mako-1.0.1": {
21 "MIT License": "http://spdx.org/licenses/MIT"
21 "MIT License": "http://spdx.org/licenses/MIT"
22 },
22 },
23 "python2.7-Markdown-2.6.2": {
23 "python2.7-Markdown-2.6.2": {
24 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
24 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
25 },
25 },
26 "python2.7-MarkupSafe-0.23": {
26 "python2.7-MarkupSafe-0.23": {
27 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
27 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
28 },
28 },
29 "python2.7-Paste-2.0.2": {
29 "python2.7-Paste-2.0.2": {
30 "MIT License": "http://spdx.org/licenses/MIT"
30 "MIT License": "http://spdx.org/licenses/MIT"
31 },
31 },
32 "python2.7-PasteDeploy-1.5.2": {
32 "python2.7-PasteDeploy-1.5.2": {
33 "MIT License": "http://spdx.org/licenses/MIT"
33 "MIT License": "http://spdx.org/licenses/MIT"
34 },
34 },
35 "python2.7-PasteScript-1.7.5": {
35 "python2.7-PasteScript-1.7.5": {
36 "MIT License": "http://spdx.org/licenses/MIT"
36 "MIT License": "http://spdx.org/licenses/MIT"
37 },
37 },
38 "python2.7-Pygments-2.0.2": {
38 "python2.7-Pygments-2.0.2": {
39 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
39 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
40 },
40 },
41 "python2.7-Pylons-1.0.1-patch1": {
41 "python2.7-Pylons-1.0.1-patch1": {
42 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
42 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
43 },
43 },
44 "python2.7-Pyro4-4.35": {
45 "MIT License": "http://spdx.org/licenses/MIT"
46 },
47 "python2.7-Routes-1.13": {
44 "python2.7-Routes-1.13": {
48 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
45 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
49 },
46 },
50 "python2.7-SQLAlchemy-0.9.9": {
47 "python2.7-SQLAlchemy-0.9.9": {
51 "MIT License": "http://spdx.org/licenses/MIT"
48 "MIT License": "http://spdx.org/licenses/MIT"
52 },
49 },
53 "python2.7-Tempita-0.5.2": {
50 "python2.7-Tempita-0.5.2": {
54 "MIT License": "http://spdx.org/licenses/MIT"
51 "MIT License": "http://spdx.org/licenses/MIT"
55 },
52 },
56 "python2.7-URLObject-2.4.0": {
53 "python2.7-URLObject-2.4.0": {
57 "The Unlicense": "http://unlicense.org/"
54 "The Unlicense": "http://unlicense.org/"
58 },
55 },
59 "python2.7-WebError-0.10.3": {
56 "python2.7-WebError-0.10.3": {
60 "MIT License": "http://spdx.org/licenses/MIT"
57 "MIT License": "http://spdx.org/licenses/MIT"
61 },
58 },
62 "python2.7-WebHelpers-1.3": {
59 "python2.7-WebHelpers-1.3": {
63 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
60 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
64 },
61 },
65 "python2.7-WebHelpers2-2.0": {
62 "python2.7-WebHelpers2-2.0": {
66 "MIT License": "http://spdx.org/licenses/MIT"
63 "MIT License": "http://spdx.org/licenses/MIT"
67 },
64 },
68 "python2.7-WebOb-1.3.1": {
65 "python2.7-WebOb-1.3.1": {
69 "MIT License": "http://spdx.org/licenses/MIT"
66 "MIT License": "http://spdx.org/licenses/MIT"
70 },
67 },
71 "python2.7-Whoosh-2.7.0": {
68 "python2.7-Whoosh-2.7.0": {
72 "BSD 2-clause \"Simplified\" License": "http://spdx.org/licenses/BSD-2-Clause",
69 "BSD 2-clause \"Simplified\" License": "http://spdx.org/licenses/BSD-2-Clause",
73 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
70 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
74 },
71 },
75 "python2.7-alembic-0.8.4": {
72 "python2.7-alembic-0.8.4": {
76 "MIT License": "http://spdx.org/licenses/MIT"
73 "MIT License": "http://spdx.org/licenses/MIT"
77 },
74 },
78 "python2.7-amqplib-1.0.2": {
75 "python2.7-amqplib-1.0.2": {
79 "GNU Lesser General Public License v3.0 only": "http://spdx.org/licenses/LGPL-3.0"
76 "GNU Lesser General Public License v3.0 only": "http://spdx.org/licenses/LGPL-3.0"
80 },
77 },
81 "python2.7-anyjson-0.3.3": {
78 "python2.7-anyjson-0.3.3": {
82 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
79 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
83 },
80 },
84 "python2.7-appenlight-client-0.6.14": {
81 "python2.7-appenlight-client-0.6.14": {
85 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
82 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
86 },
83 },
87 "python2.7-authomatic-0.1.0.post1": {
84 "python2.7-authomatic-0.1.0.post1": {
88 "MIT License": "http://spdx.org/licenses/MIT"
85 "MIT License": "http://spdx.org/licenses/MIT"
89 },
86 },
90 "python2.7-backport-ipaddress-0.1": {
87 "python2.7-backport-ipaddress-0.1": {
91 "Python Software Foundation License version 2": "http://spdx.org/licenses/Python-2.0"
88 "Python Software Foundation License version 2": "http://spdx.org/licenses/Python-2.0"
92 },
89 },
93 "python2.7-celery-2.2.10": {
90 "python2.7-celery-2.2.10": {
94 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
91 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
95 },
92 },
96 "python2.7-click-5.1": {
93 "python2.7-click-5.1": {
97 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
94 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
98 },
95 },
99 "python2.7-colander-1.2": {
96 "python2.7-colander-1.2": {
100 "Repoze License": "http://www.repoze.org/LICENSE.txt"
97 "Repoze License": "http://www.repoze.org/LICENSE.txt"
101 },
98 },
102 "python2.7-configobj-5.0.6": {
99 "python2.7-configobj-5.0.6": {
103 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
100 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
104 },
101 },
105 "python2.7-cssselect-0.9.1": {
102 "python2.7-cssselect-0.9.1": {
106 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
103 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
107 },
104 },
108 "python2.7-decorator-3.4.2": {
105 "python2.7-decorator-3.4.2": {
109 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
106 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
110 },
107 },
111 "python2.7-docutils-0.12": {
108 "python2.7-docutils-0.12": {
112 "BSD 2-clause \"Simplified\" License": "http://spdx.org/licenses/BSD-2-Clause"
109 "BSD 2-clause \"Simplified\" License": "http://spdx.org/licenses/BSD-2-Clause"
113 },
110 },
114 "python2.7-elasticsearch-2.3.0": {
111 "python2.7-elasticsearch-2.3.0": {
115 "Apache License 2.0": "http://spdx.org/licenses/Apache-2.0"
112 "Apache License 2.0": "http://spdx.org/licenses/Apache-2.0"
116 },
113 },
117 "python2.7-elasticsearch-dsl-2.0.0": {
114 "python2.7-elasticsearch-dsl-2.0.0": {
118 "Apache License 2.0": "http://spdx.org/licenses/Apache-2.0"
115 "Apache License 2.0": "http://spdx.org/licenses/Apache-2.0"
119 },
116 },
120 "python2.7-future-0.14.3": {
117 "python2.7-future-0.14.3": {
121 "MIT License": "http://spdx.org/licenses/MIT"
118 "MIT License": "http://spdx.org/licenses/MIT"
122 },
119 },
123 "python2.7-futures-3.0.2": {
120 "python2.7-futures-3.0.2": {
124 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
121 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
125 },
122 },
126 "python2.7-gnureadline-6.3.3": {
123 "python2.7-gnureadline-6.3.3": {
127 "GNU General Public License v1.0 only": "http://spdx.org/licenses/GPL-1.0"
124 "GNU General Public License v1.0 only": "http://spdx.org/licenses/GPL-1.0"
128 },
125 },
129 "python2.7-gunicorn-19.6.0": {
126 "python2.7-gunicorn-19.6.0": {
130 "MIT License": "http://spdx.org/licenses/MIT"
127 "MIT License": "http://spdx.org/licenses/MIT"
131 },
128 },
132 "python2.7-infrae.cache-1.0.1": {
129 "python2.7-infrae.cache-1.0.1": {
133 "Zope Public License 2.1": "http://spdx.org/licenses/ZPL-2.1"
130 "Zope Public License 2.1": "http://spdx.org/licenses/ZPL-2.1"
134 },
131 },
135 "python2.7-ipython-3.1.0": {
132 "python2.7-ipython-3.1.0": {
136 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
133 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
137 },
134 },
138 "python2.7-iso8601-0.1.11": {
135 "python2.7-iso8601-0.1.11": {
139 "MIT License": "http://spdx.org/licenses/MIT"
136 "MIT License": "http://spdx.org/licenses/MIT"
140 },
137 },
141 "python2.7-kombu-1.5.1": {
138 "python2.7-kombu-1.5.1": {
142 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
139 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
143 },
140 },
144 "python2.7-msgpack-python-0.4.6": {
141 "python2.7-msgpack-python-0.4.6": {
145 "Apache License 2.0": "http://spdx.org/licenses/Apache-2.0"
142 "Apache License 2.0": "http://spdx.org/licenses/Apache-2.0"
146 },
143 },
147 "python2.7-packaging-15.2": {
144 "python2.7-packaging-15.2": {
148 "Apache License 2.0": "http://spdx.org/licenses/Apache-2.0"
145 "Apache License 2.0": "http://spdx.org/licenses/Apache-2.0"
149 },
146 },
150 "python2.7-psutil-2.2.1": {
147 "python2.7-psutil-2.2.1": {
151 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
148 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
152 },
149 },
153 "python2.7-psycopg2-2.6": {
150 "python2.7-psycopg2-2.6": {
154 "GNU Lesser General Public License v3.0 or later": "http://spdx.org/licenses/LGPL-3.0+"
151 "GNU Lesser General Public License v3.0 or later": "http://spdx.org/licenses/LGPL-3.0+"
155 },
152 },
156 "python2.7-py-1.4.29": {
153 "python2.7-py-1.4.29": {
157 "MIT License": "http://spdx.org/licenses/MIT"
154 "MIT License": "http://spdx.org/licenses/MIT"
158 },
155 },
159 "python2.7-py-bcrypt-0.4": {
156 "python2.7-py-bcrypt-0.4": {
160 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
157 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
161 },
158 },
162 "python2.7-pycrypto-2.6.1": {
159 "python2.7-pycrypto-2.6.1": {
163 "Public Domain": null
160 "Public Domain": null
164 },
161 },
165 "python2.7-pycurl-7.19.5": {
162 "python2.7-pycurl-7.19.5": {
166 "MIT License": "http://spdx.org/licenses/MIT"
163 "MIT License": "http://spdx.org/licenses/MIT"
167 },
164 },
168 "python2.7-pyparsing-1.5.7": {
165 "python2.7-pyparsing-1.5.7": {
169 "MIT License": "http://spdx.org/licenses/MIT"
166 "MIT License": "http://spdx.org/licenses/MIT"
170 },
167 },
171 "python2.7-pyramid-1.6.1": {
168 "python2.7-pyramid-1.6.1": {
172 "Repoze License": "http://www.repoze.org/LICENSE.txt"
169 "Repoze License": "http://www.repoze.org/LICENSE.txt"
173 },
170 },
174 "python2.7-pyramid-beaker-0.8": {
171 "python2.7-pyramid-beaker-0.8": {
175 "Repoze License": "http://www.repoze.org/LICENSE.txt"
172 "Repoze License": "http://www.repoze.org/LICENSE.txt"
176 },
173 },
177 "python2.7-pyramid-debugtoolbar-2.4.2": {
174 "python2.7-pyramid-debugtoolbar-2.4.2": {
178 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause",
175 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause",
179 "Repoze License": "http://www.repoze.org/LICENSE.txt"
176 "Repoze License": "http://www.repoze.org/LICENSE.txt"
180 },
177 },
181 "python2.7-pyramid-mako-1.0.2": {
178 "python2.7-pyramid-mako-1.0.2": {
182 "Repoze License": "http://www.repoze.org/LICENSE.txt"
179 "Repoze License": "http://www.repoze.org/LICENSE.txt"
183 },
180 },
184 "python2.7-pysqlite-2.6.3": {
181 "python2.7-pysqlite-2.6.3": {
185 "libpng License": "http://spdx.org/licenses/Libpng",
182 "libpng License": "http://spdx.org/licenses/Libpng",
186 "zlib License": "http://spdx.org/licenses/Zlib"
183 "zlib License": "http://spdx.org/licenses/Zlib"
187 },
184 },
188 "python2.7-pytest-2.8.5": {
185 "python2.7-pytest-2.8.5": {
189 "MIT License": "http://spdx.org/licenses/MIT"
186 "MIT License": "http://spdx.org/licenses/MIT"
190 },
187 },
191 "python2.7-pytest-runner-2.7.1": {
188 "python2.7-pytest-runner-2.7.1": {
192 "MIT License": "http://spdx.org/licenses/MIT"
189 "MIT License": "http://spdx.org/licenses/MIT"
193 },
190 },
194 "python2.7-python-dateutil-1.5": {
191 "python2.7-python-dateutil-1.5": {
195 "Python Software Foundation License version 2": "http://spdx.org/licenses/Python-2.0"
192 "Python Software Foundation License version 2": "http://spdx.org/licenses/Python-2.0"
196 },
193 },
197 "python2.7-python-editor-1.0.1": {
194 "python2.7-python-editor-1.0.1": {
198 "Apache License 2.0": "http://spdx.org/licenses/Apache-2.0"
195 "Apache License 2.0": "http://spdx.org/licenses/Apache-2.0"
199 },
196 },
200 "python2.7-python-ldap-2.4.19": {
197 "python2.7-python-ldap-2.4.19": {
201 "Python Software Foundation License version 2": "http://spdx.org/licenses/Python-2.0"
198 "Python Software Foundation License version 2": "http://spdx.org/licenses/Python-2.0"
202 },
199 },
203 "python2.7-python-memcached-1.57": {
200 "python2.7-python-memcached-1.57": {
204 "Python Software Foundation License version 2": "http://spdx.org/licenses/Python-2.0"
201 "Python Software Foundation License version 2": "http://spdx.org/licenses/Python-2.0"
205 },
202 },
206 "python2.7-pytz-2015.4": {
203 "python2.7-pytz-2015.4": {
207 "MIT License": "http://spdx.org/licenses/MIT"
204 "MIT License": "http://spdx.org/licenses/MIT"
208 },
205 },
209 "python2.7-recaptcha-client-1.0.6": {
206 "python2.7-recaptcha-client-1.0.6": {
210 "MIT License": "http://spdx.org/licenses/MIT"
207 "MIT License": "http://spdx.org/licenses/MIT"
211 },
208 },
212 "python2.7-repoze.lru-0.6": {
209 "python2.7-repoze.lru-0.6": {
213 "Repoze License": "http://www.repoze.org/LICENSE.txt"
210 "Repoze License": "http://www.repoze.org/LICENSE.txt"
214 },
211 },
215 "python2.7-requests-2.9.1": {
212 "python2.7-requests-2.9.1": {
216 "Apache License 2.0": "http://spdx.org/licenses/Apache-2.0"
213 "Apache License 2.0": "http://spdx.org/licenses/Apache-2.0"
217 },
214 },
218 "python2.7-serpent-1.12": {
219 "MIT License": "http://spdx.org/licenses/MIT"
220 },
221 "python2.7-setuptools-19.4": {
215 "python2.7-setuptools-19.4": {
222 "Python Software Foundation License version 2": "http://spdx.org/licenses/Python-2.0",
216 "Python Software Foundation License version 2": "http://spdx.org/licenses/Python-2.0",
223 "Zope Public License 2.0": "http://spdx.org/licenses/ZPL-2.0"
217 "Zope Public License 2.0": "http://spdx.org/licenses/ZPL-2.0"
224 },
218 },
225 "python2.7-setuptools-scm-1.11.0": {
219 "python2.7-setuptools-scm-1.11.0": {
226 "MIT License": "http://spdx.org/licenses/MIT"
220 "MIT License": "http://spdx.org/licenses/MIT"
227 },
221 },
228 "python2.7-simplejson-3.7.2": {
222 "python2.7-simplejson-3.7.2": {
229 "Academic Free License": "http://spdx.org/licenses/AFL-2.1",
223 "Academic Free License": "http://spdx.org/licenses/AFL-2.1",
230 "MIT License": "http://spdx.org/licenses/MIT"
224 "MIT License": "http://spdx.org/licenses/MIT"
231 },
225 },
232 "python2.7-six-1.9.0": {
226 "python2.7-six-1.9.0": {
233 "MIT License": "http://spdx.org/licenses/MIT"
227 "MIT License": "http://spdx.org/licenses/MIT"
234 },
228 },
235 "python2.7-translationstring-1.3": {
229 "python2.7-translationstring-1.3": {
236 "Repoze License": "http://www.repoze.org/LICENSE.txt"
230 "Repoze License": "http://www.repoze.org/LICENSE.txt"
237 },
231 },
238 "python2.7-urllib3-1.16": {
232 "python2.7-urllib3-1.16": {
239 "MIT License": "http://spdx.org/licenses/MIT"
233 "MIT License": "http://spdx.org/licenses/MIT"
240 },
234 },
241 "python2.7-venusian-1.0": {
235 "python2.7-venusian-1.0": {
242 "Repoze License": "http://www.repoze.org/LICENSE.txt"
236 "Repoze License": "http://www.repoze.org/LICENSE.txt"
243 },
237 },
244 "python2.7-waitress-0.8.9": {
238 "python2.7-waitress-0.8.9": {
245 "Zope Public License 2.1": "http://spdx.org/licenses/ZPL-2.1"
239 "Zope Public License 2.1": "http://spdx.org/licenses/ZPL-2.1"
246 },
240 },
247 "python2.7-zope.cachedescriptors-4.0.0": {
241 "python2.7-zope.cachedescriptors-4.0.0": {
248 "Zope Public License 2.1": "http://spdx.org/licenses/ZPL-2.1"
242 "Zope Public License 2.1": "http://spdx.org/licenses/ZPL-2.1"
249 },
243 },
250 "python2.7-zope.deprecation-4.1.2": {
244 "python2.7-zope.deprecation-4.1.2": {
251 "Zope Public License 2.1": "http://spdx.org/licenses/ZPL-2.1"
245 "Zope Public License 2.1": "http://spdx.org/licenses/ZPL-2.1"
252 },
246 },
253 "python2.7-zope.interface-4.1.3": {
247 "python2.7-zope.interface-4.1.3": {
254 "Zope Public License 2.1": "http://spdx.org/licenses/ZPL-2.1"
248 "Zope Public License 2.1": "http://spdx.org/licenses/ZPL-2.1"
255 }
249 }
256 } No newline at end of file
250 }
@@ -1,98 +1,80 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2017 RhodeCode GmbH
3 # Copyright (C) 2010-2017 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import os
21 import os
22 import shlex
22 import shlex
23 import Pyro4
24 import platform
23 import platform
25
24
26 from rhodecode.model import init_model
25 from rhodecode.model import init_model
27
26
28
27
29 def configure_pyro4(config):
30 """
31 Configure Pyro4 based on `config`.
32
33 This will mainly set the different configuration parameters of the Pyro4
34 library based on the settings in our INI files. The Pyro4 documentation
35 lists more details about the specific settings and their meaning.
36 """
37 Pyro4.config.COMMTIMEOUT = float(config['vcs.connection_timeout'])
38 Pyro4.config.SERIALIZER = 'pickle'
39 Pyro4.config.SERIALIZERS_ACCEPTED.add('pickle')
40
41 # Note: We need server configuration in the WSGI processes
42 # because we provide a callback server in certain vcs operations.
43 Pyro4.config.SERVERTYPE = "multiplex"
44 Pyro4.config.POLLTIMEOUT = 0.01
45
46
28
47 def configure_vcs(config):
29 def configure_vcs(config):
48 """
30 """
49 Patch VCS config with some RhodeCode specific stuff
31 Patch VCS config with some RhodeCode specific stuff
50 """
32 """
51 from rhodecode.lib.vcs import conf
33 from rhodecode.lib.vcs import conf
52 conf.settings.BACKENDS = {
34 conf.settings.BACKENDS = {
53 'hg': 'rhodecode.lib.vcs.backends.hg.MercurialRepository',
35 'hg': 'rhodecode.lib.vcs.backends.hg.MercurialRepository',
54 'git': 'rhodecode.lib.vcs.backends.git.GitRepository',
36 'git': 'rhodecode.lib.vcs.backends.git.GitRepository',
55 'svn': 'rhodecode.lib.vcs.backends.svn.SubversionRepository',
37 'svn': 'rhodecode.lib.vcs.backends.svn.SubversionRepository',
56 }
38 }
57
39
58 conf.settings.HOOKS_PROTOCOL = config['vcs.hooks.protocol']
40 conf.settings.HOOKS_PROTOCOL = config['vcs.hooks.protocol']
59 conf.settings.HOOKS_DIRECT_CALLS = config['vcs.hooks.direct_calls']
41 conf.settings.HOOKS_DIRECT_CALLS = config['vcs.hooks.direct_calls']
60 conf.settings.GIT_REV_FILTER = shlex.split(config['git_rev_filter'])
42 conf.settings.GIT_REV_FILTER = shlex.split(config['git_rev_filter'])
61 conf.settings.DEFAULT_ENCODINGS = config['default_encoding']
43 conf.settings.DEFAULT_ENCODINGS = config['default_encoding']
62 conf.settings.ALIASES[:] = config['vcs.backends']
44 conf.settings.ALIASES[:] = config['vcs.backends']
63 conf.settings.SVN_COMPATIBLE_VERSION = config['vcs.svn.compatible_version']
45 conf.settings.SVN_COMPATIBLE_VERSION = config['vcs.svn.compatible_version']
64
46
65
47
66 def initialize_database(config):
48 def initialize_database(config):
67 from rhodecode.lib.utils2 import engine_from_config, get_encryption_key
49 from rhodecode.lib.utils2 import engine_from_config, get_encryption_key
68 engine = engine_from_config(config, 'sqlalchemy.db1.')
50 engine = engine_from_config(config, 'sqlalchemy.db1.')
69 init_model(engine, encryption_key=get_encryption_key(config))
51 init_model(engine, encryption_key=get_encryption_key(config))
70
52
71
53
72 def initialize_test_environment(settings, test_env=None):
54 def initialize_test_environment(settings, test_env=None):
73 if test_env is None:
55 if test_env is None:
74 test_env = not int(os.environ.get('RC_NO_TMP_PATH', 0))
56 test_env = not int(os.environ.get('RC_NO_TMP_PATH', 0))
75
57
76 from rhodecode.lib.utils import (
58 from rhodecode.lib.utils import (
77 create_test_directory, create_test_database, create_test_repositories,
59 create_test_directory, create_test_database, create_test_repositories,
78 create_test_index)
60 create_test_index)
79 from rhodecode.tests import TESTS_TMP_PATH
61 from rhodecode.tests import TESTS_TMP_PATH
80 # test repos
62 # test repos
81 if test_env:
63 if test_env:
82 create_test_directory(TESTS_TMP_PATH)
64 create_test_directory(TESTS_TMP_PATH)
83 create_test_database(TESTS_TMP_PATH, settings)
65 create_test_database(TESTS_TMP_PATH, settings)
84 create_test_repositories(TESTS_TMP_PATH, settings)
66 create_test_repositories(TESTS_TMP_PATH, settings)
85 create_test_index(TESTS_TMP_PATH, settings)
67 create_test_index(TESTS_TMP_PATH, settings)
86
68
87
69
88 def get_vcs_server_protocol(config):
70 def get_vcs_server_protocol(config):
89 return config['vcs.server.protocol']
71 return config['vcs.server.protocol']
90
72
91
73
92 def set_instance_id(config):
74 def set_instance_id(config):
93 """ Sets a dynamic generated config['instance_id'] if missing or '*' """
75 """ Sets a dynamic generated config['instance_id'] if missing or '*' """
94
76
95 config['instance_id'] = config.get('instance_id') or ''
77 config['instance_id'] = config.get('instance_id') or ''
96 if config['instance_id'] == '*' or not config['instance_id']:
78 if config['instance_id'] == '*' or not config['instance_id']:
97 _platform_id = platform.uname()[1] or 'instance'
79 _platform_id = platform.uname()[1] or 'instance'
98 config['instance_id'] = '%s-%s' % (_platform_id, os.getpid())
80 config['instance_id'] = '%s-%s' % (_platform_id, os.getpid())
@@ -1,237 +1,236 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2017 RhodeCode GmbH
3 # Copyright (C) 2010-2017 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 """
20 """
21 celery libs for RhodeCode
21 celery libs for RhodeCode
22 """
22 """
23
23
24
24
25 import pylons
25 import pylons
26 import socket
26 import socket
27 import logging
27 import logging
28
28
29 import rhodecode
29 import rhodecode
30
30
31 from os.path import join as jn
31 from os.path import join as jn
32 from pylons import config
32 from pylons import config
33 from celery.task import Task
33 from celery.task import Task
34 from pyramid.request import Request
34 from pyramid.request import Request
35 from pyramid.scripting import prepare
35 from pyramid.scripting import prepare
36 from pyramid.threadlocal import get_current_request
36 from pyramid.threadlocal import get_current_request
37
37
38 from decorator import decorator
38 from decorator import decorator
39
39
40 from zope.cachedescriptors.property import Lazy as LazyProperty
40 from zope.cachedescriptors.property import Lazy as LazyProperty
41
41
42 from rhodecode.config import utils
42 from rhodecode.config import utils
43 from rhodecode.lib.utils2 import (
43 from rhodecode.lib.utils2 import (
44 safe_str, md5_safe, aslist, get_routes_generator_for_server_url,
44 safe_str, md5_safe, aslist, get_routes_generator_for_server_url,
45 get_server_url)
45 get_server_url)
46 from rhodecode.lib.pidlock import DaemonLock, LockHeld
46 from rhodecode.lib.pidlock import DaemonLock, LockHeld
47 from rhodecode.lib.vcs import connect_vcs
47 from rhodecode.lib.vcs import connect_vcs
48 from rhodecode.model import meta
48 from rhodecode.model import meta
49 from rhodecode.lib.auth import AuthUser
49 from rhodecode.lib.auth import AuthUser
50
50
51 log = logging.getLogger(__name__)
51 log = logging.getLogger(__name__)
52
52
53
53
54 class ResultWrapper(object):
54 class ResultWrapper(object):
55 def __init__(self, task):
55 def __init__(self, task):
56 self.task = task
56 self.task = task
57
57
58 @LazyProperty
58 @LazyProperty
59 def result(self):
59 def result(self):
60 return self.task
60 return self.task
61
61
62
62
63 class RhodecodeCeleryTask(Task):
63 class RhodecodeCeleryTask(Task):
64 """
64 """
65 This is a celery task which will create a rhodecode app instance context
65 This is a celery task which will create a rhodecode app instance context
66 for the task, patch pyramid + pylons threadlocals with the original request
66 for the task, patch pyramid + pylons threadlocals with the original request
67 that created the task and also add the user to the context.
67 that created the task and also add the user to the context.
68
68
69 This class as a whole should be removed once the pylons port is complete
69 This class as a whole should be removed once the pylons port is complete
70 and a pyramid only solution for celery is implemented as per issue #4139
70 and a pyramid only solution for celery is implemented as per issue #4139
71 """
71 """
72
72
73 def apply_async(self, args=None, kwargs=None, task_id=None, producer=None,
73 def apply_async(self, args=None, kwargs=None, task_id=None, producer=None,
74 link=None, link_error=None, **options):
74 link=None, link_error=None, **options):
75 """ queue the job to run (we are in web request context here) """
75 """ queue the job to run (we are in web request context here) """
76
76
77 request = get_current_request()
77 request = get_current_request()
78
78
79 if hasattr(request, 'user'):
79 if hasattr(request, 'user'):
80 ip_addr = request.user.ip_addr
80 ip_addr = request.user.ip_addr
81 user_id = request.user.user_id
81 user_id = request.user.user_id
82 elif hasattr(request, 'rpc_params'):
82 elif hasattr(request, 'rpc_params'):
83 # TODO(marcink) remove when migration is finished
83 # TODO(marcink) remove when migration is finished
84 # api specific call on Pyramid.
84 # api specific call on Pyramid.
85 ip_addr = request.rpc_params['apiuser'].ip_addr
85 ip_addr = request.rpc_params['apiuser'].ip_addr
86 user_id = request.rpc_params['apiuser'].user_id
86 user_id = request.rpc_params['apiuser'].user_id
87 else:
87 else:
88 raise Exception('Unable to fetch data from request: {}'.format(
88 raise Exception('Unable to fetch data from request: {}'.format(
89 request))
89 request))
90
90
91 if request:
91 if request:
92 # we hook into kwargs since it is the only way to pass our data to
92 # we hook into kwargs since it is the only way to pass our data to
93 # the celery worker in celery 2.2
93 # the celery worker in celery 2.2
94 kwargs.update({
94 kwargs.update({
95 '_rhodecode_proxy_data': {
95 '_rhodecode_proxy_data': {
96 'environ': {
96 'environ': {
97 'PATH_INFO': request.environ['PATH_INFO'],
97 'PATH_INFO': request.environ['PATH_INFO'],
98 'SCRIPT_NAME': request.environ['SCRIPT_NAME'],
98 'SCRIPT_NAME': request.environ['SCRIPT_NAME'],
99 'HTTP_HOST': request.environ.get('HTTP_HOST',
99 'HTTP_HOST': request.environ.get('HTTP_HOST',
100 request.environ['SERVER_NAME']),
100 request.environ['SERVER_NAME']),
101 'SERVER_NAME': request.environ['SERVER_NAME'],
101 'SERVER_NAME': request.environ['SERVER_NAME'],
102 'SERVER_PORT': request.environ['SERVER_PORT'],
102 'SERVER_PORT': request.environ['SERVER_PORT'],
103 'wsgi.url_scheme': request.environ['wsgi.url_scheme'],
103 'wsgi.url_scheme': request.environ['wsgi.url_scheme'],
104 },
104 },
105 'auth_user': {
105 'auth_user': {
106 'ip_addr': ip_addr,
106 'ip_addr': ip_addr,
107 'user_id': user_id
107 'user_id': user_id
108 },
108 },
109 }
109 }
110 })
110 })
111 return super(RhodecodeCeleryTask, self).apply_async(
111 return super(RhodecodeCeleryTask, self).apply_async(
112 args, kwargs, task_id, producer, link, link_error, **options)
112 args, kwargs, task_id, producer, link, link_error, **options)
113
113
114 def __call__(self, *args, **kwargs):
114 def __call__(self, *args, **kwargs):
115 """ rebuild the context and then run task on celery worker """
115 """ rebuild the context and then run task on celery worker """
116 proxy_data = kwargs.pop('_rhodecode_proxy_data', {})
116 proxy_data = kwargs.pop('_rhodecode_proxy_data', {})
117
117
118 if not proxy_data:
118 if not proxy_data:
119 return super(RhodecodeCeleryTask, self).__call__(*args, **kwargs)
119 return super(RhodecodeCeleryTask, self).__call__(*args, **kwargs)
120
120
121 log.debug('using celery proxy data to run task: %r', proxy_data)
121 log.debug('using celery proxy data to run task: %r', proxy_data)
122
122
123 from rhodecode.config.routing import make_map
123 from rhodecode.config.routing import make_map
124
124
125 request = Request.blank('/', environ=proxy_data['environ'])
125 request = Request.blank('/', environ=proxy_data['environ'])
126 request.user = AuthUser(user_id=proxy_data['auth_user']['user_id'],
126 request.user = AuthUser(user_id=proxy_data['auth_user']['user_id'],
127 ip_addr=proxy_data['auth_user']['ip_addr'])
127 ip_addr=proxy_data['auth_user']['ip_addr'])
128
128
129 pyramid_request = prepare(request) # set pyramid threadlocal request
129 pyramid_request = prepare(request) # set pyramid threadlocal request
130
130
131 # pylons routing
131 # pylons routing
132 if not rhodecode.CONFIG.get('routes.map'):
132 if not rhodecode.CONFIG.get('routes.map'):
133 rhodecode.CONFIG['routes.map'] = make_map(config)
133 rhodecode.CONFIG['routes.map'] = make_map(config)
134 pylons.url._push_object(get_routes_generator_for_server_url(
134 pylons.url._push_object(get_routes_generator_for_server_url(
135 get_server_url(request.environ)
135 get_server_url(request.environ)
136 ))
136 ))
137
137
138 try:
138 try:
139 return super(RhodecodeCeleryTask, self).__call__(*args, **kwargs)
139 return super(RhodecodeCeleryTask, self).__call__(*args, **kwargs)
140 finally:
140 finally:
141 pyramid_request['closer']()
141 pyramid_request['closer']()
142 pylons.url._pop_object()
142 pylons.url._pop_object()
143
143
144
144
145 def run_task(task, *args, **kwargs):
145 def run_task(task, *args, **kwargs):
146 if rhodecode.CELERY_ENABLED:
146 if rhodecode.CELERY_ENABLED:
147 celery_is_up = False
147 celery_is_up = False
148 try:
148 try:
149 t = task.apply_async(args=args, kwargs=kwargs)
149 t = task.apply_async(args=args, kwargs=kwargs)
150 log.info('running task %s:%s', t.task_id, task)
150 log.info('running task %s:%s', t.task_id, task)
151 celery_is_up = True
151 celery_is_up = True
152 return t
152 return t
153
153
154 except socket.error as e:
154 except socket.error as e:
155 if isinstance(e, IOError) and e.errno == 111:
155 if isinstance(e, IOError) and e.errno == 111:
156 log.error('Unable to connect to celeryd. Sync execution')
156 log.error('Unable to connect to celeryd. Sync execution')
157 else:
157 else:
158 log.exception("Exception while connecting to celeryd.")
158 log.exception("Exception while connecting to celeryd.")
159 except KeyError as e:
159 except KeyError as e:
160 log.error('Unable to connect to celeryd. Sync execution')
160 log.error('Unable to connect to celeryd. Sync execution')
161 except Exception as e:
161 except Exception as e:
162 log.exception(
162 log.exception(
163 "Exception while trying to run task asynchronous. "
163 "Exception while trying to run task asynchronous. "
164 "Fallback to sync execution.")
164 "Fallback to sync execution.")
165
165
166 # keep in mind there maybe a subtle race condition where something
166 # keep in mind there maybe a subtle race condition where something
167 # depending on rhodecode.CELERY_ENABLED such as @dbsession decorator
167 # depending on rhodecode.CELERY_ENABLED such as @dbsession decorator
168 # will see CELERY_ENABLED as True before this has a chance to set False
168 # will see CELERY_ENABLED as True before this has a chance to set False
169 rhodecode.CELERY_ENABLED = celery_is_up
169 rhodecode.CELERY_ENABLED = celery_is_up
170 else:
170 else:
171 log.debug('executing task %s in sync mode', task)
171 log.debug('executing task %s in sync mode', task)
172 return ResultWrapper(task(*args, **kwargs))
172 return ResultWrapper(task(*args, **kwargs))
173
173
174
174
175 def __get_lockkey(func, *fargs, **fkwargs):
175 def __get_lockkey(func, *fargs, **fkwargs):
176 params = list(fargs)
176 params = list(fargs)
177 params.extend(['%s-%s' % ar for ar in fkwargs.items()])
177 params.extend(['%s-%s' % ar for ar in fkwargs.items()])
178
178
179 func_name = str(func.__name__) if hasattr(func, '__name__') else str(func)
179 func_name = str(func.__name__) if hasattr(func, '__name__') else str(func)
180 _lock_key = func_name + '-' + '-'.join(map(safe_str, params))
180 _lock_key = func_name + '-' + '-'.join(map(safe_str, params))
181 return 'task_%s.lock' % (md5_safe(_lock_key),)
181 return 'task_%s.lock' % (md5_safe(_lock_key),)
182
182
183
183
184 def locked_task(func):
184 def locked_task(func):
185 def __wrapper(func, *fargs, **fkwargs):
185 def __wrapper(func, *fargs, **fkwargs):
186 lockkey = __get_lockkey(func, *fargs, **fkwargs)
186 lockkey = __get_lockkey(func, *fargs, **fkwargs)
187 lockkey_path = config['app_conf']['cache_dir']
187 lockkey_path = config['app_conf']['cache_dir']
188
188
189 log.info('running task with lockkey %s' % lockkey)
189 log.info('running task with lockkey %s' % lockkey)
190 try:
190 try:
191 l = DaemonLock(file_=jn(lockkey_path, lockkey))
191 l = DaemonLock(file_=jn(lockkey_path, lockkey))
192 ret = func(*fargs, **fkwargs)
192 ret = func(*fargs, **fkwargs)
193 l.release()
193 l.release()
194 return ret
194 return ret
195 except LockHeld:
195 except LockHeld:
196 log.info('LockHeld')
196 log.info('LockHeld')
197 return 'Task with key %s already running' % lockkey
197 return 'Task with key %s already running' % lockkey
198
198
199 return decorator(__wrapper, func)
199 return decorator(__wrapper, func)
200
200
201
201
202 def get_session():
202 def get_session():
203 if rhodecode.CELERY_ENABLED:
203 if rhodecode.CELERY_ENABLED:
204 utils.initialize_database(config)
204 utils.initialize_database(config)
205 sa = meta.Session()
205 sa = meta.Session()
206 return sa
206 return sa
207
207
208
208
209 def dbsession(func):
209 def dbsession(func):
210 def __wrapper(func, *fargs, **fkwargs):
210 def __wrapper(func, *fargs, **fkwargs):
211 try:
211 try:
212 ret = func(*fargs, **fkwargs)
212 ret = func(*fargs, **fkwargs)
213 return ret
213 return ret
214 finally:
214 finally:
215 if rhodecode.CELERY_ENABLED and not rhodecode.CELERY_EAGER:
215 if rhodecode.CELERY_ENABLED and not rhodecode.CELERY_EAGER:
216 meta.Session.remove()
216 meta.Session.remove()
217
217
218 return decorator(__wrapper, func)
218 return decorator(__wrapper, func)
219
219
220
220
221 def vcsconnection(func):
221 def vcsconnection(func):
222 def __wrapper(func, *fargs, **fkwargs):
222 def __wrapper(func, *fargs, **fkwargs):
223 if rhodecode.CELERY_ENABLED and not rhodecode.CELERY_EAGER:
223 if rhodecode.CELERY_ENABLED and not rhodecode.CELERY_EAGER:
224 settings = rhodecode.PYRAMID_SETTINGS
224 settings = rhodecode.PYRAMID_SETTINGS
225 backends = settings['vcs.backends']
225 backends = settings['vcs.backends']
226 for alias in rhodecode.BACKENDS.keys():
226 for alias in rhodecode.BACKENDS.keys():
227 if alias not in backends:
227 if alias not in backends:
228 del rhodecode.BACKENDS[alias]
228 del rhodecode.BACKENDS[alias]
229 utils.configure_pyro4(settings)
230 utils.configure_vcs(settings)
229 utils.configure_vcs(settings)
231 connect_vcs(
230 connect_vcs(
232 settings['vcs.server'],
231 settings['vcs.server'],
233 utils.get_vcs_server_protocol(settings))
232 utils.get_vcs_server_protocol(settings))
234 ret = func(*fargs, **fkwargs)
233 ret = func(*fargs, **fkwargs)
235 return ret
234 return ret
236
235
237 return decorator(__wrapper, func)
236 return decorator(__wrapper, func)
@@ -1,278 +1,238 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2017 RhodeCode GmbH
3 # Copyright (C) 2010-2017 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import json
21 import json
22 import logging
22 import logging
23 import urlparse
23 import urlparse
24 import threading
24 import threading
25 from BaseHTTPServer import BaseHTTPRequestHandler
25 from BaseHTTPServer import BaseHTTPRequestHandler
26 from SocketServer import TCPServer
26 from SocketServer import TCPServer
27 from routes.util import URLGenerator
27 from routes.util import URLGenerator
28
28
29 import Pyro4
30 import pylons
29 import pylons
31 import rhodecode
30 import rhodecode
32
31
33 from rhodecode.model import meta
32 from rhodecode.model import meta
34 from rhodecode.lib import hooks_base
33 from rhodecode.lib import hooks_base
35 from rhodecode.lib.utils2 import (
34 from rhodecode.lib.utils2 import (
36 AttributeDict, safe_str, get_routes_generator_for_server_url)
35 AttributeDict, safe_str, get_routes_generator_for_server_url)
37
36
38
37
39 log = logging.getLogger(__name__)
38 log = logging.getLogger(__name__)
40
39
41
40
42 class HooksHttpHandler(BaseHTTPRequestHandler):
41 class HooksHttpHandler(BaseHTTPRequestHandler):
43 def do_POST(self):
42 def do_POST(self):
44 method, extras = self._read_request()
43 method, extras = self._read_request()
45 try:
44 try:
46 result = self._call_hook(method, extras)
45 result = self._call_hook(method, extras)
47 except Exception as e:
46 except Exception as e:
48 result = {
47 result = {
49 'exception': e.__class__.__name__,
48 'exception': e.__class__.__name__,
50 'exception_args': e.args
49 'exception_args': e.args
51 }
50 }
52 self._write_response(result)
51 self._write_response(result)
53
52
54 def _read_request(self):
53 def _read_request(self):
55 length = int(self.headers['Content-Length'])
54 length = int(self.headers['Content-Length'])
56 body = self.rfile.read(length).decode('utf-8')
55 body = self.rfile.read(length).decode('utf-8')
57 data = json.loads(body)
56 data = json.loads(body)
58 return data['method'], data['extras']
57 return data['method'], data['extras']
59
58
60 def _write_response(self, result):
59 def _write_response(self, result):
61 self.send_response(200)
60 self.send_response(200)
62 self.send_header("Content-type", "text/json")
61 self.send_header("Content-type", "text/json")
63 self.end_headers()
62 self.end_headers()
64 self.wfile.write(json.dumps(result))
63 self.wfile.write(json.dumps(result))
65
64
66 def _call_hook(self, method, extras):
65 def _call_hook(self, method, extras):
67 hooks = Hooks()
66 hooks = Hooks()
68 try:
67 try:
69 result = getattr(hooks, method)(extras)
68 result = getattr(hooks, method)(extras)
70 finally:
69 finally:
71 meta.Session.remove()
70 meta.Session.remove()
72 return result
71 return result
73
72
74 def log_message(self, format, *args):
73 def log_message(self, format, *args):
75 """
74 """
76 This is an overriden method of BaseHTTPRequestHandler which logs using
75 This is an overridden method of BaseHTTPRequestHandler which logs using
77 logging library instead of writing directly to stderr.
76 logging library instead of writing directly to stderr.
78 """
77 """
79
78
80 message = format % args
79 message = format % args
81
80
82 # TODO: mikhail: add different log levels support
81 # TODO: mikhail: add different log levels support
83 log.debug(
82 log.debug(
84 "%s - - [%s] %s", self.client_address[0],
83 "%s - - [%s] %s", self.client_address[0],
85 self.log_date_time_string(), message)
84 self.log_date_time_string(), message)
86
85
87
86
88 class DummyHooksCallbackDaemon(object):
87 class DummyHooksCallbackDaemon(object):
89 def __init__(self):
88 def __init__(self):
90 self.hooks_module = Hooks.__module__
89 self.hooks_module = Hooks.__module__
91
90
92 def __enter__(self):
91 def __enter__(self):
93 log.debug('Running dummy hooks callback daemon')
92 log.debug('Running dummy hooks callback daemon')
94 return self
93 return self
95
94
96 def __exit__(self, exc_type, exc_val, exc_tb):
95 def __exit__(self, exc_type, exc_val, exc_tb):
97 log.debug('Exiting dummy hooks callback daemon')
96 log.debug('Exiting dummy hooks callback daemon')
98
97
99
98
100 class ThreadedHookCallbackDaemon(object):
99 class ThreadedHookCallbackDaemon(object):
101
100
102 _callback_thread = None
101 _callback_thread = None
103 _daemon = None
102 _daemon = None
104 _done = False
103 _done = False
105
104
106 def __init__(self):
105 def __init__(self):
107 self._prepare()
106 self._prepare()
108
107
109 def __enter__(self):
108 def __enter__(self):
110 self._run()
109 self._run()
111 return self
110 return self
112
111
113 def __exit__(self, exc_type, exc_val, exc_tb):
112 def __exit__(self, exc_type, exc_val, exc_tb):
114 self._stop()
113 self._stop()
115
114
116 def _prepare(self):
115 def _prepare(self):
117 raise NotImplementedError()
116 raise NotImplementedError()
118
117
119 def _run(self):
118 def _run(self):
120 raise NotImplementedError()
119 raise NotImplementedError()
121
120
122 def _stop(self):
121 def _stop(self):
123 raise NotImplementedError()
122 raise NotImplementedError()
124
123
125
124
126 class Pyro4HooksCallbackDaemon(ThreadedHookCallbackDaemon):
127 """
128 Context manager which will run a callback daemon in a background thread.
129 """
130
131 hooks_uri = None
132
133 def _prepare(self):
134 log.debug("Preparing callback daemon and registering hook object")
135 self._daemon = Pyro4.Daemon()
136 hooks_interface = Hooks()
137 self.hooks_uri = str(self._daemon.register(hooks_interface))
138 log.debug("Hooks uri is: %s", self.hooks_uri)
139
140 def _run(self):
141 log.debug("Running event loop of callback daemon in background thread")
142 callback_thread = threading.Thread(
143 target=self._daemon.requestLoop,
144 kwargs={'loopCondition': lambda: not self._done})
145 callback_thread.daemon = True
146 callback_thread.start()
147 self._callback_thread = callback_thread
148
149 def _stop(self):
150 log.debug("Waiting for background thread to finish.")
151 self._done = True
152 self._callback_thread.join()
153 self._daemon.close()
154 self._daemon = None
155 self._callback_thread = None
156
157
158 class HttpHooksCallbackDaemon(ThreadedHookCallbackDaemon):
125 class HttpHooksCallbackDaemon(ThreadedHookCallbackDaemon):
159 """
126 """
160 Context manager which will run a callback daemon in a background thread.
127 Context manager which will run a callback daemon in a background thread.
161 """
128 """
162
129
163 hooks_uri = None
130 hooks_uri = None
164
131
165 IP_ADDRESS = '127.0.0.1'
132 IP_ADDRESS = '127.0.0.1'
166
133
167 # From Python docs: Polling reduces our responsiveness to a shutdown
134 # From Python docs: Polling reduces our responsiveness to a shutdown
168 # request and wastes cpu at all other times.
135 # request and wastes cpu at all other times.
169 POLL_INTERVAL = 0.1
136 POLL_INTERVAL = 0.1
170
137
171 def _prepare(self):
138 def _prepare(self):
172 log.debug("Preparing callback daemon and registering hook object")
139 log.debug("Preparing callback daemon and registering hook object")
173
140
174 self._done = False
141 self._done = False
175 self._daemon = TCPServer((self.IP_ADDRESS, 0), HooksHttpHandler)
142 self._daemon = TCPServer((self.IP_ADDRESS, 0), HooksHttpHandler)
176 _, port = self._daemon.server_address
143 _, port = self._daemon.server_address
177 self.hooks_uri = '{}:{}'.format(self.IP_ADDRESS, port)
144 self.hooks_uri = '{}:{}'.format(self.IP_ADDRESS, port)
178
145
179 log.debug("Hooks uri is: %s", self.hooks_uri)
146 log.debug("Hooks uri is: %s", self.hooks_uri)
180
147
181 def _run(self):
148 def _run(self):
182 log.debug("Running event loop of callback daemon in background thread")
149 log.debug("Running event loop of callback daemon in background thread")
183 callback_thread = threading.Thread(
150 callback_thread = threading.Thread(
184 target=self._daemon.serve_forever,
151 target=self._daemon.serve_forever,
185 kwargs={'poll_interval': self.POLL_INTERVAL})
152 kwargs={'poll_interval': self.POLL_INTERVAL})
186 callback_thread.daemon = True
153 callback_thread.daemon = True
187 callback_thread.start()
154 callback_thread.start()
188 self._callback_thread = callback_thread
155 self._callback_thread = callback_thread
189
156
190 def _stop(self):
157 def _stop(self):
191 log.debug("Waiting for background thread to finish.")
158 log.debug("Waiting for background thread to finish.")
192 self._daemon.shutdown()
159 self._daemon.shutdown()
193 self._callback_thread.join()
160 self._callback_thread.join()
194 self._daemon = None
161 self._daemon = None
195 self._callback_thread = None
162 self._callback_thread = None
196
163
197
164
198 def prepare_callback_daemon(extras, protocol, use_direct_calls):
165 def prepare_callback_daemon(extras, protocol, use_direct_calls):
199 callback_daemon = None
166 callback_daemon = None
200
167
201 if use_direct_calls:
168 if use_direct_calls:
202 callback_daemon = DummyHooksCallbackDaemon()
169 callback_daemon = DummyHooksCallbackDaemon()
203 extras['hooks_module'] = callback_daemon.hooks_module
170 extras['hooks_module'] = callback_daemon.hooks_module
204 else:
171 else:
205 if protocol == 'pyro4':
172 if protocol == 'http':
206 callback_daemon = Pyro4HooksCallbackDaemon()
207 elif protocol == 'http':
208 callback_daemon = HttpHooksCallbackDaemon()
173 callback_daemon = HttpHooksCallbackDaemon()
209 else:
174 else:
210 log.error('Unsupported callback daemon protocol "%s"', protocol)
175 log.error('Unsupported callback daemon protocol "%s"', protocol)
211 raise Exception('Unsupported callback daemon protocol.')
176 raise Exception('Unsupported callback daemon protocol.')
212
177
213 extras['hooks_uri'] = callback_daemon.hooks_uri
178 extras['hooks_uri'] = callback_daemon.hooks_uri
214 extras['hooks_protocol'] = protocol
179 extras['hooks_protocol'] = protocol
215
180
216 return callback_daemon, extras
181 return callback_daemon, extras
217
182
218
183
219 class Hooks(object):
184 class Hooks(object):
220 """
185 """
221 Exposes the hooks for remote call backs
186 Exposes the hooks for remote call backs
222 """
187 """
223
188
224 @Pyro4.callback
225 def repo_size(self, extras):
189 def repo_size(self, extras):
226 log.debug("Called repo_size of Hooks object")
190 log.debug("Called repo_size of Hooks object")
227 return self._call_hook(hooks_base.repo_size, extras)
191 return self._call_hook(hooks_base.repo_size, extras)
228
192
229 @Pyro4.callback
230 def pre_pull(self, extras):
193 def pre_pull(self, extras):
231 log.debug("Called pre_pull of Hooks object")
194 log.debug("Called pre_pull of Hooks object")
232 return self._call_hook(hooks_base.pre_pull, extras)
195 return self._call_hook(hooks_base.pre_pull, extras)
233
196
234 @Pyro4.callback
235 def post_pull(self, extras):
197 def post_pull(self, extras):
236 log.debug("Called post_pull of Hooks object")
198 log.debug("Called post_pull of Hooks object")
237 return self._call_hook(hooks_base.post_pull, extras)
199 return self._call_hook(hooks_base.post_pull, extras)
238
200
239 @Pyro4.callback
240 def pre_push(self, extras):
201 def pre_push(self, extras):
241 log.debug("Called pre_push of Hooks object")
202 log.debug("Called pre_push of Hooks object")
242 return self._call_hook(hooks_base.pre_push, extras)
203 return self._call_hook(hooks_base.pre_push, extras)
243
204
244 @Pyro4.callback
245 def post_push(self, extras):
205 def post_push(self, extras):
246 log.debug("Called post_push of Hooks object")
206 log.debug("Called post_push of Hooks object")
247 return self._call_hook(hooks_base.post_push, extras)
207 return self._call_hook(hooks_base.post_push, extras)
248
208
249 def _call_hook(self, hook, extras):
209 def _call_hook(self, hook, extras):
250 extras = AttributeDict(extras)
210 extras = AttributeDict(extras)
251 pylons_router = get_routes_generator_for_server_url(extras.server_url)
211 pylons_router = get_routes_generator_for_server_url(extras.server_url)
252 pylons.url._push_object(pylons_router)
212 pylons.url._push_object(pylons_router)
253
213
254 try:
214 try:
255 result = hook(extras)
215 result = hook(extras)
256 except Exception as error:
216 except Exception as error:
257 log.exception('Exception when handling hook %s', hook)
217 log.exception('Exception when handling hook %s', hook)
258 error_args = error.args
218 error_args = error.args
259 return {
219 return {
260 'status': 128,
220 'status': 128,
261 'output': '',
221 'output': '',
262 'exception': type(error).__name__,
222 'exception': type(error).__name__,
263 'exception_args': error_args,
223 'exception_args': error_args,
264 }
224 }
265 finally:
225 finally:
266 pylons.url._pop_object()
226 pylons.url._pop_object()
267 meta.Session.remove()
227 meta.Session.remove()
268
228
269 return {
229 return {
270 'status': result.status,
230 'status': result.status,
271 'output': result.output,
231 'output': result.output,
272 }
232 }
273
233
274 def __enter__(self):
234 def __enter__(self):
275 return self
235 return self
276
236
277 def __exit__(self, exc_type, exc_val, exc_tb):
237 def __exit__(self, exc_type, exc_val, exc_tb):
278 pass
238 pass
@@ -1,136 +1,141 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2017 RhodeCode GmbH
3 # Copyright (C) 2010-2017 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import sys
21 import sys
22 import logging
22 import logging
23
23
24
24
25 BLACK, RED, GREEN, YELLOW, BLUE, MAGENTA, CYAN, WHITE = xrange(30, 38)
25 BLACK, RED, GREEN, YELLOW, BLUE, MAGENTA, CYAN, WHITE = xrange(30, 38)
26
26
27 # Sequences
27 # Sequences
28 RESET_SEQ = "\033[0m"
28 RESET_SEQ = "\033[0m"
29 COLOR_SEQ = "\033[0;%dm"
29 COLOR_SEQ = "\033[0;%dm"
30 BOLD_SEQ = "\033[1m"
30 BOLD_SEQ = "\033[1m"
31
31
32 COLORS = {
32 COLORS = {
33 'CRITICAL': MAGENTA,
33 'CRITICAL': MAGENTA,
34 'ERROR': RED,
34 'ERROR': RED,
35 'WARNING': CYAN,
35 'WARNING': CYAN,
36 'INFO': GREEN,
36 'INFO': GREEN,
37 'DEBUG': BLUE,
37 'DEBUG': BLUE,
38 'SQL': YELLOW
38 'SQL': YELLOW
39 }
39 }
40
40
41
41
42 def one_space_trim(s):
42 def one_space_trim(s):
43 if s.find(" ") == -1:
43 if s.find(" ") == -1:
44 return s
44 return s
45 else:
45 else:
46 s = s.replace(' ', ' ')
46 s = s.replace(' ', ' ')
47 return one_space_trim(s)
47 return one_space_trim(s)
48
48
49
49
50 def format_sql(sql):
50 def format_sql(sql):
51 sql = sql.replace('\n', '')
51 sql = sql.replace('\n', '')
52 sql = one_space_trim(sql)
52 sql = one_space_trim(sql)
53 sql = sql\
53 sql = sql\
54 .replace(',', ',\n\t')\
54 .replace(',', ',\n\t')\
55 .replace('SELECT', '\n\tSELECT \n\t')\
55 .replace('SELECT', '\n\tSELECT \n\t')\
56 .replace('UPDATE', '\n\tUPDATE \n\t')\
56 .replace('UPDATE', '\n\tUPDATE \n\t')\
57 .replace('DELETE', '\n\tDELETE \n\t')\
57 .replace('DELETE', '\n\tDELETE \n\t')\
58 .replace('FROM', '\n\tFROM')\
58 .replace('FROM', '\n\tFROM')\
59 .replace('ORDER BY', '\n\tORDER BY')\
59 .replace('ORDER BY', '\n\tORDER BY')\
60 .replace('LIMIT', '\n\tLIMIT')\
60 .replace('LIMIT', '\n\tLIMIT')\
61 .replace('WHERE', '\n\tWHERE')\
61 .replace('WHERE', '\n\tWHERE')\
62 .replace('AND', '\n\tAND')\
62 .replace('AND', '\n\tAND')\
63 .replace('LEFT', '\n\tLEFT')\
63 .replace('LEFT', '\n\tLEFT')\
64 .replace('INNER', '\n\tINNER')\
64 .replace('INNER', '\n\tINNER')\
65 .replace('INSERT', '\n\tINSERT')\
65 .replace('INSERT', '\n\tINSERT')\
66 .replace('DELETE', '\n\tDELETE')
66 .replace('DELETE', '\n\tDELETE')
67 return sql
67 return sql
68
68
69
69
70 class Pyro4AwareFormatter(logging.Formatter):
70 class ExceptionAwareFormatter(logging.Formatter):
71 """
71 """
72 Extended logging formatter which prints out Pyro4 remote tracebacks.
72 Extended logging formatter which prints out remote tracebacks.
73 """
73 """
74
74
75 def formatException(self, ei):
75 def formatException(self, ei):
76 ex_type, ex_value, ex_tb = ei
76 ex_type, ex_value, ex_tb = ei
77
77
78 local_tb = logging.Formatter.formatException(self, ei)
78 local_tb = logging.Formatter.formatException(self, ei)
79 if hasattr(ex_value, '_vcs_server_traceback'):
79 if hasattr(ex_value, '_vcs_server_traceback'):
80
80
81 def formatRemoteTraceback(remote_tb_lines):
81 def formatRemoteTraceback(remote_tb_lines):
82 result = ["\n +--- This exception occured remotely on VCSServer - Remote traceback:\n\n"]
82 result = ["\n +--- This exception occured remotely on VCSServer - Remote traceback:\n\n"]
83 result.append(remote_tb_lines)
83 result.append(remote_tb_lines)
84 result.append("\n +--- End of remote traceback\n")
84 result.append("\n +--- End of remote traceback\n")
85 return result
85 return result
86
86
87 try:
87 try:
88 if ex_type is not None and ex_value is None and ex_tb is None:
88 if ex_type is not None and ex_value is None and ex_tb is None:
89 # possible old (3.x) call syntax where caller is only providing exception object
89 # possible old (3.x) call syntax where caller is only
90 # providing exception object
90 if type(ex_type) is not type:
91 if type(ex_type) is not type:
91 raise TypeError(
92 raise TypeError(
92 "invalid argument: ex_type should be an exception type, or just supply no arguments at all")
93 "invalid argument: ex_type should be an exception "
94 "type, or just supply no arguments at all")
93 if ex_type is None and ex_tb is None:
95 if ex_type is None and ex_tb is None:
94 ex_type, ex_value, ex_tb = sys.exc_info()
96 ex_type, ex_value, ex_tb = sys.exc_info()
95
97
96 remote_tb = getattr(ex_value, "_vcs_server_traceback", None)
98 remote_tb = getattr(ex_value, "_vcs_server_traceback", None)
97
99
98 if remote_tb:
100 if remote_tb:
99 remote_tb = formatRemoteTraceback(remote_tb)
101 remote_tb = formatRemoteTraceback(remote_tb)
100 return local_tb + ''.join(remote_tb)
102 return local_tb + ''.join(remote_tb)
101 finally:
103 finally:
102 # clean up cycle to traceback, to allow proper GC
104 # clean up cycle to traceback, to allow proper GC
103 del ex_type, ex_value, ex_tb
105 del ex_type, ex_value, ex_tb
104
106
105 return local_tb
107 return local_tb
106
108
107
109
108 class ColorFormatter(Pyro4AwareFormatter):
110 class ColorFormatter(ExceptionAwareFormatter):
109
111
110 def format(self, record):
112 def format(self, record):
111 """
113 """
112 Changes record's levelname to use with COLORS enum
114 Changes record's levelname to use with COLORS enum
113 """
115 """
114
116
115 levelname = record.levelname
117 levelname = record.levelname
116 start = COLOR_SEQ % (COLORS[levelname])
118 start = COLOR_SEQ % (COLORS[levelname])
117 def_record = logging.Formatter.format(self, record)
119 def_record = logging.Formatter.format(self, record)
118 end = RESET_SEQ
120 end = RESET_SEQ
119
121
120 colored_record = ''.join([start, def_record, end])
122 colored_record = ''.join([start, def_record, end])
121 return colored_record
123 return colored_record
122
124
123
125
124 class ColorFormatterSql(logging.Formatter):
126 class ColorFormatterSql(logging.Formatter):
125
127
126 def format(self, record):
128 def format(self, record):
127 """
129 """
128 Changes record's levelname to use with COLORS enum
130 Changes record's levelname to use with COLORS enum
129 """
131 """
130
132
131 start = COLOR_SEQ % (COLORS['SQL'])
133 start = COLOR_SEQ % (COLORS['SQL'])
132 def_record = format_sql(logging.Formatter.format(self, record))
134 def_record = format_sql(logging.Formatter.format(self, record))
133 end = RESET_SEQ
135 end = RESET_SEQ
134
136
135 colored_record = ''.join([start, def_record, end])
137 colored_record = ''.join([start, def_record, end])
136 return colored_record
138 return colored_record
139
140 # marcink: needs to stay with this name for backward .ini compatability
141 Pyro4AwareFormatter = ExceptionAwareFormatter
@@ -1,529 +1,526 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2014-2017 RhodeCode GmbH
3 # Copyright (C) 2014-2017 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 SimpleVCS middleware for handling protocol request (push/clone etc.)
22 SimpleVCS middleware for handling protocol request (push/clone etc.)
23 It's implemented with basic auth function
23 It's implemented with basic auth function
24 """
24 """
25
25
26 import os
26 import os
27 import logging
27 import logging
28 import importlib
28 import importlib
29 import re
29 import re
30 from functools import wraps
30 from functools import wraps
31
31
32 from paste.httpheaders import REMOTE_USER, AUTH_TYPE
32 from paste.httpheaders import REMOTE_USER, AUTH_TYPE
33 from webob.exc import (
33 from webob.exc import (
34 HTTPNotFound, HTTPForbidden, HTTPNotAcceptable, HTTPInternalServerError)
34 HTTPNotFound, HTTPForbidden, HTTPNotAcceptable, HTTPInternalServerError)
35
35
36 import rhodecode
36 import rhodecode
37 from rhodecode.authentication.base import authenticate, VCS_TYPE
37 from rhodecode.authentication.base import authenticate, VCS_TYPE
38 from rhodecode.lib.auth import AuthUser, HasPermissionAnyMiddleware
38 from rhodecode.lib.auth import AuthUser, HasPermissionAnyMiddleware
39 from rhodecode.lib.base import BasicAuth, get_ip_addr, vcs_operation_context
39 from rhodecode.lib.base import BasicAuth, get_ip_addr, vcs_operation_context
40 from rhodecode.lib.exceptions import (
40 from rhodecode.lib.exceptions import (
41 HTTPLockedRC, HTTPRequirementError, UserCreationError,
41 HTTPLockedRC, HTTPRequirementError, UserCreationError,
42 NotAllowedToCreateUserError)
42 NotAllowedToCreateUserError)
43 from rhodecode.lib.hooks_daemon import prepare_callback_daemon
43 from rhodecode.lib.hooks_daemon import prepare_callback_daemon
44 from rhodecode.lib.middleware import appenlight
44 from rhodecode.lib.middleware import appenlight
45 from rhodecode.lib.middleware.utils import scm_app, scm_app_http
45 from rhodecode.lib.middleware.utils import scm_app_http
46 from rhodecode.lib.utils import (
46 from rhodecode.lib.utils import (
47 is_valid_repo, get_rhodecode_realm, get_rhodecode_base_path, SLUG_RE)
47 is_valid_repo, get_rhodecode_realm, get_rhodecode_base_path, SLUG_RE)
48 from rhodecode.lib.utils2 import safe_str, fix_PATH, str2bool, safe_unicode
48 from rhodecode.lib.utils2 import safe_str, fix_PATH, str2bool, safe_unicode
49 from rhodecode.lib.vcs.conf import settings as vcs_settings
49 from rhodecode.lib.vcs.conf import settings as vcs_settings
50 from rhodecode.lib.vcs.backends import base
50 from rhodecode.lib.vcs.backends import base
51 from rhodecode.model import meta
51 from rhodecode.model import meta
52 from rhodecode.model.db import User, Repository, PullRequest
52 from rhodecode.model.db import User, Repository, PullRequest
53 from rhodecode.model.scm import ScmModel
53 from rhodecode.model.scm import ScmModel
54 from rhodecode.model.pull_request import PullRequestModel
54 from rhodecode.model.pull_request import PullRequestModel
55
55
56
56
57 log = logging.getLogger(__name__)
57 log = logging.getLogger(__name__)
58
58
59
59
60 def initialize_generator(factory):
60 def initialize_generator(factory):
61 """
61 """
62 Initializes the returned generator by draining its first element.
62 Initializes the returned generator by draining its first element.
63
63
64 This can be used to give a generator an initializer, which is the code
64 This can be used to give a generator an initializer, which is the code
65 up to the first yield statement. This decorator enforces that the first
65 up to the first yield statement. This decorator enforces that the first
66 produced element has the value ``"__init__"`` to make its special
66 produced element has the value ``"__init__"`` to make its special
67 purpose very explicit in the using code.
67 purpose very explicit in the using code.
68 """
68 """
69
69
70 @wraps(factory)
70 @wraps(factory)
71 def wrapper(*args, **kwargs):
71 def wrapper(*args, **kwargs):
72 gen = factory(*args, **kwargs)
72 gen = factory(*args, **kwargs)
73 try:
73 try:
74 init = gen.next()
74 init = gen.next()
75 except StopIteration:
75 except StopIteration:
76 raise ValueError('Generator must yield at least one element.')
76 raise ValueError('Generator must yield at least one element.')
77 if init != "__init__":
77 if init != "__init__":
78 raise ValueError('First yielded element must be "__init__".')
78 raise ValueError('First yielded element must be "__init__".')
79 return gen
79 return gen
80 return wrapper
80 return wrapper
81
81
82
82
83 class SimpleVCS(object):
83 class SimpleVCS(object):
84 """Common functionality for SCM HTTP handlers."""
84 """Common functionality for SCM HTTP handlers."""
85
85
86 SCM = 'unknown'
86 SCM = 'unknown'
87
87
88 acl_repo_name = None
88 acl_repo_name = None
89 url_repo_name = None
89 url_repo_name = None
90 vcs_repo_name = None
90 vcs_repo_name = None
91
91
92 # We have to handle requests to shadow repositories different than requests
92 # We have to handle requests to shadow repositories different than requests
93 # to normal repositories. Therefore we have to distinguish them. To do this
93 # to normal repositories. Therefore we have to distinguish them. To do this
94 # we use this regex which will match only on URLs pointing to shadow
94 # we use this regex which will match only on URLs pointing to shadow
95 # repositories.
95 # repositories.
96 shadow_repo_re = re.compile(
96 shadow_repo_re = re.compile(
97 '(?P<groups>(?:{slug_pat}/)*)' # repo groups
97 '(?P<groups>(?:{slug_pat}/)*)' # repo groups
98 '(?P<target>{slug_pat})/' # target repo
98 '(?P<target>{slug_pat})/' # target repo
99 'pull-request/(?P<pr_id>\d+)/' # pull request
99 'pull-request/(?P<pr_id>\d+)/' # pull request
100 'repository$' # shadow repo
100 'repository$' # shadow repo
101 .format(slug_pat=SLUG_RE.pattern))
101 .format(slug_pat=SLUG_RE.pattern))
102
102
103 def __init__(self, application, config, registry):
103 def __init__(self, application, config, registry):
104 self.registry = registry
104 self.registry = registry
105 self.application = application
105 self.application = application
106 self.config = config
106 self.config = config
107 # re-populated by specialized middleware
107 # re-populated by specialized middleware
108 self.repo_vcs_config = base.Config()
108 self.repo_vcs_config = base.Config()
109
109
110 # base path of repo locations
110 # base path of repo locations
111 self.basepath = get_rhodecode_base_path()
111 self.basepath = get_rhodecode_base_path()
112 # authenticate this VCS request using authfunc
112 # authenticate this VCS request using authfunc
113 auth_ret_code_detection = \
113 auth_ret_code_detection = \
114 str2bool(self.config.get('auth_ret_code_detection', False))
114 str2bool(self.config.get('auth_ret_code_detection', False))
115 self.authenticate = BasicAuth(
115 self.authenticate = BasicAuth(
116 '', authenticate, registry, config.get('auth_ret_code'),
116 '', authenticate, registry, config.get('auth_ret_code'),
117 auth_ret_code_detection)
117 auth_ret_code_detection)
118 self.ip_addr = '0.0.0.0'
118 self.ip_addr = '0.0.0.0'
119
119
120 def set_repo_names(self, environ):
120 def set_repo_names(self, environ):
121 """
121 """
122 This will populate the attributes acl_repo_name, url_repo_name,
122 This will populate the attributes acl_repo_name, url_repo_name,
123 vcs_repo_name and is_shadow_repo. In case of requests to normal (non
123 vcs_repo_name and is_shadow_repo. In case of requests to normal (non
124 shadow) repositories all names are equal. In case of requests to a
124 shadow) repositories all names are equal. In case of requests to a
125 shadow repository the acl-name points to the target repo of the pull
125 shadow repository the acl-name points to the target repo of the pull
126 request and the vcs-name points to the shadow repo file system path.
126 request and the vcs-name points to the shadow repo file system path.
127 The url-name is always the URL used by the vcs client program.
127 The url-name is always the URL used by the vcs client program.
128
128
129 Example in case of a shadow repo:
129 Example in case of a shadow repo:
130 acl_repo_name = RepoGroup/MyRepo
130 acl_repo_name = RepoGroup/MyRepo
131 url_repo_name = RepoGroup/MyRepo/pull-request/3/repository
131 url_repo_name = RepoGroup/MyRepo/pull-request/3/repository
132 vcs_repo_name = /repo/base/path/RepoGroup/.__shadow_MyRepo_pr-3'
132 vcs_repo_name = /repo/base/path/RepoGroup/.__shadow_MyRepo_pr-3'
133 """
133 """
134 # First we set the repo name from URL for all attributes. This is the
134 # First we set the repo name from URL for all attributes. This is the
135 # default if handling normal (non shadow) repo requests.
135 # default if handling normal (non shadow) repo requests.
136 self.url_repo_name = self._get_repository_name(environ)
136 self.url_repo_name = self._get_repository_name(environ)
137 self.acl_repo_name = self.vcs_repo_name = self.url_repo_name
137 self.acl_repo_name = self.vcs_repo_name = self.url_repo_name
138 self.is_shadow_repo = False
138 self.is_shadow_repo = False
139
139
140 # Check if this is a request to a shadow repository.
140 # Check if this is a request to a shadow repository.
141 match = self.shadow_repo_re.match(self.url_repo_name)
141 match = self.shadow_repo_re.match(self.url_repo_name)
142 if match:
142 if match:
143 match_dict = match.groupdict()
143 match_dict = match.groupdict()
144
144
145 # Build acl repo name from regex match.
145 # Build acl repo name from regex match.
146 acl_repo_name = safe_unicode('{groups}{target}'.format(
146 acl_repo_name = safe_unicode('{groups}{target}'.format(
147 groups=match_dict['groups'] or '',
147 groups=match_dict['groups'] or '',
148 target=match_dict['target']))
148 target=match_dict['target']))
149
149
150 # Retrieve pull request instance by ID from regex match.
150 # Retrieve pull request instance by ID from regex match.
151 pull_request = PullRequest.get(match_dict['pr_id'])
151 pull_request = PullRequest.get(match_dict['pr_id'])
152
152
153 # Only proceed if we got a pull request and if acl repo name from
153 # Only proceed if we got a pull request and if acl repo name from
154 # URL equals the target repo name of the pull request.
154 # URL equals the target repo name of the pull request.
155 if pull_request and (acl_repo_name ==
155 if pull_request and (acl_repo_name ==
156 pull_request.target_repo.repo_name):
156 pull_request.target_repo.repo_name):
157 # Get file system path to shadow repository.
157 # Get file system path to shadow repository.
158 workspace_id = PullRequestModel()._workspace_id(pull_request)
158 workspace_id = PullRequestModel()._workspace_id(pull_request)
159 target_vcs = pull_request.target_repo.scm_instance()
159 target_vcs = pull_request.target_repo.scm_instance()
160 vcs_repo_name = target_vcs._get_shadow_repository_path(
160 vcs_repo_name = target_vcs._get_shadow_repository_path(
161 workspace_id)
161 workspace_id)
162
162
163 # Store names for later usage.
163 # Store names for later usage.
164 self.vcs_repo_name = vcs_repo_name
164 self.vcs_repo_name = vcs_repo_name
165 self.acl_repo_name = acl_repo_name
165 self.acl_repo_name = acl_repo_name
166 self.is_shadow_repo = True
166 self.is_shadow_repo = True
167
167
168 log.debug('Setting all VCS repository names: %s', {
168 log.debug('Setting all VCS repository names: %s', {
169 'acl_repo_name': self.acl_repo_name,
169 'acl_repo_name': self.acl_repo_name,
170 'url_repo_name': self.url_repo_name,
170 'url_repo_name': self.url_repo_name,
171 'vcs_repo_name': self.vcs_repo_name,
171 'vcs_repo_name': self.vcs_repo_name,
172 })
172 })
173
173
174 @property
174 @property
175 def scm_app(self):
175 def scm_app(self):
176 custom_implementation = self.config['vcs.scm_app_implementation']
176 custom_implementation = self.config['vcs.scm_app_implementation']
177 if custom_implementation == 'http':
177 if custom_implementation == 'http':
178 log.info('Using HTTP implementation of scm app.')
178 log.info('Using HTTP implementation of scm app.')
179 scm_app_impl = scm_app_http
179 scm_app_impl = scm_app_http
180 elif custom_implementation == 'pyro4':
181 log.info('Using Pyro implementation of scm app.')
182 scm_app_impl = scm_app
183 else:
180 else:
184 log.info('Using custom implementation of scm_app: "{}"'.format(
181 log.info('Using custom implementation of scm_app: "{}"'.format(
185 custom_implementation))
182 custom_implementation))
186 scm_app_impl = importlib.import_module(custom_implementation)
183 scm_app_impl = importlib.import_module(custom_implementation)
187 return scm_app_impl
184 return scm_app_impl
188
185
189 def _get_by_id(self, repo_name):
186 def _get_by_id(self, repo_name):
190 """
187 """
191 Gets a special pattern _<ID> from clone url and tries to replace it
188 Gets a special pattern _<ID> from clone url and tries to replace it
192 with a repository_name for support of _<ID> non changeable urls
189 with a repository_name for support of _<ID> non changeable urls
193 """
190 """
194
191
195 data = repo_name.split('/')
192 data = repo_name.split('/')
196 if len(data) >= 2:
193 if len(data) >= 2:
197 from rhodecode.model.repo import RepoModel
194 from rhodecode.model.repo import RepoModel
198 by_id_match = RepoModel().get_repo_by_id(repo_name)
195 by_id_match = RepoModel().get_repo_by_id(repo_name)
199 if by_id_match:
196 if by_id_match:
200 data[1] = by_id_match.repo_name
197 data[1] = by_id_match.repo_name
201
198
202 return safe_str('/'.join(data))
199 return safe_str('/'.join(data))
203
200
204 def _invalidate_cache(self, repo_name):
201 def _invalidate_cache(self, repo_name):
205 """
202 """
206 Set's cache for this repository for invalidation on next access
203 Set's cache for this repository for invalidation on next access
207
204
208 :param repo_name: full repo name, also a cache key
205 :param repo_name: full repo name, also a cache key
209 """
206 """
210 ScmModel().mark_for_invalidation(repo_name)
207 ScmModel().mark_for_invalidation(repo_name)
211
208
212 def is_valid_and_existing_repo(self, repo_name, base_path, scm_type):
209 def is_valid_and_existing_repo(self, repo_name, base_path, scm_type):
213 db_repo = Repository.get_by_repo_name(repo_name)
210 db_repo = Repository.get_by_repo_name(repo_name)
214 if not db_repo:
211 if not db_repo:
215 log.debug('Repository `%s` not found inside the database.',
212 log.debug('Repository `%s` not found inside the database.',
216 repo_name)
213 repo_name)
217 return False
214 return False
218
215
219 if db_repo.repo_type != scm_type:
216 if db_repo.repo_type != scm_type:
220 log.warning(
217 log.warning(
221 'Repository `%s` have incorrect scm_type, expected %s got %s',
218 'Repository `%s` have incorrect scm_type, expected %s got %s',
222 repo_name, db_repo.repo_type, scm_type)
219 repo_name, db_repo.repo_type, scm_type)
223 return False
220 return False
224
221
225 return is_valid_repo(repo_name, base_path, explicit_scm=scm_type)
222 return is_valid_repo(repo_name, base_path, explicit_scm=scm_type)
226
223
227 def valid_and_active_user(self, user):
224 def valid_and_active_user(self, user):
228 """
225 """
229 Checks if that user is not empty, and if it's actually object it checks
226 Checks if that user is not empty, and if it's actually object it checks
230 if he's active.
227 if he's active.
231
228
232 :param user: user object or None
229 :param user: user object or None
233 :return: boolean
230 :return: boolean
234 """
231 """
235 if user is None:
232 if user is None:
236 return False
233 return False
237
234
238 elif user.active:
235 elif user.active:
239 return True
236 return True
240
237
241 return False
238 return False
242
239
243 def _check_permission(self, action, user, repo_name, ip_addr=None):
240 def _check_permission(self, action, user, repo_name, ip_addr=None):
244 """
241 """
245 Checks permissions using action (push/pull) user and repository
242 Checks permissions using action (push/pull) user and repository
246 name
243 name
247
244
248 :param action: push or pull action
245 :param action: push or pull action
249 :param user: user instance
246 :param user: user instance
250 :param repo_name: repository name
247 :param repo_name: repository name
251 """
248 """
252 # check IP
249 # check IP
253 inherit = user.inherit_default_permissions
250 inherit = user.inherit_default_permissions
254 ip_allowed = AuthUser.check_ip_allowed(user.user_id, ip_addr,
251 ip_allowed = AuthUser.check_ip_allowed(user.user_id, ip_addr,
255 inherit_from_default=inherit)
252 inherit_from_default=inherit)
256 if ip_allowed:
253 if ip_allowed:
257 log.info('Access for IP:%s allowed', ip_addr)
254 log.info('Access for IP:%s allowed', ip_addr)
258 else:
255 else:
259 return False
256 return False
260
257
261 if action == 'push':
258 if action == 'push':
262 if not HasPermissionAnyMiddleware('repository.write',
259 if not HasPermissionAnyMiddleware('repository.write',
263 'repository.admin')(user,
260 'repository.admin')(user,
264 repo_name):
261 repo_name):
265 return False
262 return False
266
263
267 else:
264 else:
268 # any other action need at least read permission
265 # any other action need at least read permission
269 if not HasPermissionAnyMiddleware('repository.read',
266 if not HasPermissionAnyMiddleware('repository.read',
270 'repository.write',
267 'repository.write',
271 'repository.admin')(user,
268 'repository.admin')(user,
272 repo_name):
269 repo_name):
273 return False
270 return False
274
271
275 return True
272 return True
276
273
277 def _check_ssl(self, environ, start_response):
274 def _check_ssl(self, environ, start_response):
278 """
275 """
279 Checks the SSL check flag and returns False if SSL is not present
276 Checks the SSL check flag and returns False if SSL is not present
280 and required True otherwise
277 and required True otherwise
281 """
278 """
282 org_proto = environ['wsgi._org_proto']
279 org_proto = environ['wsgi._org_proto']
283 # check if we have SSL required ! if not it's a bad request !
280 # check if we have SSL required ! if not it's a bad request !
284 require_ssl = str2bool(self.repo_vcs_config.get('web', 'push_ssl'))
281 require_ssl = str2bool(self.repo_vcs_config.get('web', 'push_ssl'))
285 if require_ssl and org_proto == 'http':
282 if require_ssl and org_proto == 'http':
286 log.debug('proto is %s and SSL is required BAD REQUEST !',
283 log.debug('proto is %s and SSL is required BAD REQUEST !',
287 org_proto)
284 org_proto)
288 return False
285 return False
289 return True
286 return True
290
287
291 def __call__(self, environ, start_response):
288 def __call__(self, environ, start_response):
292 try:
289 try:
293 return self._handle_request(environ, start_response)
290 return self._handle_request(environ, start_response)
294 except Exception:
291 except Exception:
295 log.exception("Exception while handling request")
292 log.exception("Exception while handling request")
296 appenlight.track_exception(environ)
293 appenlight.track_exception(environ)
297 return HTTPInternalServerError()(environ, start_response)
294 return HTTPInternalServerError()(environ, start_response)
298 finally:
295 finally:
299 meta.Session.remove()
296 meta.Session.remove()
300
297
301 def _handle_request(self, environ, start_response):
298 def _handle_request(self, environ, start_response):
302
299
303 if not self._check_ssl(environ, start_response):
300 if not self._check_ssl(environ, start_response):
304 reason = ('SSL required, while RhodeCode was unable '
301 reason = ('SSL required, while RhodeCode was unable '
305 'to detect this as SSL request')
302 'to detect this as SSL request')
306 log.debug('User not allowed to proceed, %s', reason)
303 log.debug('User not allowed to proceed, %s', reason)
307 return HTTPNotAcceptable(reason)(environ, start_response)
304 return HTTPNotAcceptable(reason)(environ, start_response)
308
305
309 if not self.url_repo_name:
306 if not self.url_repo_name:
310 log.warning('Repository name is empty: %s', self.url_repo_name)
307 log.warning('Repository name is empty: %s', self.url_repo_name)
311 # failed to get repo name, we fail now
308 # failed to get repo name, we fail now
312 return HTTPNotFound()(environ, start_response)
309 return HTTPNotFound()(environ, start_response)
313 log.debug('Extracted repo name is %s', self.url_repo_name)
310 log.debug('Extracted repo name is %s', self.url_repo_name)
314
311
315 ip_addr = get_ip_addr(environ)
312 ip_addr = get_ip_addr(environ)
316 username = None
313 username = None
317
314
318 # skip passing error to error controller
315 # skip passing error to error controller
319 environ['pylons.status_code_redirect'] = True
316 environ['pylons.status_code_redirect'] = True
320
317
321 # ======================================================================
318 # ======================================================================
322 # GET ACTION PULL or PUSH
319 # GET ACTION PULL or PUSH
323 # ======================================================================
320 # ======================================================================
324 action = self._get_action(environ)
321 action = self._get_action(environ)
325
322
326 # ======================================================================
323 # ======================================================================
327 # Check if this is a request to a shadow repository of a pull request.
324 # Check if this is a request to a shadow repository of a pull request.
328 # In this case only pull action is allowed.
325 # In this case only pull action is allowed.
329 # ======================================================================
326 # ======================================================================
330 if self.is_shadow_repo and action != 'pull':
327 if self.is_shadow_repo and action != 'pull':
331 reason = 'Only pull action is allowed for shadow repositories.'
328 reason = 'Only pull action is allowed for shadow repositories.'
332 log.debug('User not allowed to proceed, %s', reason)
329 log.debug('User not allowed to proceed, %s', reason)
333 return HTTPNotAcceptable(reason)(environ, start_response)
330 return HTTPNotAcceptable(reason)(environ, start_response)
334
331
335 # ======================================================================
332 # ======================================================================
336 # CHECK ANONYMOUS PERMISSION
333 # CHECK ANONYMOUS PERMISSION
337 # ======================================================================
334 # ======================================================================
338 if action in ['pull', 'push']:
335 if action in ['pull', 'push']:
339 anonymous_user = User.get_default_user()
336 anonymous_user = User.get_default_user()
340 username = anonymous_user.username
337 username = anonymous_user.username
341 if anonymous_user.active:
338 if anonymous_user.active:
342 # ONLY check permissions if the user is activated
339 # ONLY check permissions if the user is activated
343 anonymous_perm = self._check_permission(
340 anonymous_perm = self._check_permission(
344 action, anonymous_user, self.acl_repo_name, ip_addr)
341 action, anonymous_user, self.acl_repo_name, ip_addr)
345 else:
342 else:
346 anonymous_perm = False
343 anonymous_perm = False
347
344
348 if not anonymous_user.active or not anonymous_perm:
345 if not anonymous_user.active or not anonymous_perm:
349 if not anonymous_user.active:
346 if not anonymous_user.active:
350 log.debug('Anonymous access is disabled, running '
347 log.debug('Anonymous access is disabled, running '
351 'authentication')
348 'authentication')
352
349
353 if not anonymous_perm:
350 if not anonymous_perm:
354 log.debug('Not enough credentials to access this '
351 log.debug('Not enough credentials to access this '
355 'repository as anonymous user')
352 'repository as anonymous user')
356
353
357 username = None
354 username = None
358 # ==============================================================
355 # ==============================================================
359 # DEFAULT PERM FAILED OR ANONYMOUS ACCESS IS DISABLED SO WE
356 # DEFAULT PERM FAILED OR ANONYMOUS ACCESS IS DISABLED SO WE
360 # NEED TO AUTHENTICATE AND ASK FOR AUTH USER PERMISSIONS
357 # NEED TO AUTHENTICATE AND ASK FOR AUTH USER PERMISSIONS
361 # ==============================================================
358 # ==============================================================
362
359
363 # try to auth based on environ, container auth methods
360 # try to auth based on environ, container auth methods
364 log.debug('Running PRE-AUTH for container based authentication')
361 log.debug('Running PRE-AUTH for container based authentication')
365 pre_auth = authenticate(
362 pre_auth = authenticate(
366 '', '', environ, VCS_TYPE, registry=self.registry)
363 '', '', environ, VCS_TYPE, registry=self.registry)
367 if pre_auth and pre_auth.get('username'):
364 if pre_auth and pre_auth.get('username'):
368 username = pre_auth['username']
365 username = pre_auth['username']
369 log.debug('PRE-AUTH got %s as username', username)
366 log.debug('PRE-AUTH got %s as username', username)
370
367
371 # If not authenticated by the container, running basic auth
368 # If not authenticated by the container, running basic auth
372 if not username:
369 if not username:
373 self.authenticate.realm = get_rhodecode_realm()
370 self.authenticate.realm = get_rhodecode_realm()
374
371
375 try:
372 try:
376 result = self.authenticate(environ)
373 result = self.authenticate(environ)
377 except (UserCreationError, NotAllowedToCreateUserError) as e:
374 except (UserCreationError, NotAllowedToCreateUserError) as e:
378 log.error(e)
375 log.error(e)
379 reason = safe_str(e)
376 reason = safe_str(e)
380 return HTTPNotAcceptable(reason)(environ, start_response)
377 return HTTPNotAcceptable(reason)(environ, start_response)
381
378
382 if isinstance(result, str):
379 if isinstance(result, str):
383 AUTH_TYPE.update(environ, 'basic')
380 AUTH_TYPE.update(environ, 'basic')
384 REMOTE_USER.update(environ, result)
381 REMOTE_USER.update(environ, result)
385 username = result
382 username = result
386 else:
383 else:
387 return result.wsgi_application(environ, start_response)
384 return result.wsgi_application(environ, start_response)
388
385
389 # ==============================================================
386 # ==============================================================
390 # CHECK PERMISSIONS FOR THIS REQUEST USING GIVEN USERNAME
387 # CHECK PERMISSIONS FOR THIS REQUEST USING GIVEN USERNAME
391 # ==============================================================
388 # ==============================================================
392 user = User.get_by_username(username)
389 user = User.get_by_username(username)
393 if not self.valid_and_active_user(user):
390 if not self.valid_and_active_user(user):
394 return HTTPForbidden()(environ, start_response)
391 return HTTPForbidden()(environ, start_response)
395 username = user.username
392 username = user.username
396 user.update_lastactivity()
393 user.update_lastactivity()
397 meta.Session().commit()
394 meta.Session().commit()
398
395
399 # check user attributes for password change flag
396 # check user attributes for password change flag
400 user_obj = user
397 user_obj = user
401 if user_obj and user_obj.username != User.DEFAULT_USER and \
398 if user_obj and user_obj.username != User.DEFAULT_USER and \
402 user_obj.user_data.get('force_password_change'):
399 user_obj.user_data.get('force_password_change'):
403 reason = 'password change required'
400 reason = 'password change required'
404 log.debug('User not allowed to authenticate, %s', reason)
401 log.debug('User not allowed to authenticate, %s', reason)
405 return HTTPNotAcceptable(reason)(environ, start_response)
402 return HTTPNotAcceptable(reason)(environ, start_response)
406
403
407 # check permissions for this repository
404 # check permissions for this repository
408 perm = self._check_permission(
405 perm = self._check_permission(
409 action, user, self.acl_repo_name, ip_addr)
406 action, user, self.acl_repo_name, ip_addr)
410 if not perm:
407 if not perm:
411 return HTTPForbidden()(environ, start_response)
408 return HTTPForbidden()(environ, start_response)
412
409
413 # extras are injected into UI object and later available
410 # extras are injected into UI object and later available
414 # in hooks executed by rhodecode
411 # in hooks executed by rhodecode
415 check_locking = _should_check_locking(environ.get('QUERY_STRING'))
412 check_locking = _should_check_locking(environ.get('QUERY_STRING'))
416 extras = vcs_operation_context(
413 extras = vcs_operation_context(
417 environ, repo_name=self.acl_repo_name, username=username,
414 environ, repo_name=self.acl_repo_name, username=username,
418 action=action, scm=self.SCM, check_locking=check_locking,
415 action=action, scm=self.SCM, check_locking=check_locking,
419 is_shadow_repo=self.is_shadow_repo
416 is_shadow_repo=self.is_shadow_repo
420 )
417 )
421
418
422 # ======================================================================
419 # ======================================================================
423 # REQUEST HANDLING
420 # REQUEST HANDLING
424 # ======================================================================
421 # ======================================================================
425 repo_path = os.path.join(
422 repo_path = os.path.join(
426 safe_str(self.basepath), safe_str(self.vcs_repo_name))
423 safe_str(self.basepath), safe_str(self.vcs_repo_name))
427 log.debug('Repository path is %s', repo_path)
424 log.debug('Repository path is %s', repo_path)
428
425
429 fix_PATH()
426 fix_PATH()
430
427
431 log.info(
428 log.info(
432 '%s action on %s repo "%s" by "%s" from %s',
429 '%s action on %s repo "%s" by "%s" from %s',
433 action, self.SCM, safe_str(self.url_repo_name),
430 action, self.SCM, safe_str(self.url_repo_name),
434 safe_str(username), ip_addr)
431 safe_str(username), ip_addr)
435
432
436 return self._generate_vcs_response(
433 return self._generate_vcs_response(
437 environ, start_response, repo_path, extras, action)
434 environ, start_response, repo_path, extras, action)
438
435
439 @initialize_generator
436 @initialize_generator
440 def _generate_vcs_response(
437 def _generate_vcs_response(
441 self, environ, start_response, repo_path, extras, action):
438 self, environ, start_response, repo_path, extras, action):
442 """
439 """
443 Returns a generator for the response content.
440 Returns a generator for the response content.
444
441
445 This method is implemented as a generator, so that it can trigger
442 This method is implemented as a generator, so that it can trigger
446 the cache validation after all content sent back to the client. It
443 the cache validation after all content sent back to the client. It
447 also handles the locking exceptions which will be triggered when
444 also handles the locking exceptions which will be triggered when
448 the first chunk is produced by the underlying WSGI application.
445 the first chunk is produced by the underlying WSGI application.
449 """
446 """
450 callback_daemon, extras = self._prepare_callback_daemon(extras)
447 callback_daemon, extras = self._prepare_callback_daemon(extras)
451 config = self._create_config(extras, self.acl_repo_name)
448 config = self._create_config(extras, self.acl_repo_name)
452 log.debug('HOOKS extras is %s', extras)
449 log.debug('HOOKS extras is %s', extras)
453 app = self._create_wsgi_app(repo_path, self.url_repo_name, config)
450 app = self._create_wsgi_app(repo_path, self.url_repo_name, config)
454
451
455 try:
452 try:
456 with callback_daemon:
453 with callback_daemon:
457 try:
454 try:
458 response = app(environ, start_response)
455 response = app(environ, start_response)
459 finally:
456 finally:
460 # This statement works together with the decorator
457 # This statement works together with the decorator
461 # "initialize_generator" above. The decorator ensures that
458 # "initialize_generator" above. The decorator ensures that
462 # we hit the first yield statement before the generator is
459 # we hit the first yield statement before the generator is
463 # returned back to the WSGI server. This is needed to
460 # returned back to the WSGI server. This is needed to
464 # ensure that the call to "app" above triggers the
461 # ensure that the call to "app" above triggers the
465 # needed callback to "start_response" before the
462 # needed callback to "start_response" before the
466 # generator is actually used.
463 # generator is actually used.
467 yield "__init__"
464 yield "__init__"
468
465
469 for chunk in response:
466 for chunk in response:
470 yield chunk
467 yield chunk
471 except Exception as exc:
468 except Exception as exc:
472 # TODO: martinb: Exceptions are only raised in case of the Pyro4
469 # TODO: martinb: Exceptions are only raised in case of the Pyro4
473 # backend. Refactor this except block after dropping Pyro4 support.
470 # backend. Refactor this except block after dropping Pyro4 support.
474 # TODO: johbo: Improve "translating" back the exception.
471 # TODO: johbo: Improve "translating" back the exception.
475 if getattr(exc, '_vcs_kind', None) == 'repo_locked':
472 if getattr(exc, '_vcs_kind', None) == 'repo_locked':
476 exc = HTTPLockedRC(*exc.args)
473 exc = HTTPLockedRC(*exc.args)
477 _code = rhodecode.CONFIG.get('lock_ret_code')
474 _code = rhodecode.CONFIG.get('lock_ret_code')
478 log.debug('Repository LOCKED ret code %s!', (_code,))
475 log.debug('Repository LOCKED ret code %s!', (_code,))
479 elif getattr(exc, '_vcs_kind', None) == 'requirement':
476 elif getattr(exc, '_vcs_kind', None) == 'requirement':
480 log.debug(
477 log.debug(
481 'Repository requires features unknown to this Mercurial')
478 'Repository requires features unknown to this Mercurial')
482 exc = HTTPRequirementError(*exc.args)
479 exc = HTTPRequirementError(*exc.args)
483 else:
480 else:
484 raise
481 raise
485
482
486 for chunk in exc(environ, start_response):
483 for chunk in exc(environ, start_response):
487 yield chunk
484 yield chunk
488 finally:
485 finally:
489 # invalidate cache on push
486 # invalidate cache on push
490 try:
487 try:
491 if action == 'push':
488 if action == 'push':
492 self._invalidate_cache(self.url_repo_name)
489 self._invalidate_cache(self.url_repo_name)
493 finally:
490 finally:
494 meta.Session.remove()
491 meta.Session.remove()
495
492
496 def _get_repository_name(self, environ):
493 def _get_repository_name(self, environ):
497 """Get repository name out of the environmnent
494 """Get repository name out of the environmnent
498
495
499 :param environ: WSGI environment
496 :param environ: WSGI environment
500 """
497 """
501 raise NotImplementedError()
498 raise NotImplementedError()
502
499
503 def _get_action(self, environ):
500 def _get_action(self, environ):
504 """Map request commands into a pull or push command.
501 """Map request commands into a pull or push command.
505
502
506 :param environ: WSGI environment
503 :param environ: WSGI environment
507 """
504 """
508 raise NotImplementedError()
505 raise NotImplementedError()
509
506
510 def _create_wsgi_app(self, repo_path, repo_name, config):
507 def _create_wsgi_app(self, repo_path, repo_name, config):
511 """Return the WSGI app that will finally handle the request."""
508 """Return the WSGI app that will finally handle the request."""
512 raise NotImplementedError()
509 raise NotImplementedError()
513
510
514 def _create_config(self, extras, repo_name):
511 def _create_config(self, extras, repo_name):
515 """Create a Pyro safe config representation."""
512 """Create a safe config representation."""
516 raise NotImplementedError()
513 raise NotImplementedError()
517
514
518 def _prepare_callback_daemon(self, extras):
515 def _prepare_callback_daemon(self, extras):
519 return prepare_callback_daemon(
516 return prepare_callback_daemon(
520 extras, protocol=vcs_settings.HOOKS_PROTOCOL,
517 extras, protocol=vcs_settings.HOOKS_PROTOCOL,
521 use_direct_calls=vcs_settings.HOOKS_DIRECT_CALLS)
518 use_direct_calls=vcs_settings.HOOKS_DIRECT_CALLS)
522
519
523
520
524 def _should_check_locking(query_string):
521 def _should_check_locking(query_string):
525 # this is kind of hacky, but due to how mercurial handles client-server
522 # this is kind of hacky, but due to how mercurial handles client-server
526 # server see all operation on commit; bookmarks, phases and
523 # server see all operation on commit; bookmarks, phases and
527 # obsolescence marker in different transaction, we don't want to check
524 # obsolescence marker in different transaction, we don't want to check
528 # locking on those
525 # locking on those
529 return query_string not in ['cmd=listkeys']
526 return query_string not in ['cmd=listkeys']
@@ -1,63 +1,63 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2012-2017 RhodeCode GmbH
3 # Copyright (C) 2012-2017 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 Equivalent of rhodecode.lib.middleware.scm_app but using remote apps.
22 Equivalent of rhodecode.lib.middleware.scm_app but using remote apps.
23 """
23 """
24
24
25 import logging
25 import logging
26
26
27 from rhodecode.lib.middleware.utils import wsgi_app_caller_client
27 from rhodecode.lib.middleware.utils import wsgi_app_caller_client
28
28
29
29
30 log = logging.getLogger(__name__)
30 log = logging.getLogger(__name__)
31
31
32
32
33 HG_REMOTE_WSGI = None
33 HG_REMOTE_WSGI = None
34 GIT_REMOTE_WSGI = None
34 GIT_REMOTE_WSGI = None
35
35
36
36
37 def create_git_wsgi_app(repo_path, repo_name, config):
37 def create_git_wsgi_app(repo_path, repo_name, config):
38 """
38 """
39 Return a WSGI app backed by a remote app to handle Git.
39 Return a WSGI app backed by a remote app to handle Git.
40
40
41 config is a dictionary holding the extras.
41 config is a dictionary holding the extras.
42 """
42 """
43 factory = GIT_REMOTE_WSGI
43 factory = GIT_REMOTE_WSGI
44 if not factory:
44 if not factory:
45 log.error('Pyro server has not been initialized yet')
45 log.error('VCSServer has not been initialized yet')
46
46
47 return wsgi_app_caller_client.RemoteAppCaller(
47 return wsgi_app_caller_client.RemoteAppCaller(
48 factory, repo_path, repo_name, config)
48 factory, repo_path, repo_name, config)
49
49
50
50
51 def create_hg_wsgi_app(repo_path, repo_name, config):
51 def create_hg_wsgi_app(repo_path, repo_name, config):
52 """
52 """
53 Return a WSGI app backed by a remote app to handle Mercurial.
53 Return a WSGI app backed by a remote app to handle Mercurial.
54
54
55 config is a list of 3-item tuples representing a ConfigObject (it is the
55 config is a list of 3-item tuples representing a ConfigObject (it is the
56 serialized version of the config object).
56 serialized version of the config object).
57 """
57 """
58 factory = HG_REMOTE_WSGI
58 factory = HG_REMOTE_WSGI
59 if not factory:
59 if not factory:
60 log.error('Pyro server has not been initialized yet')
60 log.error('VCSServer has not been initialized yet')
61
61
62 return wsgi_app_caller_client.RemoteAppCaller(
62 return wsgi_app_caller_client.RemoteAppCaller(
63 factory, repo_path, repo_name, config)
63 factory, repo_path, repo_name, config)
@@ -1,98 +1,90 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2012-2017 RhodeCode GmbH
3 # Copyright (C) 2012-2017 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 Utility to call a WSGI app wrapped in a WSGIAppCaller object.
22 Utility to call a WSGI app wrapped in a WSGIAppCaller object.
23 """
23 """
24
24
25 import logging
25 import logging
26
26
27 from Pyro4.errors import ConnectionClosedError
28
29
27
30 log = logging.getLogger(__name__)
28 log = logging.getLogger(__name__)
31
29
32
30
33 def _get_clean_environ(environ):
31 def _get_clean_environ(environ):
34 """Return a copy of the WSGI environment without wsgi.* keys.
32 """Return a copy of the WSGI environment without wsgi.* keys.
35
33
36 It also omits any non-string values.
34 It also omits any non-string values.
37
35
38 :param environ: WSGI environment to clean
36 :param environ: WSGI environment to clean
39 :type environ: dict
37 :type environ: dict
40
38
41 :returns: WSGI environment to pass to WSGIAppCaller.handle.
39 :returns: WSGI environment to pass to WSGIAppCaller.handle.
42 :rtype: dict
40 :rtype: dict
43 """
41 """
44 clean_environ = dict(
42 clean_environ = dict(
45 (k, v) for k, v in environ.iteritems()
43 (k, v) for k, v in environ.iteritems()
46 if type(v) == str and type(k) == str and not k.startswith('wsgi.')
44 if type(v) == str and type(k) == str and not k.startswith('wsgi.')
47 )
45 )
48
46
49 return clean_environ
47 return clean_environ
50
48
51
49
52 # pylint: disable=too-few-public-methods
50 # pylint: disable=too-few-public-methods
53 class RemoteAppCaller(object):
51 class RemoteAppCaller(object):
54 """Create and calls a remote WSGI app using the given factory.
52 """Create and calls a remote WSGI app using the given factory.
55
53
56 It first cleans the environment, so as to reduce the data transferred.
54 It first cleans the environment, so as to reduce the data transferred.
57 """
55 """
58
56
59 def __init__(self, remote_wsgi, *args, **kwargs):
57 def __init__(self, remote_wsgi, *args, **kwargs):
60 """
58 """
61 :param remote_wsgi: The remote wsgi object that creates a
59 :param remote_wsgi: The remote wsgi object that creates a
62 WSGIAppCaller. This object
60 WSGIAppCaller. This object
63 has to have a handle method, with the signature:
61 has to have a handle method, with the signature:
64 handle(environ, start_response, *args, **kwargs)
62 handle(environ, start_response, *args, **kwargs)
65 :param args: args to be passed to the app creation
63 :param args: args to be passed to the app creation
66 :param kwargs: kwargs to be passed to the app creation
64 :param kwargs: kwargs to be passed to the app creation
67 """
65 """
68 self._remote_wsgi = remote_wsgi
66 self._remote_wsgi = remote_wsgi
69 self._args = args
67 self._args = args
70 self._kwargs = kwargs
68 self._kwargs = kwargs
71
69
72 def __call__(self, environ, start_response):
70 def __call__(self, environ, start_response):
73 """
71 """
74 :param environ: WSGI environment with which the app will be run
72 :param environ: WSGI environment with which the app will be run
75 :type environ: dict
73 :type environ: dict
76 :param start_response: callable of WSGI protocol
74 :param start_response: callable of WSGI protocol
77 :type start_response: callable
75 :type start_response: callable
78
76
79 :returns: an iterable with the data returned by the app
77 :returns: an iterable with the data returned by the app
80 :rtype: iterable<str>
78 :rtype: iterable<str>
81 """
79 """
82 log.debug("Forwarding WSGI request via proxy %s", self._remote_wsgi)
80 log.debug("Forwarding WSGI request via proxy %s", self._remote_wsgi)
83 input_data = environ['wsgi.input'].read()
81 input_data = environ['wsgi.input'].read()
84 clean_environ = _get_clean_environ(environ)
82 clean_environ = _get_clean_environ(environ)
85
83
86 try:
84 data, status, headers = self._remote_wsgi.handle(
87 data, status, headers = self._remote_wsgi.handle(
85 clean_environ, input_data, *self._args, **self._kwargs)
88 clean_environ, input_data, *self._args, **self._kwargs)
89 except ConnectionClosedError:
90 log.debug('Remote Pyro Server ConnectionClosedError')
91 self._remote_wsgi._pyroReconnect(tries=15)
92 data, status, headers = self._remote_wsgi.handle(
93 clean_environ, input_data, *self._args, **self._kwargs)
94
86
95 log.debug("Got result from proxy, returning to WSGI container")
87 log.debug("Got result from proxy, returning to WSGI container")
96 start_response(status, headers)
88 start_response(status, headers)
97
89
98 return data
90 return data
@@ -1,235 +1,233 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2017 RhodeCode GmbH
3 # Copyright (C) 2010-2017 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import gzip
21 import gzip
22 import shutil
22 import shutil
23 import logging
23 import logging
24 import tempfile
24 import tempfile
25 import urlparse
25 import urlparse
26
26
27 from webob.exc import HTTPNotFound
27 from webob.exc import HTTPNotFound
28
28
29 import rhodecode
29 import rhodecode
30 from rhodecode.lib.middleware.appenlight import wrap_in_appenlight_if_enabled
30 from rhodecode.lib.middleware.appenlight import wrap_in_appenlight_if_enabled
31 from rhodecode.lib.middleware.simplegit import SimpleGit, GIT_PROTO_PAT
31 from rhodecode.lib.middleware.simplegit import SimpleGit, GIT_PROTO_PAT
32 from rhodecode.lib.middleware.simplehg import SimpleHg
32 from rhodecode.lib.middleware.simplehg import SimpleHg
33 from rhodecode.lib.middleware.simplesvn import SimpleSvn
33 from rhodecode.lib.middleware.simplesvn import SimpleSvn
34 from rhodecode.model.settings import VcsSettingsModel
34 from rhodecode.model.settings import VcsSettingsModel
35
35
36 log = logging.getLogger(__name__)
36 log = logging.getLogger(__name__)
37
37
38 VCS_TYPE_KEY = '_rc_vcs_type'
38 VCS_TYPE_KEY = '_rc_vcs_type'
39 VCS_TYPE_SKIP = '_rc_vcs_skip'
39 VCS_TYPE_SKIP = '_rc_vcs_skip'
40
40
41
41
42 def is_git(environ):
42 def is_git(environ):
43 """
43 """
44 Returns True if requests should be handled by GIT wsgi middleware
44 Returns True if requests should be handled by GIT wsgi middleware
45 """
45 """
46 is_git_path = GIT_PROTO_PAT.match(environ['PATH_INFO'])
46 is_git_path = GIT_PROTO_PAT.match(environ['PATH_INFO'])
47 log.debug(
47 log.debug(
48 'request path: `%s` detected as GIT PROTOCOL %s', environ['PATH_INFO'],
48 'request path: `%s` detected as GIT PROTOCOL %s', environ['PATH_INFO'],
49 is_git_path is not None)
49 is_git_path is not None)
50
50
51 return is_git_path
51 return is_git_path
52
52
53
53
54 def is_hg(environ):
54 def is_hg(environ):
55 """
55 """
56 Returns True if requests target is mercurial server - header
56 Returns True if requests target is mercurial server - header
57 ``HTTP_ACCEPT`` of such request would start with ``application/mercurial``.
57 ``HTTP_ACCEPT`` of such request would start with ``application/mercurial``.
58 """
58 """
59 is_hg_path = False
59 is_hg_path = False
60
60
61 http_accept = environ.get('HTTP_ACCEPT')
61 http_accept = environ.get('HTTP_ACCEPT')
62
62
63 if http_accept and http_accept.startswith('application/mercurial'):
63 if http_accept and http_accept.startswith('application/mercurial'):
64 query = urlparse.parse_qs(environ['QUERY_STRING'])
64 query = urlparse.parse_qs(environ['QUERY_STRING'])
65 if 'cmd' in query:
65 if 'cmd' in query:
66 is_hg_path = True
66 is_hg_path = True
67
67
68 log.debug(
68 log.debug(
69 'request path: `%s` detected as HG PROTOCOL %s', environ['PATH_INFO'],
69 'request path: `%s` detected as HG PROTOCOL %s', environ['PATH_INFO'],
70 is_hg_path)
70 is_hg_path)
71
71
72 return is_hg_path
72 return is_hg_path
73
73
74
74
75 def is_svn(environ):
75 def is_svn(environ):
76 """
76 """
77 Returns True if requests target is Subversion server
77 Returns True if requests target is Subversion server
78 """
78 """
79 http_dav = environ.get('HTTP_DAV', '')
79 http_dav = environ.get('HTTP_DAV', '')
80 magic_path_segment = rhodecode.CONFIG.get(
80 magic_path_segment = rhodecode.CONFIG.get(
81 'rhodecode_subversion_magic_path', '/!svn')
81 'rhodecode_subversion_magic_path', '/!svn')
82 is_svn_path = (
82 is_svn_path = (
83 'subversion' in http_dav or
83 'subversion' in http_dav or
84 magic_path_segment in environ['PATH_INFO'])
84 magic_path_segment in environ['PATH_INFO'])
85 log.debug(
85 log.debug(
86 'request path: `%s` detected as SVN PROTOCOL %s', environ['PATH_INFO'],
86 'request path: `%s` detected as SVN PROTOCOL %s', environ['PATH_INFO'],
87 is_svn_path)
87 is_svn_path)
88
88
89 return is_svn_path
89 return is_svn_path
90
90
91
91
92 class GunzipMiddleware(object):
92 class GunzipMiddleware(object):
93 """
93 """
94 WSGI middleware that unzips gzip-encoded requests before
94 WSGI middleware that unzips gzip-encoded requests before
95 passing on to the underlying application.
95 passing on to the underlying application.
96 """
96 """
97
97
98 def __init__(self, application):
98 def __init__(self, application):
99 self.app = application
99 self.app = application
100
100
101 def __call__(self, environ, start_response):
101 def __call__(self, environ, start_response):
102 accepts_encoding_header = environ.get('HTTP_CONTENT_ENCODING', b'')
102 accepts_encoding_header = environ.get('HTTP_CONTENT_ENCODING', b'')
103
103
104 if b'gzip' in accepts_encoding_header:
104 if b'gzip' in accepts_encoding_header:
105 log.debug('gzip detected, now running gunzip wrapper')
105 log.debug('gzip detected, now running gunzip wrapper')
106 wsgi_input = environ['wsgi.input']
106 wsgi_input = environ['wsgi.input']
107
107
108 if not hasattr(environ['wsgi.input'], 'seek'):
108 if not hasattr(environ['wsgi.input'], 'seek'):
109 # The gzip implementation in the standard library of Python 2.x
109 # The gzip implementation in the standard library of Python 2.x
110 # requires the '.seek()' and '.tell()' methods to be available
110 # requires the '.seek()' and '.tell()' methods to be available
111 # on the input stream. Read the data into a temporary file to
111 # on the input stream. Read the data into a temporary file to
112 # work around this limitation.
112 # work around this limitation.
113
113
114 wsgi_input = tempfile.SpooledTemporaryFile(64 * 1024 * 1024)
114 wsgi_input = tempfile.SpooledTemporaryFile(64 * 1024 * 1024)
115 shutil.copyfileobj(environ['wsgi.input'], wsgi_input)
115 shutil.copyfileobj(environ['wsgi.input'], wsgi_input)
116 wsgi_input.seek(0)
116 wsgi_input.seek(0)
117
117
118 environ['wsgi.input'] = gzip.GzipFile(fileobj=wsgi_input, mode='r')
118 environ['wsgi.input'] = gzip.GzipFile(fileobj=wsgi_input, mode='r')
119 # since we "Ungzipped" the content we say now it's no longer gzip
119 # since we "Ungzipped" the content we say now it's no longer gzip
120 # content encoding
120 # content encoding
121 del environ['HTTP_CONTENT_ENCODING']
121 del environ['HTTP_CONTENT_ENCODING']
122
122
123 # content length has changes ? or i'm not sure
123 # content length has changes ? or i'm not sure
124 if 'CONTENT_LENGTH' in environ:
124 if 'CONTENT_LENGTH' in environ:
125 del environ['CONTENT_LENGTH']
125 del environ['CONTENT_LENGTH']
126 else:
126 else:
127 log.debug('content not gzipped, gzipMiddleware passing '
127 log.debug('content not gzipped, gzipMiddleware passing '
128 'request further')
128 'request further')
129 return self.app(environ, start_response)
129 return self.app(environ, start_response)
130
130
131
131
132 def is_vcs_call(environ):
132 def is_vcs_call(environ):
133 if VCS_TYPE_KEY in environ:
133 if VCS_TYPE_KEY in environ:
134 raw_type = environ[VCS_TYPE_KEY]
134 raw_type = environ[VCS_TYPE_KEY]
135 return raw_type and raw_type != VCS_TYPE_SKIP
135 return raw_type and raw_type != VCS_TYPE_SKIP
136 return False
136 return False
137
137
138
138
139 def detect_vcs_request(environ, backends):
139 def detect_vcs_request(environ, backends):
140 checks = {
140 checks = {
141 'hg': (is_hg, SimpleHg),
141 'hg': (is_hg, SimpleHg),
142 'git': (is_git, SimpleGit),
142 'git': (is_git, SimpleGit),
143 'svn': (is_svn, SimpleSvn),
143 'svn': (is_svn, SimpleSvn),
144 }
144 }
145 handler = None
145 handler = None
146
146
147 if VCS_TYPE_KEY in environ:
147 if VCS_TYPE_KEY in environ:
148 raw_type = environ[VCS_TYPE_KEY]
148 raw_type = environ[VCS_TYPE_KEY]
149 if raw_type == VCS_TYPE_SKIP:
149 if raw_type == VCS_TYPE_SKIP:
150 log.debug('got `skip` marker for vcs detection, skipping...')
150 log.debug('got `skip` marker for vcs detection, skipping...')
151 return handler
151 return handler
152
152
153 _check, handler = checks.get(raw_type) or [None, None]
153 _check, handler = checks.get(raw_type) or [None, None]
154 if handler:
154 if handler:
155 log.debug('got handler:%s from environ', handler)
155 log.debug('got handler:%s from environ', handler)
156
156
157 if not handler:
157 if not handler:
158 log.debug('checking if request is of VCS type in order: %s', backends)
158 log.debug('checking if request is of VCS type in order: %s', backends)
159 for vcs_type in backends:
159 for vcs_type in backends:
160 vcs_check, _handler = checks[vcs_type]
160 vcs_check, _handler = checks[vcs_type]
161 if vcs_check(environ):
161 if vcs_check(environ):
162 log.debug('vcs handler found %s', _handler)
162 log.debug('vcs handler found %s', _handler)
163 handler = _handler
163 handler = _handler
164 break
164 break
165
165
166 return handler
166 return handler
167
167
168
168
169 class VCSMiddleware(object):
169 class VCSMiddleware(object):
170
170
171 def __init__(self, app, config, appenlight_client, registry):
171 def __init__(self, app, config, appenlight_client, registry):
172 self.application = app
172 self.application = app
173 self.config = config
173 self.config = config
174 self.appenlight_client = appenlight_client
174 self.appenlight_client = appenlight_client
175 self.registry = registry
175 self.registry = registry
176 self.use_gzip = True
176 self.use_gzip = True
177 # order in which we check the middlewares, based on vcs.backends config
177 # order in which we check the middlewares, based on vcs.backends config
178 self.check_middlewares = config['vcs.backends']
178 self.check_middlewares = config['vcs.backends']
179
179
180 def vcs_config(self, repo_name=None):
180 def vcs_config(self, repo_name=None):
181 """
181 """
182 returns serialized VcsSettings
182 returns serialized VcsSettings
183 """
183 """
184 return VcsSettingsModel(repo=repo_name).get_ui_settings_as_config_obj()
184 return VcsSettingsModel(repo=repo_name).get_ui_settings_as_config_obj()
185
185
186 def wrap_in_gzip_if_enabled(self, app, config):
186 def wrap_in_gzip_if_enabled(self, app, config):
187 if self.use_gzip:
187 if self.use_gzip:
188 app = GunzipMiddleware(app)
188 app = GunzipMiddleware(app)
189 return app
189 return app
190
190
191 def _get_handler_app(self, environ):
191 def _get_handler_app(self, environ):
192 app = None
192 app = None
193 log.debug('VCSMiddleware: detecting vcs type.')
193 log.debug('VCSMiddleware: detecting vcs type.')
194 handler = detect_vcs_request(environ, self.check_middlewares)
194 handler = detect_vcs_request(environ, self.check_middlewares)
195 if handler:
195 if handler:
196 app = handler(self.application, self.config, self.registry)
196 app = handler(self.application, self.config, self.registry)
197
197
198 return app
198 return app
199
199
200 def __call__(self, environ, start_response):
200 def __call__(self, environ, start_response):
201 # check if we handle one of interesting protocols, optionally extract
201 # check if we handle one of interesting protocols, optionally extract
202 # specific vcsSettings and allow changes of how things are wrapped
202 # specific vcsSettings and allow changes of how things are wrapped
203 vcs_handler = self._get_handler_app(environ)
203 vcs_handler = self._get_handler_app(environ)
204 if vcs_handler:
204 if vcs_handler:
205 # translate the _REPO_ID into real repo NAME for usage
205 # translate the _REPO_ID into real repo NAME for usage
206 # in middleware
206 # in middleware
207 environ['PATH_INFO'] = vcs_handler._get_by_id(environ['PATH_INFO'])
207 environ['PATH_INFO'] = vcs_handler._get_by_id(environ['PATH_INFO'])
208
208
209 # Set acl, url and vcs repo names.
209 # Set acl, url and vcs repo names.
210 vcs_handler.set_repo_names(environ)
210 vcs_handler.set_repo_names(environ)
211
211
212 # check for type, presence in database and on filesystem
212 # check for type, presence in database and on filesystem
213 if not vcs_handler.is_valid_and_existing_repo(
213 if not vcs_handler.is_valid_and_existing_repo(
214 vcs_handler.acl_repo_name,
214 vcs_handler.acl_repo_name,
215 vcs_handler.basepath,
215 vcs_handler.basepath,
216 vcs_handler.SCM):
216 vcs_handler.SCM):
217 return HTTPNotFound()(environ, start_response)
217 return HTTPNotFound()(environ, start_response)
218
218
219 # TODO: johbo: Needed for the Pyro4 backend and Mercurial only.
220 # Remove once we fully switched to the HTTP backend.
221 environ['REPO_NAME'] = vcs_handler.url_repo_name
219 environ['REPO_NAME'] = vcs_handler.url_repo_name
222
220
223 # register repo config back to the handler
221 # register repo config back to the handler
224 vcs_handler.repo_vcs_config = self.vcs_config(
222 vcs_handler.repo_vcs_config = self.vcs_config(
225 vcs_handler.acl_repo_name)
223 vcs_handler.acl_repo_name)
226
224
227 # Wrap handler in middlewares if they are enabled.
225 # Wrap handler in middlewares if they are enabled.
228 vcs_handler = self.wrap_in_gzip_if_enabled(
226 vcs_handler = self.wrap_in_gzip_if_enabled(
229 vcs_handler, self.config)
227 vcs_handler, self.config)
230 vcs_handler, _ = wrap_in_appenlight_if_enabled(
228 vcs_handler, _ = wrap_in_appenlight_if_enabled(
231 vcs_handler, self.config, self.appenlight_client)
229 vcs_handler, self.config, self.appenlight_client)
232
230
233 return vcs_handler(environ, start_response)
231 return vcs_handler(environ, start_response)
234
232
235 return self.application(environ, start_response)
233 return self.application(environ, start_response)
@@ -1,304 +1,241 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2014-2017 RhodeCode GmbH
3 # Copyright (C) 2014-2017 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 Various version Control System version lib (vcs) management abstraction layer
22 Various version Control System version lib (vcs) management abstraction layer
23 for Python. Build with server client architecture.
23 for Python. Build with server client architecture.
24 """
24 """
25
25
26
26
27 VERSION = (0, 5, 0, 'dev')
27 VERSION = (0, 5, 0, 'dev')
28
28
29 __version__ = '.'.join((str(each) for each in VERSION[:4]))
29 __version__ = '.'.join((str(each) for each in VERSION[:4]))
30
30
31 __all__ = [
31 __all__ = [
32 'get_version', 'get_vcs_instance', 'get_backend',
32 'get_version', 'get_vcs_instance', 'get_backend',
33 'VCSError', 'RepositoryError', 'CommitError'
33 'VCSError', 'RepositoryError', 'CommitError'
34 ]
34 ]
35
35
36 import atexit
36 import atexit
37 import logging
37 import logging
38 import subprocess32
38 import subprocess32
39 import time
39 import time
40 import urlparse
40 import urlparse
41 from cStringIO import StringIO
41 from cStringIO import StringIO
42
42
43 import Pyro4
44 from Pyro4.errors import CommunicationError
45
43
46 from rhodecode.lib.vcs.conf import settings
44 from rhodecode.lib.vcs.conf import settings
47 from rhodecode.lib.vcs.backends import get_vcs_instance, get_backend
45 from rhodecode.lib.vcs.backends import get_vcs_instance, get_backend
48 from rhodecode.lib.vcs.exceptions import (
46 from rhodecode.lib.vcs.exceptions import (
49 VCSError, RepositoryError, CommitError, VCSCommunicationError)
47 VCSError, RepositoryError, CommitError, VCSCommunicationError)
50
48
51 log = logging.getLogger(__name__)
49 log = logging.getLogger(__name__)
52
50
53 # The pycurl library directly accesses C API functions and is not patched by
51 # The pycurl library directly accesses C API functions and is not patched by
54 # gevent. This will potentially lead to deadlocks due to incompatibility to
52 # gevent. This will potentially lead to deadlocks due to incompatibility to
55 # gevent. Therefore we check if gevent is active and import a gevent compatible
53 # gevent. Therefore we check if gevent is active and import a gevent compatible
56 # wrapper in that case.
54 # wrapper in that case.
57 try:
55 try:
58 from gevent import monkey
56 from gevent import monkey
59 if monkey.is_module_patched('__builtin__'):
57 if monkey.is_module_patched('__builtin__'):
60 import geventcurl as pycurl
58 import geventcurl as pycurl
61 log.debug('Using gevent comapatible pycurl: %s', pycurl)
59 log.debug('Using gevent comapatible pycurl: %s', pycurl)
62 else:
60 else:
63 import pycurl
61 import pycurl
64 except ImportError:
62 except ImportError:
65 import pycurl
63 import pycurl
66
64
67
65
68 def get_version():
66 def get_version():
69 """
67 """
70 Returns shorter version (digit parts only) as string.
68 Returns shorter version (digit parts only) as string.
71 """
69 """
72 return '.'.join((str(each) for each in VERSION[:3]))
70 return '.'.join((str(each) for each in VERSION[:3]))
73
71
74
72
75 def connect_pyro4(server_and_port):
76 from rhodecode.lib.vcs import connection, client
77 from rhodecode.lib.middleware.utils import scm_app
78
79 git_remote = client.RequestScopeProxyFactory(
80 settings.pyro_remote(settings.PYRO_GIT, server_and_port))
81 hg_remote = client.RequestScopeProxyFactory(
82 settings.pyro_remote(settings.PYRO_HG, server_and_port))
83 svn_remote = client.RequestScopeProxyFactory(
84 settings.pyro_remote(settings.PYRO_SVN, server_and_port))
85
86 connection.Git = client.RepoMaker(proxy_factory=git_remote)
87 connection.Hg = client.RepoMaker(proxy_factory=hg_remote)
88 connection.Svn = client.RepoMaker(proxy_factory=svn_remote)
89
90 scm_app.GIT_REMOTE_WSGI = Pyro4.Proxy(
91 settings.pyro_remote(
92 settings.PYRO_GIT_REMOTE_WSGI, server_and_port))
93 scm_app.HG_REMOTE_WSGI = Pyro4.Proxy(
94 settings.pyro_remote(
95 settings.PYRO_HG_REMOTE_WSGI, server_and_port))
96
97 @atexit.register
98 def free_connection_resources():
99 connection.Git = None
100 connection.Hg = None
101 connection.Svn = None
102 connection.Service = None
103
104
105 def connect_http(server_and_port):
73 def connect_http(server_and_port):
106 from rhodecode.lib.vcs import connection, client_http
74 from rhodecode.lib.vcs import connection, client_http
107 from rhodecode.lib.middleware.utils import scm_app
75 from rhodecode.lib.middleware.utils import scm_app
108
76
109 session_factory = client_http.ThreadlocalSessionFactory()
77 session_factory = client_http.ThreadlocalSessionFactory()
110
78
111 connection.Git = client_http.RepoMaker(
79 connection.Git = client_http.RepoMaker(
112 server_and_port, '/git', 'git', session_factory)
80 server_and_port, '/git', 'git', session_factory)
113 connection.Hg = client_http.RepoMaker(
81 connection.Hg = client_http.RepoMaker(
114 server_and_port, '/hg', 'hg', session_factory)
82 server_and_port, '/hg', 'hg', session_factory)
115 connection.Svn = client_http.RepoMaker(
83 connection.Svn = client_http.RepoMaker(
116 server_and_port, '/svn', 'svn', session_factory)
84 server_and_port, '/svn', 'svn', session_factory)
117 connection.Service = client_http.ServiceConnection(
85 connection.Service = client_http.ServiceConnection(
118 server_and_port, '/_service', session_factory)
86 server_and_port, '/_service', session_factory)
119
87
120 scm_app.HG_REMOTE_WSGI = client_http.VcsHttpProxy(
88 scm_app.HG_REMOTE_WSGI = client_http.VcsHttpProxy(
121 server_and_port, '/proxy/hg')
89 server_and_port, '/proxy/hg')
122 scm_app.GIT_REMOTE_WSGI = client_http.VcsHttpProxy(
90 scm_app.GIT_REMOTE_WSGI = client_http.VcsHttpProxy(
123 server_and_port, '/proxy/git')
91 server_and_port, '/proxy/git')
124
92
125 @atexit.register
93 @atexit.register
126 def free_connection_resources():
94 def free_connection_resources():
127 connection.Git = None
95 connection.Git = None
128 connection.Hg = None
96 connection.Hg = None
129 connection.Svn = None
97 connection.Svn = None
130 connection.Service = None
98 connection.Service = None
131
99
132
100
133 def connect_vcs(server_and_port, protocol):
101 def connect_vcs(server_and_port, protocol):
134 """
102 """
135 Initializes the connection to the vcs server.
103 Initializes the connection to the vcs server.
136
104
137 :param server_and_port: str, e.g. "localhost:9900"
105 :param server_and_port: str, e.g. "localhost:9900"
138 :param protocol: str, "pyro4" or "http"
106 :param protocol: str or "http"
139 """
107 """
140 if protocol == 'pyro4':
108 if protocol == 'http':
141 connect_pyro4(server_and_port)
142 elif protocol == 'http':
143 connect_http(server_and_port)
109 connect_http(server_and_port)
144 else:
110 else:
145 raise Exception('Invalid vcs server protocol "{}"'.format(protocol))
111 raise Exception('Invalid vcs server protocol "{}"'.format(protocol))
146
112
147
113
148 # TODO: johbo: This function should be moved into our test suite, there is
114 # TODO: johbo: This function should be moved into our test suite, there is
149 # no reason to support starting the vcsserver in Enterprise itself.
115 # no reason to support starting the vcsserver in Enterprise itself.
150 def start_vcs_server(server_and_port, protocol, log_level=None):
116 def start_vcs_server(server_and_port, protocol, log_level=None):
151 """
117 """
152 Starts the vcs server in a subprocess.
118 Starts the vcs server in a subprocess.
153 """
119 """
154 log.info('Starting VCSServer as a sub process with %s protocol', protocol)
120 log.info('Starting VCSServer as a sub process with %s protocol', protocol)
155 if protocol == 'http':
121 if protocol == 'http':
156 return _start_http_vcs_server(server_and_port, log_level)
122 return _start_http_vcs_server(server_and_port, log_level)
157 elif protocol == 'pyro4':
158 return _start_pyro4_vcs_server(server_and_port, log_level)
159 else:
123 else:
160 raise Exception('Invalid vcs server protocol "{}"'.format(protocol))
124 raise Exception('Invalid vcs server protocol "{}"'.format(protocol))
161
125
162
126
163 def _start_pyro4_vcs_server(server_and_port, log_level=None):
164 _try_to_shutdown_running_server(server_and_port, protocol='pyro4')
165 host, port = server_and_port.rsplit(":", 1)
166 host = host.strip('[]')
167 args = [
168 'vcsserver', '--port', port, '--host', host, '--locale', 'en_US.UTF-8',
169 '--threadpool', '32']
170 if log_level:
171 args += ['--log-level', log_level]
172 proc = subprocess32.Popen(args)
173
174 def cleanup_server_process():
175 proc.kill()
176 atexit.register(cleanup_server_process)
177
178 server = create_vcsserver_proxy(server_and_port, protocol='pyro4')
179 _wait_until_vcs_server_is_reachable(server)
180
181
182 def _start_http_vcs_server(server_and_port, log_level=None):
127 def _start_http_vcs_server(server_and_port, log_level=None):
183 # TODO: mikhail: shutdown if an http server already runs
128 # TODO: mikhail: shutdown if an http server already runs
184
129
185 host, port = server_and_port.rsplit(":", 1)
130 host, port = server_and_port.rsplit(":", 1)
186 args = [
131 args = [
187 'pserve', 'rhodecode/tests/vcsserver_http.ini',
132 'pserve', 'rhodecode/tests/vcsserver_http.ini',
188 'http_port=%s' % (port, ), 'http_host=%s' % (host, )]
133 'http_port=%s' % (port, ), 'http_host=%s' % (host, )]
189 proc = subprocess32.Popen(args)
134 proc = subprocess32.Popen(args)
190
135
191 def cleanup_server_process():
136 def cleanup_server_process():
192 proc.kill()
137 proc.kill()
193 atexit.register(cleanup_server_process)
138 atexit.register(cleanup_server_process)
194
139
195 server = create_vcsserver_proxy(server_and_port, protocol='http')
140 server = create_vcsserver_proxy(server_and_port, protocol='http')
196 _wait_until_vcs_server_is_reachable(server)
141 _wait_until_vcs_server_is_reachable(server)
197
142
198
143
199 def _wait_until_vcs_server_is_reachable(server, timeout=40):
144 def _wait_until_vcs_server_is_reachable(server, timeout=40):
200 begin = time.time()
145 begin = time.time()
201 while (time.time() - begin) < timeout:
146 while (time.time() - begin) < timeout:
202 try:
147 try:
203 server.ping()
148 server.ping()
204 return
149 return
205 except (VCSCommunicationError, CommunicationError, pycurl.error):
150 except (VCSCommunicationError, pycurl.error):
206 log.debug('VCSServer not started yet, retry to connect.')
151 log.debug('VCSServer not started yet, retry to connect.')
207 time.sleep(0.5)
152 time.sleep(0.5)
208 raise Exception(
153 raise Exception(
209 'Starting the VCSServer failed or took more than {} '
154 'Starting the VCSServer failed or took more than {} '
210 'seconds.'.format(timeout))
155 'seconds.'.format(timeout))
211
156
212
157
213 def _try_to_shutdown_running_server(server_and_port, protocol):
158 def _try_to_shutdown_running_server(server_and_port, protocol):
214 server = create_vcsserver_proxy(server_and_port, protocol)
159 server = create_vcsserver_proxy(server_and_port, protocol)
215 try:
160 try:
216 server.shutdown()
161 server.shutdown()
217 except (CommunicationError, pycurl.error):
162 except pycurl.error:
218 return
163 return
219
164
220 # TODO: Not sure why this is important, but without it the following start
165 # TODO: Not sure why this is important, but without it the following start
221 # of the server fails.
166 # of the server fails.
222 server = create_vcsserver_proxy(server_and_port, protocol)
167 server = create_vcsserver_proxy(server_and_port, protocol)
223 server.ping()
168 server.ping()
224
169
225
170
226 def create_vcsserver_proxy(server_and_port, protocol):
171 def create_vcsserver_proxy(server_and_port, protocol):
227 if protocol == 'pyro4':
172 if protocol == 'http':
228 return _create_vcsserver_proxy_pyro4(server_and_port)
229 elif protocol == 'http':
230 return _create_vcsserver_proxy_http(server_and_port)
173 return _create_vcsserver_proxy_http(server_and_port)
231 else:
174 else:
232 raise Exception('Invalid vcs server protocol "{}"'.format(protocol))
175 raise Exception('Invalid vcs server protocol "{}"'.format(protocol))
233
176
234
177
235 def _create_vcsserver_proxy_pyro4(server_and_port):
236 server = Pyro4.Proxy(
237 settings.pyro_remote(settings.PYRO_VCSSERVER, server_and_port))
238 return server
239
240
241 def _create_vcsserver_proxy_http(server_and_port):
178 def _create_vcsserver_proxy_http(server_and_port):
242 from rhodecode.lib.vcs import client_http
179 from rhodecode.lib.vcs import client_http
243
180
244 session = _create_http_rpc_session()
181 session = _create_http_rpc_session()
245 url = urlparse.urljoin('http://%s' % server_and_port, '/server')
182 url = urlparse.urljoin('http://%s' % server_and_port, '/server')
246 return client_http.RemoteObject(url, session)
183 return client_http.RemoteObject(url, session)
247
184
248
185
249 class CurlSession(object):
186 class CurlSession(object):
250 """
187 """
251 Modeled so that it provides a subset of the requests interface.
188 Modeled so that it provides a subset of the requests interface.
252
189
253 This has been created so that it does only provide a minimal API for our
190 This has been created so that it does only provide a minimal API for our
254 needs. The parts which it provides are based on the API of the library
191 needs. The parts which it provides are based on the API of the library
255 `requests` which allows us to easily benchmark against it.
192 `requests` which allows us to easily benchmark against it.
256
193
257 Please have a look at the class :class:`requests.Session` when you extend
194 Please have a look at the class :class:`requests.Session` when you extend
258 it.
195 it.
259 """
196 """
260
197
261 def __init__(self):
198 def __init__(self):
262 curl = pycurl.Curl()
199 curl = pycurl.Curl()
263 # TODO: johbo: I did test with 7.19 of libcurl. This version has
200 # TODO: johbo: I did test with 7.19 of libcurl. This version has
264 # trouble with 100 - continue being set in the expect header. This
201 # trouble with 100 - continue being set in the expect header. This
265 # can lead to massive performance drops, switching it off here.
202 # can lead to massive performance drops, switching it off here.
266 curl.setopt(curl.HTTPHEADER, ["Expect:"])
203 curl.setopt(curl.HTTPHEADER, ["Expect:"])
267 curl.setopt(curl.TCP_NODELAY, True)
204 curl.setopt(curl.TCP_NODELAY, True)
268 curl.setopt(curl.PROTOCOLS, curl.PROTO_HTTP)
205 curl.setopt(curl.PROTOCOLS, curl.PROTO_HTTP)
269 self._curl = curl
206 self._curl = curl
270
207
271 def post(self, url, data, allow_redirects=False):
208 def post(self, url, data, allow_redirects=False):
272 response_buffer = StringIO()
209 response_buffer = StringIO()
273
210
274 curl = self._curl
211 curl = self._curl
275 curl.setopt(curl.URL, url)
212 curl.setopt(curl.URL, url)
276 curl.setopt(curl.POST, True)
213 curl.setopt(curl.POST, True)
277 curl.setopt(curl.POSTFIELDS, data)
214 curl.setopt(curl.POSTFIELDS, data)
278 curl.setopt(curl.FOLLOWLOCATION, allow_redirects)
215 curl.setopt(curl.FOLLOWLOCATION, allow_redirects)
279 curl.setopt(curl.WRITEDATA, response_buffer)
216 curl.setopt(curl.WRITEDATA, response_buffer)
280 curl.perform()
217 curl.perform()
281
218
282 return CurlResponse(response_buffer)
219 return CurlResponse(response_buffer)
283
220
284
221
285 class CurlResponse(object):
222 class CurlResponse(object):
286 """
223 """
287 The response of a request, modeled after the requests API.
224 The response of a request, modeled after the requests API.
288
225
289 This class provides a subset of the response interface known from the
226 This class provides a subset of the response interface known from the
290 library `requests`. It is intentionally kept similar, so that we can use
227 library `requests`. It is intentionally kept similar, so that we can use
291 `requests` as a drop in replacement for benchmarking purposes.
228 `requests` as a drop in replacement for benchmarking purposes.
292 """
229 """
293
230
294 def __init__(self, response_buffer):
231 def __init__(self, response_buffer):
295 self._response_buffer = response_buffer
232 self._response_buffer = response_buffer
296
233
297 @property
234 @property
298 def content(self):
235 def content(self):
299 return self._response_buffer.getvalue()
236 return self._response_buffer.getvalue()
300
237
301
238
302 def _create_http_rpc_session():
239 def _create_http_rpc_session():
303 session = CurlSession()
240 session = CurlSession()
304 return session
241 return session
@@ -1,291 +1,284 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2016-2017 RhodeCode GmbH
3 # Copyright (C) 2016-2017 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 Client for the VCSServer implemented based on HTTP.
22 Client for the VCSServer implemented based on HTTP.
23
24
25 Status
26 ------
27
28 This client implementation shall eventually replace the Pyro4 based
29 implementation.
30 """
23 """
31
24
32 import copy
25 import copy
33 import logging
26 import logging
34 import threading
27 import threading
35 import urllib2
28 import urllib2
36 import urlparse
29 import urlparse
37 import uuid
30 import uuid
38
31
39 import pycurl
32 import pycurl
40 import msgpack
33 import msgpack
41 import requests
34 import requests
42
35
43 from . import exceptions, CurlSession
36 from . import exceptions, CurlSession
44
37
45
38
46 log = logging.getLogger(__name__)
39 log = logging.getLogger(__name__)
47
40
48
41
49 # TODO: mikhail: Keep it in sync with vcsserver's
42 # TODO: mikhail: Keep it in sync with vcsserver's
50 # HTTPApplication.ALLOWED_EXCEPTIONS
43 # HTTPApplication.ALLOWED_EXCEPTIONS
51 EXCEPTIONS_MAP = {
44 EXCEPTIONS_MAP = {
52 'KeyError': KeyError,
45 'KeyError': KeyError,
53 'URLError': urllib2.URLError,
46 'URLError': urllib2.URLError,
54 }
47 }
55
48
56
49
57 class RepoMaker(object):
50 class RepoMaker(object):
58
51
59 def __init__(self, server_and_port, backend_endpoint, backend_type, session_factory):
52 def __init__(self, server_and_port, backend_endpoint, backend_type, session_factory):
60 self.url = urlparse.urljoin(
53 self.url = urlparse.urljoin(
61 'http://%s' % server_and_port, backend_endpoint)
54 'http://%s' % server_and_port, backend_endpoint)
62 self._session_factory = session_factory
55 self._session_factory = session_factory
63 self.backend_type = backend_type
56 self.backend_type = backend_type
64
57
65 def __call__(self, path, config, with_wire=None):
58 def __call__(self, path, config, with_wire=None):
66 log.debug('RepoMaker call on %s', path)
59 log.debug('RepoMaker call on %s', path)
67 return RemoteRepo(
60 return RemoteRepo(
68 path, config, self.url, self._session_factory(),
61 path, config, self.url, self._session_factory(),
69 with_wire=with_wire)
62 with_wire=with_wire)
70
63
71 def __getattr__(self, name):
64 def __getattr__(self, name):
72 def f(*args, **kwargs):
65 def f(*args, **kwargs):
73 return self._call(name, *args, **kwargs)
66 return self._call(name, *args, **kwargs)
74 return f
67 return f
75
68
76 @exceptions.map_vcs_exceptions
69 @exceptions.map_vcs_exceptions
77 def _call(self, name, *args, **kwargs):
70 def _call(self, name, *args, **kwargs):
78 payload = {
71 payload = {
79 'id': str(uuid.uuid4()),
72 'id': str(uuid.uuid4()),
80 'method': name,
73 'method': name,
81 'backend': self.backend_type,
74 'backend': self.backend_type,
82 'params': {'args': args, 'kwargs': kwargs}
75 'params': {'args': args, 'kwargs': kwargs}
83 }
76 }
84 return _remote_call(
77 return _remote_call(
85 self.url, payload, EXCEPTIONS_MAP, self._session_factory())
78 self.url, payload, EXCEPTIONS_MAP, self._session_factory())
86
79
87
80
88 class ServiceConnection(object):
81 class ServiceConnection(object):
89 def __init__(self, server_and_port, backend_endpoint, session_factory):
82 def __init__(self, server_and_port, backend_endpoint, session_factory):
90 self.url = urlparse.urljoin(
83 self.url = urlparse.urljoin(
91 'http://%s' % server_and_port, backend_endpoint)
84 'http://%s' % server_and_port, backend_endpoint)
92 self._session_factory = session_factory
85 self._session_factory = session_factory
93
86
94 def __getattr__(self, name):
87 def __getattr__(self, name):
95 def f(*args, **kwargs):
88 def f(*args, **kwargs):
96 return self._call(name, *args, **kwargs)
89 return self._call(name, *args, **kwargs)
97
90
98 return f
91 return f
99
92
100 @exceptions.map_vcs_exceptions
93 @exceptions.map_vcs_exceptions
101 def _call(self, name, *args, **kwargs):
94 def _call(self, name, *args, **kwargs):
102 payload = {
95 payload = {
103 'id': str(uuid.uuid4()),
96 'id': str(uuid.uuid4()),
104 'method': name,
97 'method': name,
105 'params': {'args': args, 'kwargs': kwargs}
98 'params': {'args': args, 'kwargs': kwargs}
106 }
99 }
107 return _remote_call(
100 return _remote_call(
108 self.url, payload, EXCEPTIONS_MAP, self._session_factory())
101 self.url, payload, EXCEPTIONS_MAP, self._session_factory())
109
102
110
103
111 class RemoteRepo(object):
104 class RemoteRepo(object):
112
105
113 def __init__(self, path, config, url, session, with_wire=None):
106 def __init__(self, path, config, url, session, with_wire=None):
114 self.url = url
107 self.url = url
115 self._session = session
108 self._session = session
116 self._wire = {
109 self._wire = {
117 "path": path,
110 "path": path,
118 "config": config,
111 "config": config,
119 "context": self._create_vcs_cache_context(),
112 "context": self._create_vcs_cache_context(),
120 }
113 }
121 if with_wire:
114 if with_wire:
122 self._wire.update(with_wire)
115 self._wire.update(with_wire)
123
116
124 # johbo: Trading complexity for performance. Avoiding the call to
117 # johbo: Trading complexity for performance. Avoiding the call to
125 # log.debug brings a few percent gain even if is is not active.
118 # log.debug brings a few percent gain even if is is not active.
126 if log.isEnabledFor(logging.DEBUG):
119 if log.isEnabledFor(logging.DEBUG):
127 self._call = self._call_with_logging
120 self._call = self._call_with_logging
128
121
129 def __getattr__(self, name):
122 def __getattr__(self, name):
130 def f(*args, **kwargs):
123 def f(*args, **kwargs):
131 return self._call(name, *args, **kwargs)
124 return self._call(name, *args, **kwargs)
132 return f
125 return f
133
126
134 @exceptions.map_vcs_exceptions
127 @exceptions.map_vcs_exceptions
135 def _call(self, name, *args, **kwargs):
128 def _call(self, name, *args, **kwargs):
136 # TODO: oliver: This is currently necessary pre-call since the
129 # TODO: oliver: This is currently necessary pre-call since the
137 # config object is being changed for hooking scenarios
130 # config object is being changed for hooking scenarios
138 wire = copy.deepcopy(self._wire)
131 wire = copy.deepcopy(self._wire)
139 wire["config"] = wire["config"].serialize()
132 wire["config"] = wire["config"].serialize()
140 payload = {
133 payload = {
141 'id': str(uuid.uuid4()),
134 'id': str(uuid.uuid4()),
142 'method': name,
135 'method': name,
143 'params': {'wire': wire, 'args': args, 'kwargs': kwargs}
136 'params': {'wire': wire, 'args': args, 'kwargs': kwargs}
144 }
137 }
145 return _remote_call(self.url, payload, EXCEPTIONS_MAP, self._session)
138 return _remote_call(self.url, payload, EXCEPTIONS_MAP, self._session)
146
139
147 def _call_with_logging(self, name, *args, **kwargs):
140 def _call_with_logging(self, name, *args, **kwargs):
148 log.debug('Calling %s@%s', self.url, name)
141 log.debug('Calling %s@%s', self.url, name)
149 return RemoteRepo._call(self, name, *args, **kwargs)
142 return RemoteRepo._call(self, name, *args, **kwargs)
150
143
151 def __getitem__(self, key):
144 def __getitem__(self, key):
152 return self.revision(key)
145 return self.revision(key)
153
146
154 def _create_vcs_cache_context(self):
147 def _create_vcs_cache_context(self):
155 """
148 """
156 Creates a unique string which is passed to the VCSServer on every
149 Creates a unique string which is passed to the VCSServer on every
157 remote call. It is used as cache key in the VCSServer.
150 remote call. It is used as cache key in the VCSServer.
158 """
151 """
159 return str(uuid.uuid4())
152 return str(uuid.uuid4())
160
153
161 def invalidate_vcs_cache(self):
154 def invalidate_vcs_cache(self):
162 """
155 """
163 This invalidates the context which is sent to the VCSServer on every
156 This invalidates the context which is sent to the VCSServer on every
164 call to a remote method. It forces the VCSServer to create a fresh
157 call to a remote method. It forces the VCSServer to create a fresh
165 repository instance on the next call to a remote method.
158 repository instance on the next call to a remote method.
166 """
159 """
167 self._wire['context'] = self._create_vcs_cache_context()
160 self._wire['context'] = self._create_vcs_cache_context()
168
161
169
162
170 class RemoteObject(object):
163 class RemoteObject(object):
171
164
172 def __init__(self, url, session):
165 def __init__(self, url, session):
173 self._url = url
166 self._url = url
174 self._session = session
167 self._session = session
175
168
176 # johbo: Trading complexity for performance. Avoiding the call to
169 # johbo: Trading complexity for performance. Avoiding the call to
177 # log.debug brings a few percent gain even if is is not active.
170 # log.debug brings a few percent gain even if is is not active.
178 if log.isEnabledFor(logging.DEBUG):
171 if log.isEnabledFor(logging.DEBUG):
179 self._call = self._call_with_logging
172 self._call = self._call_with_logging
180
173
181 def __getattr__(self, name):
174 def __getattr__(self, name):
182 def f(*args, **kwargs):
175 def f(*args, **kwargs):
183 return self._call(name, *args, **kwargs)
176 return self._call(name, *args, **kwargs)
184 return f
177 return f
185
178
186 @exceptions.map_vcs_exceptions
179 @exceptions.map_vcs_exceptions
187 def _call(self, name, *args, **kwargs):
180 def _call(self, name, *args, **kwargs):
188 payload = {
181 payload = {
189 'id': str(uuid.uuid4()),
182 'id': str(uuid.uuid4()),
190 'method': name,
183 'method': name,
191 'params': {'args': args, 'kwargs': kwargs}
184 'params': {'args': args, 'kwargs': kwargs}
192 }
185 }
193 return _remote_call(self._url, payload, EXCEPTIONS_MAP, self._session)
186 return _remote_call(self._url, payload, EXCEPTIONS_MAP, self._session)
194
187
195 def _call_with_logging(self, name, *args, **kwargs):
188 def _call_with_logging(self, name, *args, **kwargs):
196 log.debug('Calling %s@%s', self._url, name)
189 log.debug('Calling %s@%s', self._url, name)
197 return RemoteObject._call(self, name, *args, **kwargs)
190 return RemoteObject._call(self, name, *args, **kwargs)
198
191
199
192
200 def _remote_call(url, payload, exceptions_map, session):
193 def _remote_call(url, payload, exceptions_map, session):
201 try:
194 try:
202 response = session.post(url, data=msgpack.packb(payload))
195 response = session.post(url, data=msgpack.packb(payload))
203 except pycurl.error as e:
196 except pycurl.error as e:
204 raise exceptions.HttpVCSCommunicationError(e)
197 raise exceptions.HttpVCSCommunicationError(e)
205
198
206 try:
199 try:
207 response = msgpack.unpackb(response.content)
200 response = msgpack.unpackb(response.content)
208 except Exception:
201 except Exception:
209 log.exception('Failed to decode repsponse %r', response.content)
202 log.exception('Failed to decode repsponse %r', response.content)
210 raise
203 raise
211
204
212 error = response.get('error')
205 error = response.get('error')
213 if error:
206 if error:
214 type_ = error.get('type', 'Exception')
207 type_ = error.get('type', 'Exception')
215 exc = exceptions_map.get(type_, Exception)
208 exc = exceptions_map.get(type_, Exception)
216 exc = exc(error.get('message'))
209 exc = exc(error.get('message'))
217 try:
210 try:
218 exc._vcs_kind = error['_vcs_kind']
211 exc._vcs_kind = error['_vcs_kind']
219 except KeyError:
212 except KeyError:
220 pass
213 pass
221
214
222 try:
215 try:
223 exc._vcs_server_traceback = error['traceback']
216 exc._vcs_server_traceback = error['traceback']
224 except KeyError:
217 except KeyError:
225 pass
218 pass
226
219
227 raise exc
220 raise exc
228 return response.get('result')
221 return response.get('result')
229
222
230
223
231 class VcsHttpProxy(object):
224 class VcsHttpProxy(object):
232
225
233 CHUNK_SIZE = 16384
226 CHUNK_SIZE = 16384
234
227
235 def __init__(self, server_and_port, backend_endpoint):
228 def __init__(self, server_and_port, backend_endpoint):
236 adapter = requests.adapters.HTTPAdapter(max_retries=5)
229 adapter = requests.adapters.HTTPAdapter(max_retries=5)
237 self.base_url = urlparse.urljoin(
230 self.base_url = urlparse.urljoin(
238 'http://%s' % server_and_port, backend_endpoint)
231 'http://%s' % server_and_port, backend_endpoint)
239 self.session = requests.Session()
232 self.session = requests.Session()
240 self.session.mount('http://', adapter)
233 self.session.mount('http://', adapter)
241
234
242 def handle(self, environment, input_data, *args, **kwargs):
235 def handle(self, environment, input_data, *args, **kwargs):
243 data = {
236 data = {
244 'environment': environment,
237 'environment': environment,
245 'input_data': input_data,
238 'input_data': input_data,
246 'args': args,
239 'args': args,
247 'kwargs': kwargs
240 'kwargs': kwargs
248 }
241 }
249 result = self.session.post(
242 result = self.session.post(
250 self.base_url, msgpack.packb(data), stream=True)
243 self.base_url, msgpack.packb(data), stream=True)
251 return self._get_result(result)
244 return self._get_result(result)
252
245
253 def _deserialize_and_raise(self, error):
246 def _deserialize_and_raise(self, error):
254 exception = Exception(error['message'])
247 exception = Exception(error['message'])
255 try:
248 try:
256 exception._vcs_kind = error['_vcs_kind']
249 exception._vcs_kind = error['_vcs_kind']
257 except KeyError:
250 except KeyError:
258 pass
251 pass
259 raise exception
252 raise exception
260
253
261 def _iterate(self, result):
254 def _iterate(self, result):
262 unpacker = msgpack.Unpacker()
255 unpacker = msgpack.Unpacker()
263 for line in result.iter_content(chunk_size=self.CHUNK_SIZE):
256 for line in result.iter_content(chunk_size=self.CHUNK_SIZE):
264 unpacker.feed(line)
257 unpacker.feed(line)
265 for chunk in unpacker:
258 for chunk in unpacker:
266 yield chunk
259 yield chunk
267
260
268 def _get_result(self, result):
261 def _get_result(self, result):
269 iterator = self._iterate(result)
262 iterator = self._iterate(result)
270 error = iterator.next()
263 error = iterator.next()
271 if error:
264 if error:
272 self._deserialize_and_raise(error)
265 self._deserialize_and_raise(error)
273
266
274 status = iterator.next()
267 status = iterator.next()
275 headers = iterator.next()
268 headers = iterator.next()
276
269
277 return iterator, status, headers
270 return iterator, status, headers
278
271
279
272
280 class ThreadlocalSessionFactory(object):
273 class ThreadlocalSessionFactory(object):
281 """
274 """
282 Creates one CurlSession per thread on demand.
275 Creates one CurlSession per thread on demand.
283 """
276 """
284
277
285 def __init__(self):
278 def __init__(self):
286 self._thread_local = threading.local()
279 self._thread_local = threading.local()
287
280
288 def __call__(self):
281 def __call__(self):
289 if not hasattr(self._thread_local, 'curl_session'):
282 if not hasattr(self._thread_local, 'curl_session'):
290 self._thread_local.curl_session = CurlSession()
283 self._thread_local.curl_session = CurlSession()
291 return self._thread_local.curl_session
284 return self._thread_local.curl_session
@@ -1,87 +1,66 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2014-2017 RhodeCode GmbH
3 # Copyright (C) 2014-2017 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 Internal settings for vcs-lib
22 Internal settings for vcs-lib
23 """
23 """
24
24
25 # list of default encoding used in safe_unicode/safe_str methods
25 # list of default encoding used in safe_unicode/safe_str methods
26 DEFAULT_ENCODINGS = ['utf8']
26 DEFAULT_ENCODINGS = ['utf8']
27
27
28 # Optional arguments to rev-filter, it has to be a list
28 # Optional arguments to rev-filter, it has to be a list
29 # It can also be ['--branches', '--tags']
29 # It can also be ['--branches', '--tags']
30 GIT_REV_FILTER = ['--all']
30 GIT_REV_FILTER = ['--all']
31
31
32 # Compatibility version when creating SVN repositories. None means newest.
32 # Compatibility version when creating SVN repositories. None means newest.
33 # Other available options are: pre-1.4-compatible, pre-1.5-compatible,
33 # Other available options are: pre-1.4-compatible, pre-1.5-compatible,
34 # pre-1.6-compatible, pre-1.8-compatible
34 # pre-1.6-compatible, pre-1.8-compatible
35 SVN_COMPATIBLE_VERSION = None
35 SVN_COMPATIBLE_VERSION = None
36
36
37 ALIASES = ['hg', 'git', 'svn']
37 ALIASES = ['hg', 'git', 'svn']
38
38
39 BACKENDS = {
39 BACKENDS = {
40 'hg': 'rhodecode.lib.vcs.backends.hg.MercurialRepository',
40 'hg': 'rhodecode.lib.vcs.backends.hg.MercurialRepository',
41 'git': 'rhodecode.lib.vcs.backends.git.GitRepository',
41 'git': 'rhodecode.lib.vcs.backends.git.GitRepository',
42 'svn': 'rhodecode.lib.vcs.backends.svn.SubversionRepository',
42 'svn': 'rhodecode.lib.vcs.backends.svn.SubversionRepository',
43 }
43 }
44
44
45 # TODO: Remove once controllers/files.py is adjusted
45 # TODO: Remove once controllers/files.py is adjusted
46 ARCHIVE_SPECS = {
46 ARCHIVE_SPECS = {
47 'tbz2': ('application/x-bzip2', '.tar.bz2'),
47 'tbz2': ('application/x-bzip2', '.tar.bz2'),
48 'tgz': ('application/x-gzip', '.tar.gz'),
48 'tgz': ('application/x-gzip', '.tar.gz'),
49 'zip': ('application/zip', '.zip'),
49 'zip': ('application/zip', '.zip'),
50 }
50 }
51
51
52 HOOKS_PROTOCOL = None
52 HOOKS_PROTOCOL = None
53 HOOKS_DIRECT_CALLS = False
53 HOOKS_DIRECT_CALLS = False
54
54
55 PYRO_PORT = 9900
56
57 PYRO_GIT = 'git_remote'
58 PYRO_HG = 'hg_remote'
59 PYRO_SVN = 'svn_remote'
60 PYRO_VCSSERVER = 'vcs_server'
61 PYRO_GIT_REMOTE_WSGI = 'git_remote_wsgi'
62 PYRO_HG_REMOTE_WSGI = 'hg_remote_wsgi'
63
64 PYRO_RECONNECT_TRIES = 15
65 """
66 How many retries to reconnect will be performed if the connection was lost.
67
68 Each try takes 2s. Doing 15 means that we will give it up to 30s for a
69 connection to be re-established.
70 """
71
72
73 def pyro_remote(object_id, server_and_port):
74 return "PYRO:%s@%s" % (object_id, server_and_port)
75
76
55
77 def available_aliases():
56 def available_aliases():
78 """
57 """
79 Mercurial is required for the system to work, so in case vcs.backends does
58 Mercurial is required for the system to work, so in case vcs.backends does
80 not include it, we make sure it will be available internally
59 not include it, we make sure it will be available internally
81 TODO: anderson: refactor vcs.backends so it won't be necessary, VCS server
60 TODO: anderson: refactor vcs.backends so it won't be necessary, VCS server
82 should be responsible to dictate available backends.
61 should be responsible to dictate available backends.
83 """
62 """
84 aliases = ALIASES[:]
63 aliases = ALIASES[:]
85 if 'hg' not in aliases:
64 if 'hg' not in aliases:
86 aliases += ['hg']
65 aliases += ['hg']
87 return aliases
66 return aliases
@@ -1,205 +1,201 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2014-2017 RhodeCode GmbH
3 # Copyright (C) 2014-2017 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 Custom vcs exceptions module.
22 Custom vcs exceptions module.
23 """
23 """
24
24
25 import functools
25 import functools
26 import urllib2
26 import urllib2
27
27
28
28
29 class VCSCommunicationError(Exception):
29 class VCSCommunicationError(Exception):
30 pass
30 pass
31
31
32
32
33 class PyroVCSCommunicationError(VCSCommunicationError):
34 pass
35
36
37 class HttpVCSCommunicationError(VCSCommunicationError):
33 class HttpVCSCommunicationError(VCSCommunicationError):
38 pass
34 pass
39
35
40
36
41 class VCSError(Exception):
37 class VCSError(Exception):
42 pass
38 pass
43
39
44
40
45 class RepositoryError(VCSError):
41 class RepositoryError(VCSError):
46 pass
42 pass
47
43
48
44
49 class RepositoryRequirementError(RepositoryError):
45 class RepositoryRequirementError(RepositoryError):
50 pass
46 pass
51
47
52
48
53 class VCSBackendNotSupportedError(VCSError):
49 class VCSBackendNotSupportedError(VCSError):
54 """
50 """
55 Exception raised when VCSServer does not support requested backend
51 Exception raised when VCSServer does not support requested backend
56 """
52 """
57
53
58
54
59 class EmptyRepositoryError(RepositoryError):
55 class EmptyRepositoryError(RepositoryError):
60 pass
56 pass
61
57
62
58
63 class TagAlreadyExistError(RepositoryError):
59 class TagAlreadyExistError(RepositoryError):
64 pass
60 pass
65
61
66
62
67 class TagDoesNotExistError(RepositoryError):
63 class TagDoesNotExistError(RepositoryError):
68 pass
64 pass
69
65
70
66
71 class BranchAlreadyExistError(RepositoryError):
67 class BranchAlreadyExistError(RepositoryError):
72 pass
68 pass
73
69
74
70
75 class BranchDoesNotExistError(RepositoryError):
71 class BranchDoesNotExistError(RepositoryError):
76 pass
72 pass
77
73
78
74
79 class CommitError(RepositoryError):
75 class CommitError(RepositoryError):
80 """
76 """
81 Exceptions related to an existing commit
77 Exceptions related to an existing commit
82 """
78 """
83
79
84
80
85 class CommitDoesNotExistError(CommitError):
81 class CommitDoesNotExistError(CommitError):
86 pass
82 pass
87
83
88
84
89 class CommittingError(RepositoryError):
85 class CommittingError(RepositoryError):
90 """
86 """
91 Exceptions happening while creating a new commit
87 Exceptions happening while creating a new commit
92 """
88 """
93
89
94
90
95 class NothingChangedError(CommittingError):
91 class NothingChangedError(CommittingError):
96 pass
92 pass
97
93
98
94
99 class NodeError(VCSError):
95 class NodeError(VCSError):
100 pass
96 pass
101
97
102
98
103 class RemovedFileNodeError(NodeError):
99 class RemovedFileNodeError(NodeError):
104 pass
100 pass
105
101
106
102
107 class NodeAlreadyExistsError(CommittingError):
103 class NodeAlreadyExistsError(CommittingError):
108 pass
104 pass
109
105
110
106
111 class NodeAlreadyChangedError(CommittingError):
107 class NodeAlreadyChangedError(CommittingError):
112 pass
108 pass
113
109
114
110
115 class NodeDoesNotExistError(CommittingError):
111 class NodeDoesNotExistError(CommittingError):
116 pass
112 pass
117
113
118
114
119 class NodeNotChangedError(CommittingError):
115 class NodeNotChangedError(CommittingError):
120 pass
116 pass
121
117
122
118
123 class NodeAlreadyAddedError(CommittingError):
119 class NodeAlreadyAddedError(CommittingError):
124 pass
120 pass
125
121
126
122
127 class NodeAlreadyRemovedError(CommittingError):
123 class NodeAlreadyRemovedError(CommittingError):
128 pass
124 pass
129
125
130
126
131 class SubrepoMergeError(RepositoryError):
127 class SubrepoMergeError(RepositoryError):
132 """
128 """
133 This happens if we try to merge a repository which contains subrepos and
129 This happens if we try to merge a repository which contains subrepos and
134 the subrepos cannot be merged. The subrepos are not merged itself but
130 the subrepos cannot be merged. The subrepos are not merged itself but
135 their references in the root repo are merged.
131 their references in the root repo are merged.
136 """
132 """
137
133
138
134
139 class ImproperArchiveTypeError(VCSError):
135 class ImproperArchiveTypeError(VCSError):
140 pass
136 pass
141
137
142
138
143 class CommandError(VCSError):
139 class CommandError(VCSError):
144 pass
140 pass
145
141
146
142
147 class UnhandledException(VCSError):
143 class UnhandledException(VCSError):
148 """
144 """
149 Signals that something unexpected went wrong.
145 Signals that something unexpected went wrong.
150
146
151 This usually means we have a programming error on the side of the VCSServer
147 This usually means we have a programming error on the side of the VCSServer
152 and should inspect the logfile of the VCSServer to find more details.
148 and should inspect the logfile of the VCSServer to find more details.
153 """
149 """
154
150
155
151
156 _EXCEPTION_MAP = {
152 _EXCEPTION_MAP = {
157 'abort': RepositoryError,
153 'abort': RepositoryError,
158 'archive': ImproperArchiveTypeError,
154 'archive': ImproperArchiveTypeError,
159 'error': RepositoryError,
155 'error': RepositoryError,
160 'lookup': CommitDoesNotExistError,
156 'lookup': CommitDoesNotExistError,
161 'repo_locked': RepositoryError,
157 'repo_locked': RepositoryError,
162 'requirement': RepositoryRequirementError,
158 'requirement': RepositoryRequirementError,
163 'unhandled': UnhandledException,
159 'unhandled': UnhandledException,
164 # TODO: johbo: Define our own exception for this and stop abusing
160 # TODO: johbo: Define our own exception for this and stop abusing
165 # urllib's exception class.
161 # urllib's exception class.
166 'url_error': urllib2.URLError,
162 'url_error': urllib2.URLError,
167 'subrepo_merge_error': SubrepoMergeError,
163 'subrepo_merge_error': SubrepoMergeError,
168 }
164 }
169
165
170
166
171 def map_vcs_exceptions(func):
167 def map_vcs_exceptions(func):
172 """
168 """
173 Utility to decorate functions so that plain exceptions are translated.
169 Utility to decorate functions so that plain exceptions are translated.
174
170
175 The translation is based on `exc_map` which maps a `str` indicating
171 The translation is based on `exc_map` which maps a `str` indicating
176 the error type into an exception class representing this error inside
172 the error type into an exception class representing this error inside
177 of the vcs layer.
173 of the vcs layer.
178 """
174 """
179
175
180 @functools.wraps(func)
176 @functools.wraps(func)
181 def wrapper(*args, **kwargs):
177 def wrapper(*args, **kwargs):
182 try:
178 try:
183 return func(*args, **kwargs)
179 return func(*args, **kwargs)
184 except Exception as e:
180 except Exception as e:
185 # The error middleware adds information if it finds
181 # The error middleware adds information if it finds
186 # __traceback_info__ in a frame object. This way the remote
182 # __traceback_info__ in a frame object. This way the remote
187 # traceback information is made available in error reports.
183 # traceback information is made available in error reports.
188 remote_tb = getattr(e, '_vcs_server_traceback', None)
184 remote_tb = getattr(e, '_vcs_server_traceback', None)
189 if remote_tb:
185 if remote_tb:
190 __traceback_info__ = (
186 __traceback_info__ = (
191 'Found VCSServer remote traceback information:\n\n' +
187 'Found VCSServer remote traceback information:\n\n' +
192 '\n'.join(remote_tb))
188 '\n'.join(remote_tb))
193
189
194 # Avoid that remote_tb also appears in the frame
190 # Avoid that remote_tb also appears in the frame
195 del remote_tb
191 del remote_tb
196
192
197 # Special vcs errors had an attribute "_vcs_kind" which is used
193 # Special vcs errors had an attribute "_vcs_kind" which is used
198 # to translate them to the proper exception class in the vcs
194 # to translate them to the proper exception class in the vcs
199 # client layer.
195 # client layer.
200 kind = getattr(e, '_vcs_kind', None)
196 kind = getattr(e, '_vcs_kind', None)
201 if kind:
197 if kind:
202 raise _EXCEPTION_MAP[kind](*e.args)
198 raise _EXCEPTION_MAP[kind](*e.args)
203 else:
199 else:
204 raise
200 raise
205 return wrapper
201 return wrapper
@@ -1,462 +1,460 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2017 RhodeCode GmbH
3 # Copyright (C) 2010-2017 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import base64
21 import base64
22
22
23 import mock
23 import mock
24 import pytest
24 import pytest
25
25
26 from rhodecode.tests.utils import CustomTestApp
26 from rhodecode.tests.utils import CustomTestApp
27
27
28 from rhodecode.lib.caching_query import FromCache
28 from rhodecode.lib.caching_query import FromCache
29 from rhodecode.lib.hooks_daemon import DummyHooksCallbackDaemon
29 from rhodecode.lib.hooks_daemon import DummyHooksCallbackDaemon
30 from rhodecode.lib.middleware import simplevcs
30 from rhodecode.lib.middleware import simplevcs
31 from rhodecode.lib.middleware.https_fixup import HttpsFixup
31 from rhodecode.lib.middleware.https_fixup import HttpsFixup
32 from rhodecode.lib.middleware.utils import scm_app_http
32 from rhodecode.lib.middleware.utils import scm_app_http
33 from rhodecode.model.db import User, _hash_key
33 from rhodecode.model.db import User, _hash_key
34 from rhodecode.model.meta import Session
34 from rhodecode.model.meta import Session
35 from rhodecode.tests import (
35 from rhodecode.tests import (
36 HG_REPO, TEST_USER_ADMIN_LOGIN, TEST_USER_ADMIN_PASS)
36 HG_REPO, TEST_USER_ADMIN_LOGIN, TEST_USER_ADMIN_PASS)
37 from rhodecode.tests.lib.middleware import mock_scm_app
37 from rhodecode.tests.lib.middleware import mock_scm_app
38 from rhodecode.tests.utils import set_anonymous_access
38 from rhodecode.tests.utils import set_anonymous_access
39
39
40
40
41 class StubVCSController(simplevcs.SimpleVCS):
41 class StubVCSController(simplevcs.SimpleVCS):
42
42
43 SCM = 'hg'
43 SCM = 'hg'
44 stub_response_body = tuple()
44 stub_response_body = tuple()
45
45
46 def __init__(self, *args, **kwargs):
46 def __init__(self, *args, **kwargs):
47 super(StubVCSController, self).__init__(*args, **kwargs)
47 super(StubVCSController, self).__init__(*args, **kwargs)
48 self._action = 'pull'
48 self._action = 'pull'
49 self._name = HG_REPO
49 self._name = HG_REPO
50 self.set_repo_names(None)
50 self.set_repo_names(None)
51
51
52 def _get_repository_name(self, environ):
52 def _get_repository_name(self, environ):
53 return self._name
53 return self._name
54
54
55 def _get_action(self, environ):
55 def _get_action(self, environ):
56 return self._action
56 return self._action
57
57
58 def _create_wsgi_app(self, repo_path, repo_name, config):
58 def _create_wsgi_app(self, repo_path, repo_name, config):
59 def fake_app(environ, start_response):
59 def fake_app(environ, start_response):
60 start_response('200 OK', [])
60 start_response('200 OK', [])
61 return self.stub_response_body
61 return self.stub_response_body
62 return fake_app
62 return fake_app
63
63
64 def _create_config(self, extras, repo_name):
64 def _create_config(self, extras, repo_name):
65 return None
65 return None
66
66
67
67
68 @pytest.fixture
68 @pytest.fixture
69 def vcscontroller(pylonsapp, config_stub):
69 def vcscontroller(pylonsapp, config_stub):
70 config_stub.testing_securitypolicy()
70 config_stub.testing_securitypolicy()
71 config_stub.include('rhodecode.authentication')
71 config_stub.include('rhodecode.authentication')
72
72
73 set_anonymous_access(True)
73 set_anonymous_access(True)
74 controller = StubVCSController(pylonsapp, pylonsapp.config, None)
74 controller = StubVCSController(pylonsapp, pylonsapp.config, None)
75 app = HttpsFixup(controller, pylonsapp.config)
75 app = HttpsFixup(controller, pylonsapp.config)
76 app = CustomTestApp(app)
76 app = CustomTestApp(app)
77
77
78 _remove_default_user_from_query_cache()
78 _remove_default_user_from_query_cache()
79
79
80 # Sanity checks that things are set up correctly
80 # Sanity checks that things are set up correctly
81 app.get('/' + HG_REPO, status=200)
81 app.get('/' + HG_REPO, status=200)
82
82
83 app.controller = controller
83 app.controller = controller
84 return app
84 return app
85
85
86
86
87 def _remove_default_user_from_query_cache():
87 def _remove_default_user_from_query_cache():
88 user = User.get_default_user(cache=True)
88 user = User.get_default_user(cache=True)
89 query = Session().query(User).filter(User.username == user.username)
89 query = Session().query(User).filter(User.username == user.username)
90 query = query.options(FromCache(
90 query = query.options(FromCache(
91 "sql_cache_short", "get_user_%s" % _hash_key(user.username)))
91 "sql_cache_short", "get_user_%s" % _hash_key(user.username)))
92 query.invalidate()
92 query.invalidate()
93 Session().expire(user)
93 Session().expire(user)
94
94
95
95
96 @pytest.fixture
96 @pytest.fixture
97 def disable_anonymous_user(request, pylonsapp):
97 def disable_anonymous_user(request, pylonsapp):
98 set_anonymous_access(False)
98 set_anonymous_access(False)
99
99
100 @request.addfinalizer
100 @request.addfinalizer
101 def cleanup():
101 def cleanup():
102 set_anonymous_access(True)
102 set_anonymous_access(True)
103
103
104
104
105 def test_handles_exceptions_during_permissions_checks(
105 def test_handles_exceptions_during_permissions_checks(
106 vcscontroller, disable_anonymous_user):
106 vcscontroller, disable_anonymous_user):
107 user_and_pass = '%s:%s' % (TEST_USER_ADMIN_LOGIN, TEST_USER_ADMIN_PASS)
107 user_and_pass = '%s:%s' % (TEST_USER_ADMIN_LOGIN, TEST_USER_ADMIN_PASS)
108 auth_password = base64.encodestring(user_and_pass).strip()
108 auth_password = base64.encodestring(user_and_pass).strip()
109 extra_environ = {
109 extra_environ = {
110 'AUTH_TYPE': 'Basic',
110 'AUTH_TYPE': 'Basic',
111 'HTTP_AUTHORIZATION': 'Basic %s' % auth_password,
111 'HTTP_AUTHORIZATION': 'Basic %s' % auth_password,
112 'REMOTE_USER': TEST_USER_ADMIN_LOGIN,
112 'REMOTE_USER': TEST_USER_ADMIN_LOGIN,
113 }
113 }
114
114
115 # Verify that things are hooked up correctly
115 # Verify that things are hooked up correctly
116 vcscontroller.get('/', status=200, extra_environ=extra_environ)
116 vcscontroller.get('/', status=200, extra_environ=extra_environ)
117
117
118 # Simulate trouble during permission checks
118 # Simulate trouble during permission checks
119 with mock.patch('rhodecode.model.db.User.get_by_username',
119 with mock.patch('rhodecode.model.db.User.get_by_username',
120 side_effect=Exception) as get_user:
120 side_effect=Exception) as get_user:
121 # Verify that a correct 500 is returned and check that the expected
121 # Verify that a correct 500 is returned and check that the expected
122 # code path was hit.
122 # code path was hit.
123 vcscontroller.get('/', status=500, extra_environ=extra_environ)
123 vcscontroller.get('/', status=500, extra_environ=extra_environ)
124 assert get_user.called
124 assert get_user.called
125
125
126
126
127 def test_returns_forbidden_if_no_anonymous_access(
127 def test_returns_forbidden_if_no_anonymous_access(
128 vcscontroller, disable_anonymous_user):
128 vcscontroller, disable_anonymous_user):
129 vcscontroller.get('/', status=401)
129 vcscontroller.get('/', status=401)
130
130
131
131
132 class StubFailVCSController(simplevcs.SimpleVCS):
132 class StubFailVCSController(simplevcs.SimpleVCS):
133 def _handle_request(self, environ, start_response):
133 def _handle_request(self, environ, start_response):
134 raise Exception("BOOM")
134 raise Exception("BOOM")
135
135
136
136
137 @pytest.fixture(scope='module')
137 @pytest.fixture(scope='module')
138 def fail_controller(pylonsapp):
138 def fail_controller(pylonsapp):
139 controller = StubFailVCSController(pylonsapp, pylonsapp.config, None)
139 controller = StubFailVCSController(pylonsapp, pylonsapp.config, None)
140 controller = HttpsFixup(controller, pylonsapp.config)
140 controller = HttpsFixup(controller, pylonsapp.config)
141 controller = CustomTestApp(controller)
141 controller = CustomTestApp(controller)
142 return controller
142 return controller
143
143
144
144
145 def test_handles_exceptions_as_internal_server_error(fail_controller):
145 def test_handles_exceptions_as_internal_server_error(fail_controller):
146 fail_controller.get('/', status=500)
146 fail_controller.get('/', status=500)
147
147
148
148
149 def test_provides_traceback_for_appenlight(fail_controller):
149 def test_provides_traceback_for_appenlight(fail_controller):
150 response = fail_controller.get(
150 response = fail_controller.get(
151 '/', status=500, extra_environ={'appenlight.client': 'fake'})
151 '/', status=500, extra_environ={'appenlight.client': 'fake'})
152 assert 'appenlight.__traceback' in response.request.environ
152 assert 'appenlight.__traceback' in response.request.environ
153
153
154
154
155 def test_provides_utils_scm_app_as_scm_app_by_default(pylonsapp):
155 def test_provides_utils_scm_app_as_scm_app_by_default(pylonsapp):
156 controller = StubVCSController(pylonsapp, pylonsapp.config, None)
156 controller = StubVCSController(pylonsapp, pylonsapp.config, None)
157 assert controller.scm_app is scm_app_http
157 assert controller.scm_app is scm_app_http
158
158
159
159
160 def test_allows_to_override_scm_app_via_config(pylonsapp):
160 def test_allows_to_override_scm_app_via_config(pylonsapp):
161 config = pylonsapp.config.copy()
161 config = pylonsapp.config.copy()
162 config['vcs.scm_app_implementation'] = (
162 config['vcs.scm_app_implementation'] = (
163 'rhodecode.tests.lib.middleware.mock_scm_app')
163 'rhodecode.tests.lib.middleware.mock_scm_app')
164 controller = StubVCSController(pylonsapp, config, None)
164 controller = StubVCSController(pylonsapp, config, None)
165 assert controller.scm_app is mock_scm_app
165 assert controller.scm_app is mock_scm_app
166
166
167
167
168 @pytest.mark.parametrize('query_string, expected', [
168 @pytest.mark.parametrize('query_string, expected', [
169 ('cmd=stub_command', True),
169 ('cmd=stub_command', True),
170 ('cmd=listkeys', False),
170 ('cmd=listkeys', False),
171 ])
171 ])
172 def test_should_check_locking(query_string, expected):
172 def test_should_check_locking(query_string, expected):
173 result = simplevcs._should_check_locking(query_string)
173 result = simplevcs._should_check_locking(query_string)
174 assert result == expected
174 assert result == expected
175
175
176
176
177 class TestShadowRepoRegularExpression(object):
177 class TestShadowRepoRegularExpression(object):
178 pr_segment = 'pull-request'
178 pr_segment = 'pull-request'
179 shadow_segment = 'repository'
179 shadow_segment = 'repository'
180
180
181 @pytest.mark.parametrize('url, expected', [
181 @pytest.mark.parametrize('url, expected', [
182 # repo with/without groups
182 # repo with/without groups
183 ('My-Repo/{pr_segment}/1/{shadow_segment}', True),
183 ('My-Repo/{pr_segment}/1/{shadow_segment}', True),
184 ('Group/My-Repo/{pr_segment}/2/{shadow_segment}', True),
184 ('Group/My-Repo/{pr_segment}/2/{shadow_segment}', True),
185 ('Group/Sub-Group/My-Repo/{pr_segment}/3/{shadow_segment}', True),
185 ('Group/Sub-Group/My-Repo/{pr_segment}/3/{shadow_segment}', True),
186 ('Group/Sub-Group1/Sub-Group2/My-Repo/{pr_segment}/3/{shadow_segment}', True),
186 ('Group/Sub-Group1/Sub-Group2/My-Repo/{pr_segment}/3/{shadow_segment}', True),
187
187
188 # pull request ID
188 # pull request ID
189 ('MyRepo/{pr_segment}/1/{shadow_segment}', True),
189 ('MyRepo/{pr_segment}/1/{shadow_segment}', True),
190 ('MyRepo/{pr_segment}/1234567890/{shadow_segment}', True),
190 ('MyRepo/{pr_segment}/1234567890/{shadow_segment}', True),
191 ('MyRepo/{pr_segment}/-1/{shadow_segment}', False),
191 ('MyRepo/{pr_segment}/-1/{shadow_segment}', False),
192 ('MyRepo/{pr_segment}/invalid/{shadow_segment}', False),
192 ('MyRepo/{pr_segment}/invalid/{shadow_segment}', False),
193
193
194 # unicode
194 # unicode
195 (u'Sp€çîál-Repö/{pr_segment}/1/{shadow_segment}', True),
195 (u'Sp€çîál-Repö/{pr_segment}/1/{shadow_segment}', True),
196 (u'Sp€çîál-Gröüp/Sp€çîál-Repö/{pr_segment}/1/{shadow_segment}', True),
196 (u'Sp€çîál-Gröüp/Sp€çîál-Repö/{pr_segment}/1/{shadow_segment}', True),
197
197
198 # trailing/leading slash
198 # trailing/leading slash
199 ('/My-Repo/{pr_segment}/1/{shadow_segment}', False),
199 ('/My-Repo/{pr_segment}/1/{shadow_segment}', False),
200 ('My-Repo/{pr_segment}/1/{shadow_segment}/', False),
200 ('My-Repo/{pr_segment}/1/{shadow_segment}/', False),
201 ('/My-Repo/{pr_segment}/1/{shadow_segment}/', False),
201 ('/My-Repo/{pr_segment}/1/{shadow_segment}/', False),
202
202
203 # misc
203 # misc
204 ('My-Repo/{pr_segment}/1/{shadow_segment}/extra', False),
204 ('My-Repo/{pr_segment}/1/{shadow_segment}/extra', False),
205 ('My-Repo/{pr_segment}/1/{shadow_segment}extra', False),
205 ('My-Repo/{pr_segment}/1/{shadow_segment}extra', False),
206 ])
206 ])
207 def test_shadow_repo_regular_expression(self, url, expected):
207 def test_shadow_repo_regular_expression(self, url, expected):
208 from rhodecode.lib.middleware.simplevcs import SimpleVCS
208 from rhodecode.lib.middleware.simplevcs import SimpleVCS
209 url = url.format(
209 url = url.format(
210 pr_segment=self.pr_segment,
210 pr_segment=self.pr_segment,
211 shadow_segment=self.shadow_segment)
211 shadow_segment=self.shadow_segment)
212 match_obj = SimpleVCS.shadow_repo_re.match(url)
212 match_obj = SimpleVCS.shadow_repo_re.match(url)
213 assert (match_obj is not None) == expected
213 assert (match_obj is not None) == expected
214
214
215
215
216 @pytest.mark.backends('git', 'hg')
216 @pytest.mark.backends('git', 'hg')
217 class TestShadowRepoExposure(object):
217 class TestShadowRepoExposure(object):
218
218
219 def test_pull_on_shadow_repo_propagates_to_wsgi_app(self, pylonsapp):
219 def test_pull_on_shadow_repo_propagates_to_wsgi_app(self, pylonsapp):
220 """
220 """
221 Check that a pull action to a shadow repo is propagated to the
221 Check that a pull action to a shadow repo is propagated to the
222 underlying wsgi app.
222 underlying wsgi app.
223 """
223 """
224 controller = StubVCSController(pylonsapp, pylonsapp.config, None)
224 controller = StubVCSController(pylonsapp, pylonsapp.config, None)
225 controller._check_ssl = mock.Mock()
225 controller._check_ssl = mock.Mock()
226 controller.is_shadow_repo = True
226 controller.is_shadow_repo = True
227 controller._action = 'pull'
227 controller._action = 'pull'
228 controller.stub_response_body = 'dummy body value'
228 controller.stub_response_body = 'dummy body value'
229 environ_stub = {
229 environ_stub = {
230 'HTTP_HOST': 'test.example.com',
230 'HTTP_HOST': 'test.example.com',
231 'REQUEST_METHOD': 'GET',
231 'REQUEST_METHOD': 'GET',
232 'wsgi.url_scheme': 'http',
232 'wsgi.url_scheme': 'http',
233 }
233 }
234
234
235 response = controller(environ_stub, mock.Mock())
235 response = controller(environ_stub, mock.Mock())
236 response_body = ''.join(response)
236 response_body = ''.join(response)
237
237
238 # Assert that we got the response from the wsgi app.
238 # Assert that we got the response from the wsgi app.
239 assert response_body == controller.stub_response_body
239 assert response_body == controller.stub_response_body
240
240
241 def test_push_on_shadow_repo_raises(self, pylonsapp):
241 def test_push_on_shadow_repo_raises(self, pylonsapp):
242 """
242 """
243 Check that a push action to a shadow repo is aborted.
243 Check that a push action to a shadow repo is aborted.
244 """
244 """
245 controller = StubVCSController(pylonsapp, pylonsapp.config, None)
245 controller = StubVCSController(pylonsapp, pylonsapp.config, None)
246 controller._check_ssl = mock.Mock()
246 controller._check_ssl = mock.Mock()
247 controller.is_shadow_repo = True
247 controller.is_shadow_repo = True
248 controller._action = 'push'
248 controller._action = 'push'
249 controller.stub_response_body = 'dummy body value'
249 controller.stub_response_body = 'dummy body value'
250 environ_stub = {
250 environ_stub = {
251 'HTTP_HOST': 'test.example.com',
251 'HTTP_HOST': 'test.example.com',
252 'REQUEST_METHOD': 'GET',
252 'REQUEST_METHOD': 'GET',
253 'wsgi.url_scheme': 'http',
253 'wsgi.url_scheme': 'http',
254 }
254 }
255
255
256 response = controller(environ_stub, mock.Mock())
256 response = controller(environ_stub, mock.Mock())
257 response_body = ''.join(response)
257 response_body = ''.join(response)
258
258
259 assert response_body != controller.stub_response_body
259 assert response_body != controller.stub_response_body
260 # Assert that a 406 error is returned.
260 # Assert that a 406 error is returned.
261 assert '406 Not Acceptable' in response_body
261 assert '406 Not Acceptable' in response_body
262
262
263 def test_set_repo_names_no_shadow(self, pylonsapp):
263 def test_set_repo_names_no_shadow(self, pylonsapp):
264 """
264 """
265 Check that the set_repo_names method sets all names to the one returned
265 Check that the set_repo_names method sets all names to the one returned
266 by the _get_repository_name method on a request to a non shadow repo.
266 by the _get_repository_name method on a request to a non shadow repo.
267 """
267 """
268 environ_stub = {}
268 environ_stub = {}
269 controller = StubVCSController(pylonsapp, pylonsapp.config, None)
269 controller = StubVCSController(pylonsapp, pylonsapp.config, None)
270 controller._name = 'RepoGroup/MyRepo'
270 controller._name = 'RepoGroup/MyRepo'
271 controller.set_repo_names(environ_stub)
271 controller.set_repo_names(environ_stub)
272 assert not controller.is_shadow_repo
272 assert not controller.is_shadow_repo
273 assert (controller.url_repo_name ==
273 assert (controller.url_repo_name ==
274 controller.acl_repo_name ==
274 controller.acl_repo_name ==
275 controller.vcs_repo_name ==
275 controller.vcs_repo_name ==
276 controller._get_repository_name(environ_stub))
276 controller._get_repository_name(environ_stub))
277
277
278 def test_set_repo_names_with_shadow(self, pylonsapp, pr_util):
278 def test_set_repo_names_with_shadow(self, pylonsapp, pr_util):
279 """
279 """
280 Check that the set_repo_names method sets correct names on a request
280 Check that the set_repo_names method sets correct names on a request
281 to a shadow repo.
281 to a shadow repo.
282 """
282 """
283 from rhodecode.model.pull_request import PullRequestModel
283 from rhodecode.model.pull_request import PullRequestModel
284
284
285 pull_request = pr_util.create_pull_request()
285 pull_request = pr_util.create_pull_request()
286 shadow_url = '{target}/{pr_segment}/{pr_id}/{shadow_segment}'.format(
286 shadow_url = '{target}/{pr_segment}/{pr_id}/{shadow_segment}'.format(
287 target=pull_request.target_repo.repo_name,
287 target=pull_request.target_repo.repo_name,
288 pr_id=pull_request.pull_request_id,
288 pr_id=pull_request.pull_request_id,
289 pr_segment=TestShadowRepoRegularExpression.pr_segment,
289 pr_segment=TestShadowRepoRegularExpression.pr_segment,
290 shadow_segment=TestShadowRepoRegularExpression.shadow_segment)
290 shadow_segment=TestShadowRepoRegularExpression.shadow_segment)
291 controller = StubVCSController(pylonsapp, pylonsapp.config, None)
291 controller = StubVCSController(pylonsapp, pylonsapp.config, None)
292 controller._name = shadow_url
292 controller._name = shadow_url
293 controller.set_repo_names({})
293 controller.set_repo_names({})
294
294
295 # Get file system path to shadow repo for assertions.
295 # Get file system path to shadow repo for assertions.
296 workspace_id = PullRequestModel()._workspace_id(pull_request)
296 workspace_id = PullRequestModel()._workspace_id(pull_request)
297 target_vcs = pull_request.target_repo.scm_instance()
297 target_vcs = pull_request.target_repo.scm_instance()
298 vcs_repo_name = target_vcs._get_shadow_repository_path(
298 vcs_repo_name = target_vcs._get_shadow_repository_path(
299 workspace_id)
299 workspace_id)
300
300
301 assert controller.vcs_repo_name == vcs_repo_name
301 assert controller.vcs_repo_name == vcs_repo_name
302 assert controller.url_repo_name == shadow_url
302 assert controller.url_repo_name == shadow_url
303 assert controller.acl_repo_name == pull_request.target_repo.repo_name
303 assert controller.acl_repo_name == pull_request.target_repo.repo_name
304 assert controller.is_shadow_repo
304 assert controller.is_shadow_repo
305
305
306 def test_set_repo_names_with_shadow_but_missing_pr(
306 def test_set_repo_names_with_shadow_but_missing_pr(
307 self, pylonsapp, pr_util):
307 self, pylonsapp, pr_util):
308 """
308 """
309 Checks that the set_repo_names method enforces matching target repos
309 Checks that the set_repo_names method enforces matching target repos
310 and pull request IDs.
310 and pull request IDs.
311 """
311 """
312 pull_request = pr_util.create_pull_request()
312 pull_request = pr_util.create_pull_request()
313 shadow_url = '{target}/{pr_segment}/{pr_id}/{shadow_segment}'.format(
313 shadow_url = '{target}/{pr_segment}/{pr_id}/{shadow_segment}'.format(
314 target=pull_request.target_repo.repo_name,
314 target=pull_request.target_repo.repo_name,
315 pr_id=999999999,
315 pr_id=999999999,
316 pr_segment=TestShadowRepoRegularExpression.pr_segment,
316 pr_segment=TestShadowRepoRegularExpression.pr_segment,
317 shadow_segment=TestShadowRepoRegularExpression.shadow_segment)
317 shadow_segment=TestShadowRepoRegularExpression.shadow_segment)
318 controller = StubVCSController(pylonsapp, pylonsapp.config, None)
318 controller = StubVCSController(pylonsapp, pylonsapp.config, None)
319 controller._name = shadow_url
319 controller._name = shadow_url
320 controller.set_repo_names({})
320 controller.set_repo_names({})
321
321
322 assert not controller.is_shadow_repo
322 assert not controller.is_shadow_repo
323 assert (controller.url_repo_name ==
323 assert (controller.url_repo_name ==
324 controller.acl_repo_name ==
324 controller.acl_repo_name ==
325 controller.vcs_repo_name)
325 controller.vcs_repo_name)
326
326
327
327
328 @pytest.mark.usefixtures('db')
328 @pytest.mark.usefixtures('db')
329 @mock.patch.multiple(
330 'Pyro4.config', SERVERTYPE='multiplex', POLLTIMEOUT=0.01)
331 class TestGenerateVcsResponse:
329 class TestGenerateVcsResponse:
332
330
333 def test_ensures_that_start_response_is_called_early_enough(self):
331 def test_ensures_that_start_response_is_called_early_enough(self):
334 self.call_controller_with_response_body(iter(['a', 'b']))
332 self.call_controller_with_response_body(iter(['a', 'b']))
335 assert self.start_response.called
333 assert self.start_response.called
336
334
337 def test_invalidates_cache_after_body_is_consumed(self):
335 def test_invalidates_cache_after_body_is_consumed(self):
338 result = self.call_controller_with_response_body(iter(['a', 'b']))
336 result = self.call_controller_with_response_body(iter(['a', 'b']))
339 assert not self.was_cache_invalidated()
337 assert not self.was_cache_invalidated()
340 # Consume the result
338 # Consume the result
341 list(result)
339 list(result)
342 assert self.was_cache_invalidated()
340 assert self.was_cache_invalidated()
343
341
344 @mock.patch('rhodecode.lib.middleware.simplevcs.HTTPLockedRC')
342 @mock.patch('rhodecode.lib.middleware.simplevcs.HTTPLockedRC')
345 def test_handles_locking_exception(self, http_locked_rc):
343 def test_handles_locking_exception(self, http_locked_rc):
346 result = self.call_controller_with_response_body(
344 result = self.call_controller_with_response_body(
347 self.raise_result_iter(vcs_kind='repo_locked'))
345 self.raise_result_iter(vcs_kind='repo_locked'))
348 assert not http_locked_rc.called
346 assert not http_locked_rc.called
349 # Consume the result
347 # Consume the result
350 list(result)
348 list(result)
351 assert http_locked_rc.called
349 assert http_locked_rc.called
352
350
353 @mock.patch('rhodecode.lib.middleware.simplevcs.HTTPRequirementError')
351 @mock.patch('rhodecode.lib.middleware.simplevcs.HTTPRequirementError')
354 def test_handles_requirement_exception(self, http_requirement):
352 def test_handles_requirement_exception(self, http_requirement):
355 result = self.call_controller_with_response_body(
353 result = self.call_controller_with_response_body(
356 self.raise_result_iter(vcs_kind='requirement'))
354 self.raise_result_iter(vcs_kind='requirement'))
357 assert not http_requirement.called
355 assert not http_requirement.called
358 # Consume the result
356 # Consume the result
359 list(result)
357 list(result)
360 assert http_requirement.called
358 assert http_requirement.called
361
359
362 @mock.patch('rhodecode.lib.middleware.simplevcs.HTTPLockedRC')
360 @mock.patch('rhodecode.lib.middleware.simplevcs.HTTPLockedRC')
363 def test_handles_locking_exception_in_app_call(self, http_locked_rc):
361 def test_handles_locking_exception_in_app_call(self, http_locked_rc):
364 app_factory_patcher = mock.patch.object(
362 app_factory_patcher = mock.patch.object(
365 StubVCSController, '_create_wsgi_app')
363 StubVCSController, '_create_wsgi_app')
366 with app_factory_patcher as app_factory:
364 with app_factory_patcher as app_factory:
367 app_factory().side_effect = self.vcs_exception()
365 app_factory().side_effect = self.vcs_exception()
368 result = self.call_controller_with_response_body(['a'])
366 result = self.call_controller_with_response_body(['a'])
369 list(result)
367 list(result)
370 assert http_locked_rc.called
368 assert http_locked_rc.called
371
369
372 def test_raises_unknown_exceptions(self):
370 def test_raises_unknown_exceptions(self):
373 result = self.call_controller_with_response_body(
371 result = self.call_controller_with_response_body(
374 self.raise_result_iter(vcs_kind='unknown'))
372 self.raise_result_iter(vcs_kind='unknown'))
375 with pytest.raises(Exception):
373 with pytest.raises(Exception):
376 list(result)
374 list(result)
377
375
378 def test_prepare_callback_daemon_is_called(self):
376 def test_prepare_callback_daemon_is_called(self):
379 def side_effect(extras):
377 def side_effect(extras):
380 return DummyHooksCallbackDaemon(), extras
378 return DummyHooksCallbackDaemon(), extras
381
379
382 prepare_patcher = mock.patch.object(
380 prepare_patcher = mock.patch.object(
383 StubVCSController, '_prepare_callback_daemon')
381 StubVCSController, '_prepare_callback_daemon')
384 with prepare_patcher as prepare_mock:
382 with prepare_patcher as prepare_mock:
385 prepare_mock.side_effect = side_effect
383 prepare_mock.side_effect = side_effect
386 self.call_controller_with_response_body(iter(['a', 'b']))
384 self.call_controller_with_response_body(iter(['a', 'b']))
387 assert prepare_mock.called
385 assert prepare_mock.called
388 assert prepare_mock.call_count == 1
386 assert prepare_mock.call_count == 1
389
387
390 def call_controller_with_response_body(self, response_body):
388 def call_controller_with_response_body(self, response_body):
391 settings = {
389 settings = {
392 'base_path': 'fake_base_path',
390 'base_path': 'fake_base_path',
393 'vcs.hooks.protocol': 'http',
391 'vcs.hooks.protocol': 'http',
394 'vcs.hooks.direct_calls': False,
392 'vcs.hooks.direct_calls': False,
395 }
393 }
396 controller = StubVCSController(None, settings, None)
394 controller = StubVCSController(None, settings, None)
397 controller._invalidate_cache = mock.Mock()
395 controller._invalidate_cache = mock.Mock()
398 controller.stub_response_body = response_body
396 controller.stub_response_body = response_body
399 self.start_response = mock.Mock()
397 self.start_response = mock.Mock()
400 result = controller._generate_vcs_response(
398 result = controller._generate_vcs_response(
401 environ={}, start_response=self.start_response,
399 environ={}, start_response=self.start_response,
402 repo_path='fake_repo_path',
400 repo_path='fake_repo_path',
403 extras={}, action='push')
401 extras={}, action='push')
404 self.controller = controller
402 self.controller = controller
405 return result
403 return result
406
404
407 def raise_result_iter(self, vcs_kind='repo_locked'):
405 def raise_result_iter(self, vcs_kind='repo_locked'):
408 """
406 """
409 Simulates an exception due to a vcs raised exception if kind vcs_kind
407 Simulates an exception due to a vcs raised exception if kind vcs_kind
410 """
408 """
411 raise self.vcs_exception(vcs_kind=vcs_kind)
409 raise self.vcs_exception(vcs_kind=vcs_kind)
412 yield "never_reached"
410 yield "never_reached"
413
411
414 def vcs_exception(self, vcs_kind='repo_locked'):
412 def vcs_exception(self, vcs_kind='repo_locked'):
415 locked_exception = Exception('TEST_MESSAGE')
413 locked_exception = Exception('TEST_MESSAGE')
416 locked_exception._vcs_kind = vcs_kind
414 locked_exception._vcs_kind = vcs_kind
417 return locked_exception
415 return locked_exception
418
416
419 def was_cache_invalidated(self):
417 def was_cache_invalidated(self):
420 return self.controller._invalidate_cache.called
418 return self.controller._invalidate_cache.called
421
419
422
420
423 class TestInitializeGenerator:
421 class TestInitializeGenerator:
424
422
425 def test_drains_first_element(self):
423 def test_drains_first_element(self):
426 gen = self.factory(['__init__', 1, 2])
424 gen = self.factory(['__init__', 1, 2])
427 result = list(gen)
425 result = list(gen)
428 assert result == [1, 2]
426 assert result == [1, 2]
429
427
430 @pytest.mark.parametrize('values', [
428 @pytest.mark.parametrize('values', [
431 [],
429 [],
432 [1, 2],
430 [1, 2],
433 ])
431 ])
434 def test_raises_value_error(self, values):
432 def test_raises_value_error(self, values):
435 with pytest.raises(ValueError):
433 with pytest.raises(ValueError):
436 self.factory(values)
434 self.factory(values)
437
435
438 @simplevcs.initialize_generator
436 @simplevcs.initialize_generator
439 def factory(self, iterable):
437 def factory(self, iterable):
440 for elem in iterable:
438 for elem in iterable:
441 yield elem
439 yield elem
442
440
443
441
444 class TestPrepareHooksDaemon(object):
442 class TestPrepareHooksDaemon(object):
445 def test_calls_imported_prepare_callback_daemon(self, app_settings):
443 def test_calls_imported_prepare_callback_daemon(self, app_settings):
446 expected_extras = {'extra1': 'value1'}
444 expected_extras = {'extra1': 'value1'}
447 daemon = DummyHooksCallbackDaemon()
445 daemon = DummyHooksCallbackDaemon()
448
446
449 controller = StubVCSController(None, app_settings, None)
447 controller = StubVCSController(None, app_settings, None)
450 prepare_patcher = mock.patch.object(
448 prepare_patcher = mock.patch.object(
451 simplevcs, 'prepare_callback_daemon',
449 simplevcs, 'prepare_callback_daemon',
452 return_value=(daemon, expected_extras))
450 return_value=(daemon, expected_extras))
453 with prepare_patcher as prepare_mock:
451 with prepare_patcher as prepare_mock:
454 callback_daemon, extras = controller._prepare_callback_daemon(
452 callback_daemon, extras = controller._prepare_callback_daemon(
455 expected_extras.copy())
453 expected_extras.copy())
456 prepare_mock.assert_called_once_with(
454 prepare_mock.assert_called_once_with(
457 expected_extras,
455 expected_extras,
458 protocol=app_settings['vcs.hooks.protocol'],
456 protocol=app_settings['vcs.hooks.protocol'],
459 use_direct_calls=app_settings['vcs.hooks.direct_calls'])
457 use_direct_calls=app_settings['vcs.hooks.direct_calls'])
460
458
461 assert callback_daemon == daemon
459 assert callback_daemon == daemon
462 assert extras == extras
460 assert extras == extras
@@ -1,61 +1,59 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2017 RhodeCode GmbH
3 # Copyright (C) 2010-2017 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import mock
21 import mock
22 import pytest
22 import pytest
23
23
24
24
25 @pytest.mark.usefixtures('autologin_user', 'app')
25 @pytest.mark.usefixtures('autologin_user', 'app')
26 def test_vcs_available_returns_summary_page(app, backend):
26 def test_vcs_available_returns_summary_page(app, backend):
27 url = '/{repo_name}'.format(repo_name=backend.repo.repo_name)
27 url = '/{repo_name}'.format(repo_name=backend.repo.repo_name)
28 response = app.get(url)
28 response = app.get(url)
29 assert response.status_code == 200
29 assert response.status_code == 200
30 assert 'Summary' in response.body
30 assert 'Summary' in response.body
31
31
32
32
33 @pytest.mark.usefixtures('autologin_user', 'app')
33 @pytest.mark.usefixtures('autologin_user', 'app')
34 def test_vcs_unavailable_returns_vcs_error_page(app, backend, app_settings):
34 def test_vcs_unavailable_returns_vcs_error_page(app, backend, app_settings):
35 from rhodecode.lib.vcs.exceptions import VCSCommunicationError
35 from rhodecode.lib.vcs.exceptions import VCSCommunicationError
36 from rhodecode.lib.middleware.error_handling import (
36 from rhodecode.lib.middleware.error_handling import (
37 PylonsErrorHandlingMiddleware)
37 PylonsErrorHandlingMiddleware)
38
38
39 # Depending on the used VCSServer protocol we have to patch a different
39 # Depending on the used VCSServer protocol we have to patch a different
40 # RemoteRepo class to raise an exception. For the test it doesn't matter
40 # RemoteRepo class to raise an exception. For the test it doesn't matter
41 # if http or pyro4 is used, it just requires the exception to be raised.
41 # if http is used, it just requires the exception to be raised.
42 vcs_protocol = app_settings['vcs.server.protocol']
42 vcs_protocol = app_settings['vcs.server.protocol']
43 if vcs_protocol == 'http':
43 if vcs_protocol == 'http':
44 from rhodecode.lib.vcs.client_http import RemoteRepo
44 from rhodecode.lib.vcs.client_http import RemoteRepo
45 elif vcs_protocol == 'pyro4':
46 from rhodecode.lib.vcs.client import RemoteRepo
47 else:
45 else:
48 pytest.fail('Unknown VCS server protocol: "{}"'.format(vcs_protocol))
46 pytest.fail('Unknown VCS server protocol: "{}"'.format(vcs_protocol))
49
47
50 url = '/{repo_name}'.format(repo_name=backend.repo.repo_name)
48 url = '/{repo_name}'.format(repo_name=backend.repo.repo_name)
51
49
52 # Patch remote repo to raise an exception instead of making a RPC.
50 # Patch remote repo to raise an exception instead of making a RPC.
53 with mock.patch.object(RemoteRepo, '__getattr__') as remote_mock:
51 with mock.patch.object(RemoteRepo, '__getattr__') as remote_mock:
54 remote_mock.side_effect = VCSCommunicationError()
52 remote_mock.side_effect = VCSCommunicationError()
55 # Patch pylons error handling middleware to not re-raise exceptions.
53 # Patch pylons error handling middleware to not re-raise exceptions.
56 with mock.patch.object(PylonsErrorHandlingMiddleware, 'reraise') as r:
54 with mock.patch.object(PylonsErrorHandlingMiddleware, 'reraise') as r:
57 r.return_value = False
55 r.return_value = False
58 response = app.get(url, expect_errors=True)
56 response = app.get(url, expect_errors=True)
59
57
60 assert response.status_code == 502
58 assert response.status_code == 502
61 assert 'Could not connect to VCS Server' in response.body
59 assert 'Could not connect to VCS Server' in response.body
@@ -1,130 +1,83 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2016-2017 RhodeCode GmbH
3 # Copyright (C) 2016-2017 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import mock
22 import Pyro4
23 import pytest
21 import pytest
24
22
25 from rhodecode.tests.utils import CustomTestApp
23 from rhodecode.tests.utils import CustomTestApp
26 from rhodecode.lib.middleware.utils import scm_app_http, scm_app
24 from rhodecode.lib.middleware.utils import scm_app_http, scm_app
27 from rhodecode.lib.vcs.conf import settings
25 from rhodecode.lib.vcs.conf import settings
28
26
29
27
30 def vcs_http_app(vcsserver_http_echo_app):
28 def vcs_http_app(vcsserver_http_echo_app):
31 """
29 """
32 VcsHttpProxy wrapped in WebTest.
30 VcsHttpProxy wrapped in WebTest.
33 """
31 """
34 git_url = vcsserver_http_echo_app.http_url + 'stream/git/'
32 git_url = vcsserver_http_echo_app.http_url + 'stream/git/'
35 vcs_http_proxy = scm_app_http.VcsHttpProxy(
33 vcs_http_proxy = scm_app_http.VcsHttpProxy(
36 git_url, 'stub_path', 'stub_name', None)
34 git_url, 'stub_path', 'stub_name', None)
37 app = CustomTestApp(vcs_http_proxy)
35 app = CustomTestApp(vcs_http_proxy)
38 return app
36 return app
39
37
40
38
41 @pytest.fixture(scope='module')
39 @pytest.fixture(scope='module')
42 def vcsserver_http_echo_app(request, vcsserver_factory):
40 def vcsserver_http_echo_app(request, vcsserver_factory):
43 """
41 """
44 A running VCSServer with the EchoApp activated via HTTP.
42 A running VCSServer with the EchoApp activated via HTTP.
45 """
43 """
46 vcsserver = vcsserver_factory(
44 vcsserver = vcsserver_factory(
47 request=request,
45 request=request,
48 use_http=True,
46 use_http=True,
49 overrides=[{'app:main': {'dev.use_echo_app': 'true'}}])
47 overrides=[{'app:main': {'dev.use_echo_app': 'true'}}])
50 return vcsserver
48 return vcsserver
51
49
52
50
53 @pytest.fixture(scope='session')
51 @pytest.fixture(scope='session')
54 def data():
52 def data():
55 one_kb = "x" * 1024
53 one_kb = "x" * 1024
56 return one_kb * 1024 * 10
54 return one_kb * 1024 * 10
57
55
58
56
59 def test_reuse_app_no_data(repeat, vcsserver_http_echo_app):
57 def test_reuse_app_no_data(repeat, vcsserver_http_echo_app):
60 app = vcs_http_app(vcsserver_http_echo_app)
58 app = vcs_http_app(vcsserver_http_echo_app)
61 for x in xrange(repeat / 10):
59 for x in xrange(repeat / 10):
62 response = app.post('/')
60 response = app.post('/')
63 assert response.status_code == 200
61 assert response.status_code == 200
64
62
65
63
66 def test_reuse_app_with_data(data, repeat, vcsserver_http_echo_app):
64 def test_reuse_app_with_data(data, repeat, vcsserver_http_echo_app):
67 app = vcs_http_app(vcsserver_http_echo_app)
65 app = vcs_http_app(vcsserver_http_echo_app)
68 for x in xrange(repeat / 10):
66 for x in xrange(repeat / 10):
69 response = app.post('/', params=data)
67 response = app.post('/', params=data)
70 assert response.status_code == 200
68 assert response.status_code == 200
71
69
72
70
73 def test_create_app_per_request_no_data(repeat, vcsserver_http_echo_app):
71 def test_create_app_per_request_no_data(repeat, vcsserver_http_echo_app):
74 for x in xrange(repeat / 10):
72 for x in xrange(repeat / 10):
75 app = vcs_http_app(vcsserver_http_echo_app)
73 app = vcs_http_app(vcsserver_http_echo_app)
76 response = app.post('/')
74 response = app.post('/')
77 assert response.status_code == 200
75 assert response.status_code == 200
78
76
79
77
80 def test_create_app_per_request_with_data(
78 def test_create_app_per_request_with_data(
81 data, repeat, vcsserver_http_echo_app):
79 data, repeat, vcsserver_http_echo_app):
82 for x in xrange(repeat / 10):
80 for x in xrange(repeat / 10):
83 app = vcs_http_app(vcsserver_http_echo_app)
81 app = vcs_http_app(vcsserver_http_echo_app)
84 response = app.post('/', params=data)
82 response = app.post('/', params=data)
85 assert response.status_code == 200
83 assert response.status_code == 200
86
87
88 @pytest.fixture(scope='module')
89 def vcsserver_pyro_echo_app(request, vcsserver_factory):
90 """
91 A running VCSServer with the EchoApp activated via Pyro4.
92 """
93 vcsserver = vcsserver_factory(
94 request=request,
95 use_http=False,
96 overrides=[{'DEFAULT': {'dev.use_echo_app': 'true'}}])
97 return vcsserver
98
99
100 def vcs_pyro4_app(vcsserver_pyro_echo_app):
101 """
102 Pyro4 based Vcs proxy wrapped in WebTest
103 """
104 stub_config = {
105 'git_update_server_info': 'stub',
106 }
107 server_and_port = vcsserver_pyro_echo_app.server_and_port
108 GIT_REMOTE_WSGI = Pyro4.Proxy(
109 settings.pyro_remote(
110 settings.PYRO_GIT_REMOTE_WSGI, server_and_port))
111 with mock.patch('rhodecode.lib.middleware.utils.scm_app.GIT_REMOTE_WSGI',
112 GIT_REMOTE_WSGI):
113 pyro4_app = scm_app.create_git_wsgi_app(
114 'stub_path', 'stub_name', stub_config)
115 app = CustomTestApp(pyro4_app)
116 return app
117
118
119 def test_pyro4_no_data(repeat, pylonsapp, vcsserver_pyro_echo_app):
120 for x in xrange(repeat / 10):
121 app = vcs_pyro4_app(vcsserver_pyro_echo_app)
122 response = app.post('/')
123 assert response.status_code == 200
124
125
126 def test_pyro4_with_data(repeat, pylonsapp, vcsserver_pyro_echo_app, data):
127 for x in xrange(repeat / 10):
128 app = vcs_pyro4_app(vcsserver_pyro_echo_app)
129 response = app.post('/', params=data)
130 assert response.status_code == 200
@@ -1,388 +1,319 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2017 RhodeCode GmbH
3 # Copyright (C) 2010-2017 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import json
21 import json
22 import logging
22 import logging
23 from StringIO import StringIO
23 from StringIO import StringIO
24
24
25 import mock
25 import mock
26 import pytest
26 import pytest
27
27
28 from rhodecode.lib import hooks_daemon
28 from rhodecode.lib import hooks_daemon
29 from rhodecode.tests.utils import assert_message_in_log
29 from rhodecode.tests.utils import assert_message_in_log
30
30
31
31
32 class TestDummyHooksCallbackDaemon(object):
32 class TestDummyHooksCallbackDaemon(object):
33 def test_hooks_module_path_set_properly(self):
33 def test_hooks_module_path_set_properly(self):
34 daemon = hooks_daemon.DummyHooksCallbackDaemon()
34 daemon = hooks_daemon.DummyHooksCallbackDaemon()
35 assert daemon.hooks_module == 'rhodecode.lib.hooks_daemon'
35 assert daemon.hooks_module == 'rhodecode.lib.hooks_daemon'
36
36
37 def test_logs_entering_the_hook(self):
37 def test_logs_entering_the_hook(self):
38 daemon = hooks_daemon.DummyHooksCallbackDaemon()
38 daemon = hooks_daemon.DummyHooksCallbackDaemon()
39 with mock.patch.object(hooks_daemon.log, 'debug') as log_mock:
39 with mock.patch.object(hooks_daemon.log, 'debug') as log_mock:
40 with daemon as return_value:
40 with daemon as return_value:
41 log_mock.assert_called_once_with(
41 log_mock.assert_called_once_with(
42 'Running dummy hooks callback daemon')
42 'Running dummy hooks callback daemon')
43 assert return_value == daemon
43 assert return_value == daemon
44
44
45 def test_logs_exiting_the_hook(self):
45 def test_logs_exiting_the_hook(self):
46 daemon = hooks_daemon.DummyHooksCallbackDaemon()
46 daemon = hooks_daemon.DummyHooksCallbackDaemon()
47 with mock.patch.object(hooks_daemon.log, 'debug') as log_mock:
47 with mock.patch.object(hooks_daemon.log, 'debug') as log_mock:
48 with daemon:
48 with daemon:
49 pass
49 pass
50 log_mock.assert_called_with('Exiting dummy hooks callback daemon')
50 log_mock.assert_called_with('Exiting dummy hooks callback daemon')
51
51
52
52
53 class TestHooks(object):
53 class TestHooks(object):
54 def test_hooks_can_be_used_as_a_context_processor(self):
54 def test_hooks_can_be_used_as_a_context_processor(self):
55 hooks = hooks_daemon.Hooks()
55 hooks = hooks_daemon.Hooks()
56 with hooks as return_value:
56 with hooks as return_value:
57 pass
57 pass
58 assert hooks == return_value
58 assert hooks == return_value
59
59
60
60
61 class TestHooksHttpHandler(object):
61 class TestHooksHttpHandler(object):
62 def test_read_request_parses_method_name_and_arguments(self):
62 def test_read_request_parses_method_name_and_arguments(self):
63 data = {
63 data = {
64 'method': 'test',
64 'method': 'test',
65 'extras': {
65 'extras': {
66 'param1': 1,
66 'param1': 1,
67 'param2': 'a'
67 'param2': 'a'
68 }
68 }
69 }
69 }
70 request = self._generate_post_request(data)
70 request = self._generate_post_request(data)
71 hooks_patcher = mock.patch.object(
71 hooks_patcher = mock.patch.object(
72 hooks_daemon.Hooks, data['method'], create=True, return_value=1)
72 hooks_daemon.Hooks, data['method'], create=True, return_value=1)
73
73
74 with hooks_patcher as hooks_mock:
74 with hooks_patcher as hooks_mock:
75 MockServer(hooks_daemon.HooksHttpHandler, request)
75 MockServer(hooks_daemon.HooksHttpHandler, request)
76
76
77 hooks_mock.assert_called_once_with(data['extras'])
77 hooks_mock.assert_called_once_with(data['extras'])
78
78
79 def test_hooks_serialized_result_is_returned(self):
79 def test_hooks_serialized_result_is_returned(self):
80 request = self._generate_post_request({})
80 request = self._generate_post_request({})
81 rpc_method = 'test'
81 rpc_method = 'test'
82 hook_result = {
82 hook_result = {
83 'first': 'one',
83 'first': 'one',
84 'second': 2
84 'second': 2
85 }
85 }
86 read_patcher = mock.patch.object(
86 read_patcher = mock.patch.object(
87 hooks_daemon.HooksHttpHandler, '_read_request',
87 hooks_daemon.HooksHttpHandler, '_read_request',
88 return_value=(rpc_method, {}))
88 return_value=(rpc_method, {}))
89 hooks_patcher = mock.patch.object(
89 hooks_patcher = mock.patch.object(
90 hooks_daemon.Hooks, rpc_method, create=True,
90 hooks_daemon.Hooks, rpc_method, create=True,
91 return_value=hook_result)
91 return_value=hook_result)
92
92
93 with read_patcher, hooks_patcher:
93 with read_patcher, hooks_patcher:
94 server = MockServer(hooks_daemon.HooksHttpHandler, request)
94 server = MockServer(hooks_daemon.HooksHttpHandler, request)
95
95
96 expected_result = json.dumps(hook_result)
96 expected_result = json.dumps(hook_result)
97 assert server.request.output_stream.buflist[-1] == expected_result
97 assert server.request.output_stream.buflist[-1] == expected_result
98
98
99 def test_exception_is_returned_in_response(self):
99 def test_exception_is_returned_in_response(self):
100 request = self._generate_post_request({})
100 request = self._generate_post_request({})
101 rpc_method = 'test'
101 rpc_method = 'test'
102 read_patcher = mock.patch.object(
102 read_patcher = mock.patch.object(
103 hooks_daemon.HooksHttpHandler, '_read_request',
103 hooks_daemon.HooksHttpHandler, '_read_request',
104 return_value=(rpc_method, {}))
104 return_value=(rpc_method, {}))
105 hooks_patcher = mock.patch.object(
105 hooks_patcher = mock.patch.object(
106 hooks_daemon.Hooks, rpc_method, create=True,
106 hooks_daemon.Hooks, rpc_method, create=True,
107 side_effect=Exception('Test exception'))
107 side_effect=Exception('Test exception'))
108
108
109 with read_patcher, hooks_patcher:
109 with read_patcher, hooks_patcher:
110 server = MockServer(hooks_daemon.HooksHttpHandler, request)
110 server = MockServer(hooks_daemon.HooksHttpHandler, request)
111
111
112 expected_result = json.dumps({
112 expected_result = json.dumps({
113 'exception': 'Exception',
113 'exception': 'Exception',
114 'exception_args': ('Test exception', )
114 'exception_args': ('Test exception', )
115 })
115 })
116 assert server.request.output_stream.buflist[-1] == expected_result
116 assert server.request.output_stream.buflist[-1] == expected_result
117
117
118 def test_log_message_writes_to_debug_log(self, caplog):
118 def test_log_message_writes_to_debug_log(self, caplog):
119 ip_port = ('0.0.0.0', 8888)
119 ip_port = ('0.0.0.0', 8888)
120 handler = hooks_daemon.HooksHttpHandler(
120 handler = hooks_daemon.HooksHttpHandler(
121 MockRequest('POST /'), ip_port, mock.Mock())
121 MockRequest('POST /'), ip_port, mock.Mock())
122 fake_date = '1/Nov/2015 00:00:00'
122 fake_date = '1/Nov/2015 00:00:00'
123 date_patcher = mock.patch.object(
123 date_patcher = mock.patch.object(
124 handler, 'log_date_time_string', return_value=fake_date)
124 handler, 'log_date_time_string', return_value=fake_date)
125 with date_patcher, caplog.at_level(logging.DEBUG):
125 with date_patcher, caplog.at_level(logging.DEBUG):
126 handler.log_message('Some message %d, %s', 123, 'string')
126 handler.log_message('Some message %d, %s', 123, 'string')
127
127
128 expected_message = '{} - - [{}] Some message 123, string'.format(
128 expected_message = '{} - - [{}] Some message 123, string'.format(
129 ip_port[0], fake_date)
129 ip_port[0], fake_date)
130 assert_message_in_log(
130 assert_message_in_log(
131 caplog.records, expected_message,
131 caplog.records, expected_message,
132 levelno=logging.DEBUG, module='hooks_daemon')
132 levelno=logging.DEBUG, module='hooks_daemon')
133
133
134 def _generate_post_request(self, data):
134 def _generate_post_request(self, data):
135 payload = json.dumps(data)
135 payload = json.dumps(data)
136 return 'POST / HTTP/1.0\nContent-Length: {}\n\n{}'.format(
136 return 'POST / HTTP/1.0\nContent-Length: {}\n\n{}'.format(
137 len(payload), payload)
137 len(payload), payload)
138
138
139
139
140 class ThreadedHookCallbackDaemon(object):
140 class ThreadedHookCallbackDaemon(object):
141 def test_constructor_calls_prepare(self):
141 def test_constructor_calls_prepare(self):
142 prepare_daemon_patcher = mock.patch.object(
142 prepare_daemon_patcher = mock.patch.object(
143 hooks_daemon.ThreadedHookCallbackDaemon, '_prepare')
143 hooks_daemon.ThreadedHookCallbackDaemon, '_prepare')
144 with prepare_daemon_patcher as prepare_daemon_mock:
144 with prepare_daemon_patcher as prepare_daemon_mock:
145 hooks_daemon.ThreadedHookCallbackDaemon()
145 hooks_daemon.ThreadedHookCallbackDaemon()
146 prepare_daemon_mock.assert_called_once_with()
146 prepare_daemon_mock.assert_called_once_with()
147
147
148 def test_run_is_called_on_context_start(self):
148 def test_run_is_called_on_context_start(self):
149 patchers = mock.patch.multiple(
149 patchers = mock.patch.multiple(
150 hooks_daemon.ThreadedHookCallbackDaemon,
150 hooks_daemon.ThreadedHookCallbackDaemon,
151 _run=mock.DEFAULT, _prepare=mock.DEFAULT, __exit__=mock.DEFAULT)
151 _run=mock.DEFAULT, _prepare=mock.DEFAULT, __exit__=mock.DEFAULT)
152
152
153 with patchers as mocks:
153 with patchers as mocks:
154 daemon = hooks_daemon.ThreadedHookCallbackDaemon()
154 daemon = hooks_daemon.ThreadedHookCallbackDaemon()
155 with daemon as daemon_context:
155 with daemon as daemon_context:
156 pass
156 pass
157 mocks['_run'].assert_called_once_with()
157 mocks['_run'].assert_called_once_with()
158 assert daemon_context == daemon
158 assert daemon_context == daemon
159
159
160 def test_stop_is_called_on_context_exit(self):
160 def test_stop_is_called_on_context_exit(self):
161 patchers = mock.patch.multiple(
161 patchers = mock.patch.multiple(
162 hooks_daemon.ThreadedHookCallbackDaemon,
162 hooks_daemon.ThreadedHookCallbackDaemon,
163 _run=mock.DEFAULT, _prepare=mock.DEFAULT, _stop=mock.DEFAULT)
163 _run=mock.DEFAULT, _prepare=mock.DEFAULT, _stop=mock.DEFAULT)
164
164
165 with patchers as mocks:
165 with patchers as mocks:
166 daemon = hooks_daemon.ThreadedHookCallbackDaemon()
166 daemon = hooks_daemon.ThreadedHookCallbackDaemon()
167 with daemon as daemon_context:
167 with daemon as daemon_context:
168 assert mocks['_stop'].call_count == 0
168 assert mocks['_stop'].call_count == 0
169
169
170 mocks['_stop'].assert_called_once_with()
170 mocks['_stop'].assert_called_once_with()
171 assert daemon_context == daemon
171 assert daemon_context == daemon
172
172
173
173
174 class TestPyro4HooksCallbackDaemon(object):
175 def test_prepare_inits_pyro4_and_registers_hooks(self, caplog):
176 pyro4_daemon = mock.Mock()
177
178 with self._pyro4_patcher(pyro4_daemon), caplog.at_level(logging.DEBUG):
179 daemon = hooks_daemon.Pyro4HooksCallbackDaemon()
180
181 assert daemon._daemon == pyro4_daemon
182
183 assert pyro4_daemon.register.call_count == 1
184 args, kwargs = pyro4_daemon.register.call_args
185 assert len(args) == 1
186 assert isinstance(args[0], hooks_daemon.Hooks)
187
188 assert_message_in_log(
189 caplog.records,
190 'Preparing callback daemon and registering hook object',
191 levelno=logging.DEBUG, module='hooks_daemon')
192
193 def test_run_creates_a_thread(self):
194 thread = mock.Mock()
195 pyro4_daemon = mock.Mock()
196
197 with self._pyro4_patcher(pyro4_daemon):
198 daemon = hooks_daemon.Pyro4HooksCallbackDaemon()
199
200 with self._thread_patcher(thread) as thread_mock:
201 daemon._run()
202
203 assert thread_mock.call_count == 1
204 args, kwargs = thread_mock.call_args
205 assert args == ()
206 assert kwargs['target'] == pyro4_daemon.requestLoop
207 assert kwargs['kwargs']['loopCondition']() is True
208
209 def test_stop_cleans_up_the_connection(self, caplog):
210 thread = mock.Mock()
211 pyro4_daemon = mock.Mock()
212
213 with self._pyro4_patcher(pyro4_daemon):
214 daemon = hooks_daemon.Pyro4HooksCallbackDaemon()
215
216 with self._thread_patcher(thread), caplog.at_level(logging.DEBUG):
217 with daemon:
218 assert daemon._daemon == pyro4_daemon
219 assert daemon._callback_thread == thread
220
221 assert daemon._daemon is None
222 assert daemon._callback_thread is None
223 pyro4_daemon.close.assert_called_with()
224 thread.join.assert_called_once_with()
225
226 assert_message_in_log(
227 caplog.records, 'Waiting for background thread to finish.',
228 levelno=logging.DEBUG, module='hooks_daemon')
229
230 def _pyro4_patcher(self, daemon):
231 return mock.patch.object(
232 hooks_daemon.Pyro4, 'Daemon', return_value=daemon)
233
234 def _thread_patcher(self, thread):
235 return mock.patch.object(
236 hooks_daemon.threading, 'Thread', return_value=thread)
237
238
239 class TestHttpHooksCallbackDaemon(object):
174 class TestHttpHooksCallbackDaemon(object):
240 def test_prepare_inits_daemon_variable(self, tcp_server, caplog):
175 def test_prepare_inits_daemon_variable(self, tcp_server, caplog):
241 with self._tcp_patcher(tcp_server), caplog.at_level(logging.DEBUG):
176 with self._tcp_patcher(tcp_server), caplog.at_level(logging.DEBUG):
242 daemon = hooks_daemon.HttpHooksCallbackDaemon()
177 daemon = hooks_daemon.HttpHooksCallbackDaemon()
243 assert daemon._daemon == tcp_server
178 assert daemon._daemon == tcp_server
244
179
245 assert_message_in_log(
180 assert_message_in_log(
246 caplog.records,
181 caplog.records,
247 'Preparing callback daemon and registering hook object',
182 'Preparing callback daemon and registering hook object',
248 levelno=logging.DEBUG, module='hooks_daemon')
183 levelno=logging.DEBUG, module='hooks_daemon')
249
184
250 def test_prepare_inits_hooks_uri_and_logs_it(
185 def test_prepare_inits_hooks_uri_and_logs_it(
251 self, tcp_server, caplog):
186 self, tcp_server, caplog):
252 with self._tcp_patcher(tcp_server), caplog.at_level(logging.DEBUG):
187 with self._tcp_patcher(tcp_server), caplog.at_level(logging.DEBUG):
253 daemon = hooks_daemon.HttpHooksCallbackDaemon()
188 daemon = hooks_daemon.HttpHooksCallbackDaemon()
254
189
255 _, port = tcp_server.server_address
190 _, port = tcp_server.server_address
256 expected_uri = '{}:{}'.format(daemon.IP_ADDRESS, port)
191 expected_uri = '{}:{}'.format(daemon.IP_ADDRESS, port)
257 assert daemon.hooks_uri == expected_uri
192 assert daemon.hooks_uri == expected_uri
258
193
259 assert_message_in_log(
194 assert_message_in_log(
260 caplog.records, 'Hooks uri is: {}'.format(expected_uri),
195 caplog.records, 'Hooks uri is: {}'.format(expected_uri),
261 levelno=logging.DEBUG, module='hooks_daemon')
196 levelno=logging.DEBUG, module='hooks_daemon')
262
197
263 def test_run_creates_a_thread(self, tcp_server):
198 def test_run_creates_a_thread(self, tcp_server):
264 thread = mock.Mock()
199 thread = mock.Mock()
265
200
266 with self._tcp_patcher(tcp_server):
201 with self._tcp_patcher(tcp_server):
267 daemon = hooks_daemon.HttpHooksCallbackDaemon()
202 daemon = hooks_daemon.HttpHooksCallbackDaemon()
268
203
269 with self._thread_patcher(thread) as thread_mock:
204 with self._thread_patcher(thread) as thread_mock:
270 daemon._run()
205 daemon._run()
271
206
272 thread_mock.assert_called_once_with(
207 thread_mock.assert_called_once_with(
273 target=tcp_server.serve_forever,
208 target=tcp_server.serve_forever,
274 kwargs={'poll_interval': daemon.POLL_INTERVAL})
209 kwargs={'poll_interval': daemon.POLL_INTERVAL})
275 assert thread.daemon is True
210 assert thread.daemon is True
276 thread.start.assert_called_once_with()
211 thread.start.assert_called_once_with()
277
212
278 def test_run_logs(self, tcp_server, caplog):
213 def test_run_logs(self, tcp_server, caplog):
279
214
280 with self._tcp_patcher(tcp_server):
215 with self._tcp_patcher(tcp_server):
281 daemon = hooks_daemon.HttpHooksCallbackDaemon()
216 daemon = hooks_daemon.HttpHooksCallbackDaemon()
282
217
283 with self._thread_patcher(mock.Mock()), caplog.at_level(logging.DEBUG):
218 with self._thread_patcher(mock.Mock()), caplog.at_level(logging.DEBUG):
284 daemon._run()
219 daemon._run()
285
220
286 assert_message_in_log(
221 assert_message_in_log(
287 caplog.records,
222 caplog.records,
288 'Running event loop of callback daemon in background thread',
223 'Running event loop of callback daemon in background thread',
289 levelno=logging.DEBUG, module='hooks_daemon')
224 levelno=logging.DEBUG, module='hooks_daemon')
290
225
291 def test_stop_cleans_up_the_connection(self, tcp_server, caplog):
226 def test_stop_cleans_up_the_connection(self, tcp_server, caplog):
292 thread = mock.Mock()
227 thread = mock.Mock()
293
228
294 with self._tcp_patcher(tcp_server):
229 with self._tcp_patcher(tcp_server):
295 daemon = hooks_daemon.HttpHooksCallbackDaemon()
230 daemon = hooks_daemon.HttpHooksCallbackDaemon()
296
231
297 with self._thread_patcher(thread), caplog.at_level(logging.DEBUG):
232 with self._thread_patcher(thread), caplog.at_level(logging.DEBUG):
298 with daemon:
233 with daemon:
299 assert daemon._daemon == tcp_server
234 assert daemon._daemon == tcp_server
300 assert daemon._callback_thread == thread
235 assert daemon._callback_thread == thread
301
236
302 assert daemon._daemon is None
237 assert daemon._daemon is None
303 assert daemon._callback_thread is None
238 assert daemon._callback_thread is None
304 tcp_server.shutdown.assert_called_with()
239 tcp_server.shutdown.assert_called_with()
305 thread.join.assert_called_once_with()
240 thread.join.assert_called_once_with()
306
241
307 assert_message_in_log(
242 assert_message_in_log(
308 caplog.records, 'Waiting for background thread to finish.',
243 caplog.records, 'Waiting for background thread to finish.',
309 levelno=logging.DEBUG, module='hooks_daemon')
244 levelno=logging.DEBUG, module='hooks_daemon')
310
245
311 def _tcp_patcher(self, tcp_server):
246 def _tcp_patcher(self, tcp_server):
312 return mock.patch.object(
247 return mock.patch.object(
313 hooks_daemon, 'TCPServer', return_value=tcp_server)
248 hooks_daemon, 'TCPServer', return_value=tcp_server)
314
249
315 def _thread_patcher(self, thread):
250 def _thread_patcher(self, thread):
316 return mock.patch.object(
251 return mock.patch.object(
317 hooks_daemon.threading, 'Thread', return_value=thread)
252 hooks_daemon.threading, 'Thread', return_value=thread)
318
253
319
254
320 class TestPrepareHooksDaemon(object):
255 class TestPrepareHooksDaemon(object):
321 @pytest.mark.parametrize('protocol', ('http', 'pyro4'))
256 @pytest.mark.parametrize('protocol', ('http',))
322 def test_returns_dummy_hooks_callback_daemon_when_using_direct_calls(
257 def test_returns_dummy_hooks_callback_daemon_when_using_direct_calls(
323 self, protocol):
258 self, protocol):
324 expected_extras = {'extra1': 'value1'}
259 expected_extras = {'extra1': 'value1'}
325 callback, extras = hooks_daemon.prepare_callback_daemon(
260 callback, extras = hooks_daemon.prepare_callback_daemon(
326 expected_extras.copy(), protocol=protocol, use_direct_calls=True)
261 expected_extras.copy(), protocol=protocol, use_direct_calls=True)
327 assert isinstance(callback, hooks_daemon.DummyHooksCallbackDaemon)
262 assert isinstance(callback, hooks_daemon.DummyHooksCallbackDaemon)
328 expected_extras['hooks_module'] = 'rhodecode.lib.hooks_daemon'
263 expected_extras['hooks_module'] = 'rhodecode.lib.hooks_daemon'
329 assert extras == expected_extras
264 assert extras == expected_extras
330
265
331 @pytest.mark.parametrize('protocol, expected_class', (
266 @pytest.mark.parametrize('protocol, expected_class', (
332 ('pyro4', hooks_daemon.Pyro4HooksCallbackDaemon),
267 ('http', hooks_daemon.HttpHooksCallbackDaemon),
333 ('http', hooks_daemon.HttpHooksCallbackDaemon)
334 ))
268 ))
335 def test_returns_real_hooks_callback_daemon_when_protocol_is_specified(
269 def test_returns_real_hooks_callback_daemon_when_protocol_is_specified(
336 self, protocol, expected_class):
270 self, protocol, expected_class):
337 expected_extras = {
271 expected_extras = {
338 'extra1': 'value1',
272 'extra1': 'value1',
339 'hooks_protocol': protocol.lower()
273 'hooks_protocol': protocol.lower()
340 }
274 }
341 callback, extras = hooks_daemon.prepare_callback_daemon(
275 callback, extras = hooks_daemon.prepare_callback_daemon(
342 expected_extras.copy(), protocol=protocol, use_direct_calls=False)
276 expected_extras.copy(), protocol=protocol, use_direct_calls=False)
343 assert isinstance(callback, expected_class)
277 assert isinstance(callback, expected_class)
344 hooks_uri = extras.pop('hooks_uri')
278 hooks_uri = extras.pop('hooks_uri')
345 assert extras == expected_extras
279 assert extras == expected_extras
346 if protocol.lower() == 'pyro4':
347 assert hooks_uri.startswith('PYRO')
348
280
349 @pytest.mark.parametrize('protocol', (
281 @pytest.mark.parametrize('protocol', (
350 'invalid',
282 'invalid',
351 'Pyro4',
352 'Http',
283 'Http',
353 'HTTP',
284 'HTTP',
354 ))
285 ))
355 def test_raises_on_invalid_protocol(self, protocol):
286 def test_raises_on_invalid_protocol(self, protocol):
356 expected_extras = {
287 expected_extras = {
357 'extra1': 'value1',
288 'extra1': 'value1',
358 'hooks_protocol': protocol.lower()
289 'hooks_protocol': protocol.lower()
359 }
290 }
360 with pytest.raises(Exception):
291 with pytest.raises(Exception):
361 callback, extras = hooks_daemon.prepare_callback_daemon(
292 callback, extras = hooks_daemon.prepare_callback_daemon(
362 expected_extras.copy(),
293 expected_extras.copy(),
363 protocol=protocol,
294 protocol=protocol,
364 use_direct_calls=False)
295 use_direct_calls=False)
365
296
366
297
367 class MockRequest(object):
298 class MockRequest(object):
368 def __init__(self, request):
299 def __init__(self, request):
369 self.request = request
300 self.request = request
370 self.input_stream = StringIO(b'{}'.format(self.request))
301 self.input_stream = StringIO(b'{}'.format(self.request))
371 self.output_stream = StringIO()
302 self.output_stream = StringIO()
372
303
373 def makefile(self, mode, *args, **kwargs):
304 def makefile(self, mode, *args, **kwargs):
374 return self.output_stream if mode == 'wb' else self.input_stream
305 return self.output_stream if mode == 'wb' else self.input_stream
375
306
376
307
377 class MockServer(object):
308 class MockServer(object):
378 def __init__(self, Handler, request):
309 def __init__(self, Handler, request):
379 ip_port = ('0.0.0.0', 8888)
310 ip_port = ('0.0.0.0', 8888)
380 self.request = MockRequest(request)
311 self.request = MockRequest(request)
381 self.handler = Handler(self.request, ip_port, self)
312 self.handler = Handler(self.request, ip_port, self)
382
313
383
314
384 @pytest.fixture
315 @pytest.fixture
385 def tcp_server():
316 def tcp_server():
386 server = mock.Mock()
317 server = mock.Mock()
387 server.server_address = ('127.0.0.1', 8881)
318 server.server_address = ('127.0.0.1', 8881)
388 return server
319 return server
@@ -1,852 +1,851 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2017 RhodeCode GmbH
3 # Copyright (C) 2010-2017 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import mock
21 import mock
22 import pytest
22 import pytest
23 import textwrap
23 import textwrap
24
24
25 import rhodecode
25 import rhodecode
26 from rhodecode.lib.utils2 import safe_unicode
26 from rhodecode.lib.utils2 import safe_unicode
27 from rhodecode.lib.vcs.backends import get_backend
27 from rhodecode.lib.vcs.backends import get_backend
28 from rhodecode.lib.vcs.backends.base import (
28 from rhodecode.lib.vcs.backends.base import (
29 MergeResponse, MergeFailureReason, Reference)
29 MergeResponse, MergeFailureReason, Reference)
30 from rhodecode.lib.vcs.exceptions import RepositoryError
30 from rhodecode.lib.vcs.exceptions import RepositoryError
31 from rhodecode.lib.vcs.nodes import FileNode
31 from rhodecode.lib.vcs.nodes import FileNode
32 from rhodecode.model.comment import CommentsModel
32 from rhodecode.model.comment import CommentsModel
33 from rhodecode.model.db import PullRequest, Session
33 from rhodecode.model.db import PullRequest, Session
34 from rhodecode.model.pull_request import PullRequestModel
34 from rhodecode.model.pull_request import PullRequestModel
35 from rhodecode.model.user import UserModel
35 from rhodecode.model.user import UserModel
36 from rhodecode.tests import TEST_USER_ADMIN_LOGIN
36 from rhodecode.tests import TEST_USER_ADMIN_LOGIN
37
37
38
38
39 pytestmark = [
39 pytestmark = [
40 pytest.mark.backends("git", "hg"),
40 pytest.mark.backends("git", "hg"),
41 ]
41 ]
42
42
43
43
44 class TestPullRequestModel:
44 class TestPullRequestModel:
45
45
46 @pytest.fixture
46 @pytest.fixture
47 def pull_request(self, request, backend, pr_util):
47 def pull_request(self, request, backend, pr_util):
48 """
48 """
49 A pull request combined with multiples patches.
49 A pull request combined with multiples patches.
50 """
50 """
51 BackendClass = get_backend(backend.alias)
51 BackendClass = get_backend(backend.alias)
52 self.merge_patcher = mock.patch.object(BackendClass, 'merge')
52 self.merge_patcher = mock.patch.object(BackendClass, 'merge')
53 self.workspace_remove_patcher = mock.patch.object(
53 self.workspace_remove_patcher = mock.patch.object(
54 BackendClass, 'cleanup_merge_workspace')
54 BackendClass, 'cleanup_merge_workspace')
55
55
56 self.workspace_remove_mock = self.workspace_remove_patcher.start()
56 self.workspace_remove_mock = self.workspace_remove_patcher.start()
57 self.merge_mock = self.merge_patcher.start()
57 self.merge_mock = self.merge_patcher.start()
58 self.comment_patcher = mock.patch(
58 self.comment_patcher = mock.patch(
59 'rhodecode.model.changeset_status.ChangesetStatusModel.set_status')
59 'rhodecode.model.changeset_status.ChangesetStatusModel.set_status')
60 self.comment_patcher.start()
60 self.comment_patcher.start()
61 self.notification_patcher = mock.patch(
61 self.notification_patcher = mock.patch(
62 'rhodecode.model.notification.NotificationModel.create')
62 'rhodecode.model.notification.NotificationModel.create')
63 self.notification_patcher.start()
63 self.notification_patcher.start()
64 self.helper_patcher = mock.patch(
64 self.helper_patcher = mock.patch(
65 'rhodecode.lib.helpers.url')
65 'rhodecode.lib.helpers.url')
66 self.helper_patcher.start()
66 self.helper_patcher.start()
67
67
68 self.hook_patcher = mock.patch.object(PullRequestModel,
68 self.hook_patcher = mock.patch.object(PullRequestModel,
69 '_trigger_pull_request_hook')
69 '_trigger_pull_request_hook')
70 self.hook_mock = self.hook_patcher.start()
70 self.hook_mock = self.hook_patcher.start()
71
71
72 self.invalidation_patcher = mock.patch(
72 self.invalidation_patcher = mock.patch(
73 'rhodecode.model.pull_request.ScmModel.mark_for_invalidation')
73 'rhodecode.model.pull_request.ScmModel.mark_for_invalidation')
74 self.invalidation_mock = self.invalidation_patcher.start()
74 self.invalidation_mock = self.invalidation_patcher.start()
75
75
76 self.pull_request = pr_util.create_pull_request(
76 self.pull_request = pr_util.create_pull_request(
77 mergeable=True, name_suffix=u'ąć')
77 mergeable=True, name_suffix=u'ąć')
78 self.source_commit = self.pull_request.source_ref_parts.commit_id
78 self.source_commit = self.pull_request.source_ref_parts.commit_id
79 self.target_commit = self.pull_request.target_ref_parts.commit_id
79 self.target_commit = self.pull_request.target_ref_parts.commit_id
80 self.workspace_id = 'pr-%s' % self.pull_request.pull_request_id
80 self.workspace_id = 'pr-%s' % self.pull_request.pull_request_id
81
81
82 @request.addfinalizer
82 @request.addfinalizer
83 def cleanup_pull_request():
83 def cleanup_pull_request():
84 calls = [mock.call(
84 calls = [mock.call(
85 self.pull_request, self.pull_request.author, 'create')]
85 self.pull_request, self.pull_request.author, 'create')]
86 self.hook_mock.assert_has_calls(calls)
86 self.hook_mock.assert_has_calls(calls)
87
87
88 self.workspace_remove_patcher.stop()
88 self.workspace_remove_patcher.stop()
89 self.merge_patcher.stop()
89 self.merge_patcher.stop()
90 self.comment_patcher.stop()
90 self.comment_patcher.stop()
91 self.notification_patcher.stop()
91 self.notification_patcher.stop()
92 self.helper_patcher.stop()
92 self.helper_patcher.stop()
93 self.hook_patcher.stop()
93 self.hook_patcher.stop()
94 self.invalidation_patcher.stop()
94 self.invalidation_patcher.stop()
95
95
96 return self.pull_request
96 return self.pull_request
97
97
98 def test_get_all(self, pull_request):
98 def test_get_all(self, pull_request):
99 prs = PullRequestModel().get_all(pull_request.target_repo)
99 prs = PullRequestModel().get_all(pull_request.target_repo)
100 assert isinstance(prs, list)
100 assert isinstance(prs, list)
101 assert len(prs) == 1
101 assert len(prs) == 1
102
102
103 def test_count_all(self, pull_request):
103 def test_count_all(self, pull_request):
104 pr_count = PullRequestModel().count_all(pull_request.target_repo)
104 pr_count = PullRequestModel().count_all(pull_request.target_repo)
105 assert pr_count == 1
105 assert pr_count == 1
106
106
107 def test_get_awaiting_review(self, pull_request):
107 def test_get_awaiting_review(self, pull_request):
108 prs = PullRequestModel().get_awaiting_review(pull_request.target_repo)
108 prs = PullRequestModel().get_awaiting_review(pull_request.target_repo)
109 assert isinstance(prs, list)
109 assert isinstance(prs, list)
110 assert len(prs) == 1
110 assert len(prs) == 1
111
111
112 def test_count_awaiting_review(self, pull_request):
112 def test_count_awaiting_review(self, pull_request):
113 pr_count = PullRequestModel().count_awaiting_review(
113 pr_count = PullRequestModel().count_awaiting_review(
114 pull_request.target_repo)
114 pull_request.target_repo)
115 assert pr_count == 1
115 assert pr_count == 1
116
116
117 def test_get_awaiting_my_review(self, pull_request):
117 def test_get_awaiting_my_review(self, pull_request):
118 PullRequestModel().update_reviewers(
118 PullRequestModel().update_reviewers(
119 pull_request, [(pull_request.author, ['author'])])
119 pull_request, [(pull_request.author, ['author'])])
120 prs = PullRequestModel().get_awaiting_my_review(
120 prs = PullRequestModel().get_awaiting_my_review(
121 pull_request.target_repo, user_id=pull_request.author.user_id)
121 pull_request.target_repo, user_id=pull_request.author.user_id)
122 assert isinstance(prs, list)
122 assert isinstance(prs, list)
123 assert len(prs) == 1
123 assert len(prs) == 1
124
124
125 def test_count_awaiting_my_review(self, pull_request):
125 def test_count_awaiting_my_review(self, pull_request):
126 PullRequestModel().update_reviewers(
126 PullRequestModel().update_reviewers(
127 pull_request, [(pull_request.author, ['author'])])
127 pull_request, [(pull_request.author, ['author'])])
128 pr_count = PullRequestModel().count_awaiting_my_review(
128 pr_count = PullRequestModel().count_awaiting_my_review(
129 pull_request.target_repo, user_id=pull_request.author.user_id)
129 pull_request.target_repo, user_id=pull_request.author.user_id)
130 assert pr_count == 1
130 assert pr_count == 1
131
131
132 def test_delete_calls_cleanup_merge(self, pull_request):
132 def test_delete_calls_cleanup_merge(self, pull_request):
133 PullRequestModel().delete(pull_request)
133 PullRequestModel().delete(pull_request)
134
134
135 self.workspace_remove_mock.assert_called_once_with(
135 self.workspace_remove_mock.assert_called_once_with(
136 self.workspace_id)
136 self.workspace_id)
137
137
138 def test_close_calls_cleanup_and_hook(self, pull_request):
138 def test_close_calls_cleanup_and_hook(self, pull_request):
139 PullRequestModel().close_pull_request(
139 PullRequestModel().close_pull_request(
140 pull_request, pull_request.author)
140 pull_request, pull_request.author)
141
141
142 self.workspace_remove_mock.assert_called_once_with(
142 self.workspace_remove_mock.assert_called_once_with(
143 self.workspace_id)
143 self.workspace_id)
144 self.hook_mock.assert_called_with(
144 self.hook_mock.assert_called_with(
145 self.pull_request, self.pull_request.author, 'close')
145 self.pull_request, self.pull_request.author, 'close')
146
146
147 def test_merge_status(self, pull_request):
147 def test_merge_status(self, pull_request):
148 self.merge_mock.return_value = MergeResponse(
148 self.merge_mock.return_value = MergeResponse(
149 True, False, None, MergeFailureReason.NONE)
149 True, False, None, MergeFailureReason.NONE)
150
150
151 assert pull_request._last_merge_source_rev is None
151 assert pull_request._last_merge_source_rev is None
152 assert pull_request._last_merge_target_rev is None
152 assert pull_request._last_merge_target_rev is None
153 assert pull_request._last_merge_status is None
153 assert pull_request._last_merge_status is None
154
154
155 status, msg = PullRequestModel().merge_status(pull_request)
155 status, msg = PullRequestModel().merge_status(pull_request)
156 assert status is True
156 assert status is True
157 assert msg.eval() == 'This pull request can be automatically merged.'
157 assert msg.eval() == 'This pull request can be automatically merged.'
158 self.merge_mock.assert_called_once_with(
158 self.merge_mock.assert_called_once_with(
159 pull_request.target_ref_parts,
159 pull_request.target_ref_parts,
160 pull_request.source_repo.scm_instance(),
160 pull_request.source_repo.scm_instance(),
161 pull_request.source_ref_parts, self.workspace_id, dry_run=True,
161 pull_request.source_ref_parts, self.workspace_id, dry_run=True,
162 use_rebase=False)
162 use_rebase=False)
163
163
164 assert pull_request._last_merge_source_rev == self.source_commit
164 assert pull_request._last_merge_source_rev == self.source_commit
165 assert pull_request._last_merge_target_rev == self.target_commit
165 assert pull_request._last_merge_target_rev == self.target_commit
166 assert pull_request._last_merge_status is MergeFailureReason.NONE
166 assert pull_request._last_merge_status is MergeFailureReason.NONE
167
167
168 self.merge_mock.reset_mock()
168 self.merge_mock.reset_mock()
169 status, msg = PullRequestModel().merge_status(pull_request)
169 status, msg = PullRequestModel().merge_status(pull_request)
170 assert status is True
170 assert status is True
171 assert msg.eval() == 'This pull request can be automatically merged.'
171 assert msg.eval() == 'This pull request can be automatically merged.'
172 assert self.merge_mock.called is False
172 assert self.merge_mock.called is False
173
173
174 def test_merge_status_known_failure(self, pull_request):
174 def test_merge_status_known_failure(self, pull_request):
175 self.merge_mock.return_value = MergeResponse(
175 self.merge_mock.return_value = MergeResponse(
176 False, False, None, MergeFailureReason.MERGE_FAILED)
176 False, False, None, MergeFailureReason.MERGE_FAILED)
177
177
178 assert pull_request._last_merge_source_rev is None
178 assert pull_request._last_merge_source_rev is None
179 assert pull_request._last_merge_target_rev is None
179 assert pull_request._last_merge_target_rev is None
180 assert pull_request._last_merge_status is None
180 assert pull_request._last_merge_status is None
181
181
182 status, msg = PullRequestModel().merge_status(pull_request)
182 status, msg = PullRequestModel().merge_status(pull_request)
183 assert status is False
183 assert status is False
184 assert (
184 assert (
185 msg.eval() ==
185 msg.eval() ==
186 'This pull request cannot be merged because of merge conflicts.')
186 'This pull request cannot be merged because of merge conflicts.')
187 self.merge_mock.assert_called_once_with(
187 self.merge_mock.assert_called_once_with(
188 pull_request.target_ref_parts,
188 pull_request.target_ref_parts,
189 pull_request.source_repo.scm_instance(),
189 pull_request.source_repo.scm_instance(),
190 pull_request.source_ref_parts, self.workspace_id, dry_run=True,
190 pull_request.source_ref_parts, self.workspace_id, dry_run=True,
191 use_rebase=False)
191 use_rebase=False)
192
192
193 assert pull_request._last_merge_source_rev == self.source_commit
193 assert pull_request._last_merge_source_rev == self.source_commit
194 assert pull_request._last_merge_target_rev == self.target_commit
194 assert pull_request._last_merge_target_rev == self.target_commit
195 assert (
195 assert (
196 pull_request._last_merge_status is MergeFailureReason.MERGE_FAILED)
196 pull_request._last_merge_status is MergeFailureReason.MERGE_FAILED)
197
197
198 self.merge_mock.reset_mock()
198 self.merge_mock.reset_mock()
199 status, msg = PullRequestModel().merge_status(pull_request)
199 status, msg = PullRequestModel().merge_status(pull_request)
200 assert status is False
200 assert status is False
201 assert (
201 assert (
202 msg.eval() ==
202 msg.eval() ==
203 'This pull request cannot be merged because of merge conflicts.')
203 'This pull request cannot be merged because of merge conflicts.')
204 assert self.merge_mock.called is False
204 assert self.merge_mock.called is False
205
205
206 def test_merge_status_unknown_failure(self, pull_request):
206 def test_merge_status_unknown_failure(self, pull_request):
207 self.merge_mock.return_value = MergeResponse(
207 self.merge_mock.return_value = MergeResponse(
208 False, False, None, MergeFailureReason.UNKNOWN)
208 False, False, None, MergeFailureReason.UNKNOWN)
209
209
210 assert pull_request._last_merge_source_rev is None
210 assert pull_request._last_merge_source_rev is None
211 assert pull_request._last_merge_target_rev is None
211 assert pull_request._last_merge_target_rev is None
212 assert pull_request._last_merge_status is None
212 assert pull_request._last_merge_status is None
213
213
214 status, msg = PullRequestModel().merge_status(pull_request)
214 status, msg = PullRequestModel().merge_status(pull_request)
215 assert status is False
215 assert status is False
216 assert msg.eval() == (
216 assert msg.eval() == (
217 'This pull request cannot be merged because of an unhandled'
217 'This pull request cannot be merged because of an unhandled'
218 ' exception.')
218 ' exception.')
219 self.merge_mock.assert_called_once_with(
219 self.merge_mock.assert_called_once_with(
220 pull_request.target_ref_parts,
220 pull_request.target_ref_parts,
221 pull_request.source_repo.scm_instance(),
221 pull_request.source_repo.scm_instance(),
222 pull_request.source_ref_parts, self.workspace_id, dry_run=True,
222 pull_request.source_ref_parts, self.workspace_id, dry_run=True,
223 use_rebase=False)
223 use_rebase=False)
224
224
225 assert pull_request._last_merge_source_rev is None
225 assert pull_request._last_merge_source_rev is None
226 assert pull_request._last_merge_target_rev is None
226 assert pull_request._last_merge_target_rev is None
227 assert pull_request._last_merge_status is None
227 assert pull_request._last_merge_status is None
228
228
229 self.merge_mock.reset_mock()
229 self.merge_mock.reset_mock()
230 status, msg = PullRequestModel().merge_status(pull_request)
230 status, msg = PullRequestModel().merge_status(pull_request)
231 assert status is False
231 assert status is False
232 assert msg.eval() == (
232 assert msg.eval() == (
233 'This pull request cannot be merged because of an unhandled'
233 'This pull request cannot be merged because of an unhandled'
234 ' exception.')
234 ' exception.')
235 assert self.merge_mock.called is True
235 assert self.merge_mock.called is True
236
236
237 def test_merge_status_when_target_is_locked(self, pull_request):
237 def test_merge_status_when_target_is_locked(self, pull_request):
238 pull_request.target_repo.locked = [1, u'12345.50', 'lock_web']
238 pull_request.target_repo.locked = [1, u'12345.50', 'lock_web']
239 status, msg = PullRequestModel().merge_status(pull_request)
239 status, msg = PullRequestModel().merge_status(pull_request)
240 assert status is False
240 assert status is False
241 assert msg.eval() == (
241 assert msg.eval() == (
242 'This pull request cannot be merged because the target repository'
242 'This pull request cannot be merged because the target repository'
243 ' is locked.')
243 ' is locked.')
244
244
245 def test_merge_status_requirements_check_target(self, pull_request):
245 def test_merge_status_requirements_check_target(self, pull_request):
246
246
247 def has_largefiles(self, repo):
247 def has_largefiles(self, repo):
248 return repo == pull_request.source_repo
248 return repo == pull_request.source_repo
249
249
250 patcher = mock.patch.object(
250 patcher = mock.patch.object(
251 PullRequestModel, '_has_largefiles', has_largefiles)
251 PullRequestModel, '_has_largefiles', has_largefiles)
252 with patcher:
252 with patcher:
253 status, msg = PullRequestModel().merge_status(pull_request)
253 status, msg = PullRequestModel().merge_status(pull_request)
254
254
255 assert status is False
255 assert status is False
256 assert msg == 'Target repository large files support is disabled.'
256 assert msg == 'Target repository large files support is disabled.'
257
257
258 def test_merge_status_requirements_check_source(self, pull_request):
258 def test_merge_status_requirements_check_source(self, pull_request):
259
259
260 def has_largefiles(self, repo):
260 def has_largefiles(self, repo):
261 return repo == pull_request.target_repo
261 return repo == pull_request.target_repo
262
262
263 patcher = mock.patch.object(
263 patcher = mock.patch.object(
264 PullRequestModel, '_has_largefiles', has_largefiles)
264 PullRequestModel, '_has_largefiles', has_largefiles)
265 with patcher:
265 with patcher:
266 status, msg = PullRequestModel().merge_status(pull_request)
266 status, msg = PullRequestModel().merge_status(pull_request)
267
267
268 assert status is False
268 assert status is False
269 assert msg == 'Source repository large files support is disabled.'
269 assert msg == 'Source repository large files support is disabled.'
270
270
271 def test_merge(self, pull_request, merge_extras):
271 def test_merge(self, pull_request, merge_extras):
272 user = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
272 user = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
273 merge_ref = Reference(
273 merge_ref = Reference(
274 'type', 'name', '6126b7bfcc82ad2d3deaee22af926b082ce54cc6')
274 'type', 'name', '6126b7bfcc82ad2d3deaee22af926b082ce54cc6')
275 self.merge_mock.return_value = MergeResponse(
275 self.merge_mock.return_value = MergeResponse(
276 True, True, merge_ref, MergeFailureReason.NONE)
276 True, True, merge_ref, MergeFailureReason.NONE)
277
277
278 merge_extras['repository'] = pull_request.target_repo.repo_name
278 merge_extras['repository'] = pull_request.target_repo.repo_name
279 PullRequestModel().merge(
279 PullRequestModel().merge(
280 pull_request, pull_request.author, extras=merge_extras)
280 pull_request, pull_request.author, extras=merge_extras)
281
281
282 message = (
282 message = (
283 u'Merge pull request #{pr_id} from {source_repo} {source_ref_name}'
283 u'Merge pull request #{pr_id} from {source_repo} {source_ref_name}'
284 u'\n\n {pr_title}'.format(
284 u'\n\n {pr_title}'.format(
285 pr_id=pull_request.pull_request_id,
285 pr_id=pull_request.pull_request_id,
286 source_repo=safe_unicode(
286 source_repo=safe_unicode(
287 pull_request.source_repo.scm_instance().name),
287 pull_request.source_repo.scm_instance().name),
288 source_ref_name=pull_request.source_ref_parts.name,
288 source_ref_name=pull_request.source_ref_parts.name,
289 pr_title=safe_unicode(pull_request.title)
289 pr_title=safe_unicode(pull_request.title)
290 )
290 )
291 )
291 )
292 self.merge_mock.assert_called_once_with(
292 self.merge_mock.assert_called_once_with(
293 pull_request.target_ref_parts,
293 pull_request.target_ref_parts,
294 pull_request.source_repo.scm_instance(),
294 pull_request.source_repo.scm_instance(),
295 pull_request.source_ref_parts, self.workspace_id,
295 pull_request.source_ref_parts, self.workspace_id,
296 user_name=user.username, user_email=user.email, message=message,
296 user_name=user.username, user_email=user.email, message=message,
297 use_rebase=False
297 use_rebase=False
298 )
298 )
299 self.invalidation_mock.assert_called_once_with(
299 self.invalidation_mock.assert_called_once_with(
300 pull_request.target_repo.repo_name)
300 pull_request.target_repo.repo_name)
301
301
302 self.hook_mock.assert_called_with(
302 self.hook_mock.assert_called_with(
303 self.pull_request, self.pull_request.author, 'merge')
303 self.pull_request, self.pull_request.author, 'merge')
304
304
305 pull_request = PullRequest.get(pull_request.pull_request_id)
305 pull_request = PullRequest.get(pull_request.pull_request_id)
306 assert (
306 assert (
307 pull_request.merge_rev ==
307 pull_request.merge_rev ==
308 '6126b7bfcc82ad2d3deaee22af926b082ce54cc6')
308 '6126b7bfcc82ad2d3deaee22af926b082ce54cc6')
309
309
310 def test_merge_failed(self, pull_request, merge_extras):
310 def test_merge_failed(self, pull_request, merge_extras):
311 user = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
311 user = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
312 merge_ref = Reference(
312 merge_ref = Reference(
313 'type', 'name', '6126b7bfcc82ad2d3deaee22af926b082ce54cc6')
313 'type', 'name', '6126b7bfcc82ad2d3deaee22af926b082ce54cc6')
314 self.merge_mock.return_value = MergeResponse(
314 self.merge_mock.return_value = MergeResponse(
315 False, False, merge_ref, MergeFailureReason.MERGE_FAILED)
315 False, False, merge_ref, MergeFailureReason.MERGE_FAILED)
316
316
317 merge_extras['repository'] = pull_request.target_repo.repo_name
317 merge_extras['repository'] = pull_request.target_repo.repo_name
318 PullRequestModel().merge(
318 PullRequestModel().merge(
319 pull_request, pull_request.author, extras=merge_extras)
319 pull_request, pull_request.author, extras=merge_extras)
320
320
321 message = (
321 message = (
322 u'Merge pull request #{pr_id} from {source_repo} {source_ref_name}'
322 u'Merge pull request #{pr_id} from {source_repo} {source_ref_name}'
323 u'\n\n {pr_title}'.format(
323 u'\n\n {pr_title}'.format(
324 pr_id=pull_request.pull_request_id,
324 pr_id=pull_request.pull_request_id,
325 source_repo=safe_unicode(
325 source_repo=safe_unicode(
326 pull_request.source_repo.scm_instance().name),
326 pull_request.source_repo.scm_instance().name),
327 source_ref_name=pull_request.source_ref_parts.name,
327 source_ref_name=pull_request.source_ref_parts.name,
328 pr_title=safe_unicode(pull_request.title)
328 pr_title=safe_unicode(pull_request.title)
329 )
329 )
330 )
330 )
331 self.merge_mock.assert_called_once_with(
331 self.merge_mock.assert_called_once_with(
332 pull_request.target_ref_parts,
332 pull_request.target_ref_parts,
333 pull_request.source_repo.scm_instance(),
333 pull_request.source_repo.scm_instance(),
334 pull_request.source_ref_parts, self.workspace_id,
334 pull_request.source_ref_parts, self.workspace_id,
335 user_name=user.username, user_email=user.email, message=message,
335 user_name=user.username, user_email=user.email, message=message,
336 use_rebase=False
336 use_rebase=False
337 )
337 )
338
338
339 pull_request = PullRequest.get(pull_request.pull_request_id)
339 pull_request = PullRequest.get(pull_request.pull_request_id)
340 assert self.invalidation_mock.called is False
340 assert self.invalidation_mock.called is False
341 assert pull_request.merge_rev is None
341 assert pull_request.merge_rev is None
342
342
343 def test_get_commit_ids(self, pull_request):
343 def test_get_commit_ids(self, pull_request):
344 # The PR has been not merget yet, so expect an exception
344 # The PR has been not merget yet, so expect an exception
345 with pytest.raises(ValueError):
345 with pytest.raises(ValueError):
346 PullRequestModel()._get_commit_ids(pull_request)
346 PullRequestModel()._get_commit_ids(pull_request)
347
347
348 # Merge revision is in the revisions list
348 # Merge revision is in the revisions list
349 pull_request.merge_rev = pull_request.revisions[0]
349 pull_request.merge_rev = pull_request.revisions[0]
350 commit_ids = PullRequestModel()._get_commit_ids(pull_request)
350 commit_ids = PullRequestModel()._get_commit_ids(pull_request)
351 assert commit_ids == pull_request.revisions
351 assert commit_ids == pull_request.revisions
352
352
353 # Merge revision is not in the revisions list
353 # Merge revision is not in the revisions list
354 pull_request.merge_rev = 'f000' * 10
354 pull_request.merge_rev = 'f000' * 10
355 commit_ids = PullRequestModel()._get_commit_ids(pull_request)
355 commit_ids = PullRequestModel()._get_commit_ids(pull_request)
356 assert commit_ids == pull_request.revisions + [pull_request.merge_rev]
356 assert commit_ids == pull_request.revisions + [pull_request.merge_rev]
357
357
358 def test_get_diff_from_pr_version(self, pull_request):
358 def test_get_diff_from_pr_version(self, pull_request):
359 source_repo = pull_request.source_repo
359 source_repo = pull_request.source_repo
360 source_ref_id = pull_request.source_ref_parts.commit_id
360 source_ref_id = pull_request.source_ref_parts.commit_id
361 target_ref_id = pull_request.target_ref_parts.commit_id
361 target_ref_id = pull_request.target_ref_parts.commit_id
362 diff = PullRequestModel()._get_diff_from_pr_or_version(
362 diff = PullRequestModel()._get_diff_from_pr_or_version(
363 source_repo, source_ref_id, target_ref_id, context=6)
363 source_repo, source_ref_id, target_ref_id, context=6)
364 assert 'file_1' in diff.raw
364 assert 'file_1' in diff.raw
365
365
366 def test_generate_title_returns_unicode(self):
366 def test_generate_title_returns_unicode(self):
367 title = PullRequestModel().generate_pullrequest_title(
367 title = PullRequestModel().generate_pullrequest_title(
368 source='source-dummy',
368 source='source-dummy',
369 source_ref='source-ref-dummy',
369 source_ref='source-ref-dummy',
370 target='target-dummy',
370 target='target-dummy',
371 )
371 )
372 assert type(title) == unicode
372 assert type(title) == unicode
373
373
374
374
375 class TestIntegrationMerge(object):
375 class TestIntegrationMerge(object):
376 @pytest.mark.parametrize('extra_config', (
376 @pytest.mark.parametrize('extra_config', (
377 {'vcs.hooks.protocol': 'http', 'vcs.hooks.direct_calls': False},
377 {'vcs.hooks.protocol': 'http', 'vcs.hooks.direct_calls': False},
378 {'vcs.hooks.protocol': 'Pyro4', 'vcs.hooks.direct_calls': False},
379 ))
378 ))
380 def test_merge_triggers_push_hooks(
379 def test_merge_triggers_push_hooks(
381 self, pr_util, user_admin, capture_rcextensions, merge_extras,
380 self, pr_util, user_admin, capture_rcextensions, merge_extras,
382 extra_config):
381 extra_config):
383 pull_request = pr_util.create_pull_request(
382 pull_request = pr_util.create_pull_request(
384 approved=True, mergeable=True)
383 approved=True, mergeable=True)
385 # TODO: johbo: Needed for sqlite, try to find an automatic way for it
384 # TODO: johbo: Needed for sqlite, try to find an automatic way for it
386 merge_extras['repository'] = pull_request.target_repo.repo_name
385 merge_extras['repository'] = pull_request.target_repo.repo_name
387 Session().commit()
386 Session().commit()
388
387
389 with mock.patch.dict(rhodecode.CONFIG, extra_config, clear=False):
388 with mock.patch.dict(rhodecode.CONFIG, extra_config, clear=False):
390 merge_state = PullRequestModel().merge(
389 merge_state = PullRequestModel().merge(
391 pull_request, user_admin, extras=merge_extras)
390 pull_request, user_admin, extras=merge_extras)
392
391
393 assert merge_state.executed
392 assert merge_state.executed
394 assert 'pre_push' in capture_rcextensions
393 assert 'pre_push' in capture_rcextensions
395 assert 'post_push' in capture_rcextensions
394 assert 'post_push' in capture_rcextensions
396
395
397 def test_merge_can_be_rejected_by_pre_push_hook(
396 def test_merge_can_be_rejected_by_pre_push_hook(
398 self, pr_util, user_admin, capture_rcextensions, merge_extras):
397 self, pr_util, user_admin, capture_rcextensions, merge_extras):
399 pull_request = pr_util.create_pull_request(
398 pull_request = pr_util.create_pull_request(
400 approved=True, mergeable=True)
399 approved=True, mergeable=True)
401 # TODO: johbo: Needed for sqlite, try to find an automatic way for it
400 # TODO: johbo: Needed for sqlite, try to find an automatic way for it
402 merge_extras['repository'] = pull_request.target_repo.repo_name
401 merge_extras['repository'] = pull_request.target_repo.repo_name
403 Session().commit()
402 Session().commit()
404
403
405 with mock.patch('rhodecode.EXTENSIONS.PRE_PUSH_HOOK') as pre_pull:
404 with mock.patch('rhodecode.EXTENSIONS.PRE_PUSH_HOOK') as pre_pull:
406 pre_pull.side_effect = RepositoryError("Disallow push!")
405 pre_pull.side_effect = RepositoryError("Disallow push!")
407 merge_status = PullRequestModel().merge(
406 merge_status = PullRequestModel().merge(
408 pull_request, user_admin, extras=merge_extras)
407 pull_request, user_admin, extras=merge_extras)
409
408
410 assert not merge_status.executed
409 assert not merge_status.executed
411 assert 'pre_push' not in capture_rcextensions
410 assert 'pre_push' not in capture_rcextensions
412 assert 'post_push' not in capture_rcextensions
411 assert 'post_push' not in capture_rcextensions
413
412
414 def test_merge_fails_if_target_is_locked(
413 def test_merge_fails_if_target_is_locked(
415 self, pr_util, user_regular, merge_extras):
414 self, pr_util, user_regular, merge_extras):
416 pull_request = pr_util.create_pull_request(
415 pull_request = pr_util.create_pull_request(
417 approved=True, mergeable=True)
416 approved=True, mergeable=True)
418 locked_by = [user_regular.user_id + 1, 12345.50, 'lock_web']
417 locked_by = [user_regular.user_id + 1, 12345.50, 'lock_web']
419 pull_request.target_repo.locked = locked_by
418 pull_request.target_repo.locked = locked_by
420 # TODO: johbo: Check if this can work based on the database, currently
419 # TODO: johbo: Check if this can work based on the database, currently
421 # all data is pre-computed, that's why just updating the DB is not
420 # all data is pre-computed, that's why just updating the DB is not
422 # enough.
421 # enough.
423 merge_extras['locked_by'] = locked_by
422 merge_extras['locked_by'] = locked_by
424 merge_extras['repository'] = pull_request.target_repo.repo_name
423 merge_extras['repository'] = pull_request.target_repo.repo_name
425 # TODO: johbo: Needed for sqlite, try to find an automatic way for it
424 # TODO: johbo: Needed for sqlite, try to find an automatic way for it
426 Session().commit()
425 Session().commit()
427 merge_status = PullRequestModel().merge(
426 merge_status = PullRequestModel().merge(
428 pull_request, user_regular, extras=merge_extras)
427 pull_request, user_regular, extras=merge_extras)
429 assert not merge_status.executed
428 assert not merge_status.executed
430
429
431
430
432 @pytest.mark.parametrize('use_outdated, inlines_count, outdated_count', [
431 @pytest.mark.parametrize('use_outdated, inlines_count, outdated_count', [
433 (False, 1, 0),
432 (False, 1, 0),
434 (True, 0, 1),
433 (True, 0, 1),
435 ])
434 ])
436 def test_outdated_comments(
435 def test_outdated_comments(
437 pr_util, use_outdated, inlines_count, outdated_count):
436 pr_util, use_outdated, inlines_count, outdated_count):
438 pull_request = pr_util.create_pull_request()
437 pull_request = pr_util.create_pull_request()
439 pr_util.create_inline_comment(file_path='not_in_updated_diff')
438 pr_util.create_inline_comment(file_path='not_in_updated_diff')
440
439
441 with outdated_comments_patcher(use_outdated) as outdated_comment_mock:
440 with outdated_comments_patcher(use_outdated) as outdated_comment_mock:
442 pr_util.add_one_commit()
441 pr_util.add_one_commit()
443 assert_inline_comments(
442 assert_inline_comments(
444 pull_request, visible=inlines_count, outdated=outdated_count)
443 pull_request, visible=inlines_count, outdated=outdated_count)
445 outdated_comment_mock.assert_called_with(pull_request)
444 outdated_comment_mock.assert_called_with(pull_request)
446
445
447
446
448 @pytest.fixture
447 @pytest.fixture
449 def merge_extras(user_regular):
448 def merge_extras(user_regular):
450 """
449 """
451 Context for the vcs operation when running a merge.
450 Context for the vcs operation when running a merge.
452 """
451 """
453 extras = {
452 extras = {
454 'ip': '127.0.0.1',
453 'ip': '127.0.0.1',
455 'username': user_regular.username,
454 'username': user_regular.username,
456 'action': 'push',
455 'action': 'push',
457 'repository': 'fake_target_repo_name',
456 'repository': 'fake_target_repo_name',
458 'scm': 'git',
457 'scm': 'git',
459 'config': 'fake_config_ini_path',
458 'config': 'fake_config_ini_path',
460 'make_lock': None,
459 'make_lock': None,
461 'locked_by': [None, None, None],
460 'locked_by': [None, None, None],
462 'server_url': 'http://test.example.com:5000',
461 'server_url': 'http://test.example.com:5000',
463 'hooks': ['push', 'pull'],
462 'hooks': ['push', 'pull'],
464 'is_shadow_repo': False,
463 'is_shadow_repo': False,
465 }
464 }
466 return extras
465 return extras
467
466
468
467
469 class TestUpdateCommentHandling(object):
468 class TestUpdateCommentHandling(object):
470
469
471 @pytest.fixture(autouse=True, scope='class')
470 @pytest.fixture(autouse=True, scope='class')
472 def enable_outdated_comments(self, request, pylonsapp):
471 def enable_outdated_comments(self, request, pylonsapp):
473 config_patch = mock.patch.dict(
472 config_patch = mock.patch.dict(
474 'rhodecode.CONFIG', {'rhodecode_use_outdated_comments': True})
473 'rhodecode.CONFIG', {'rhodecode_use_outdated_comments': True})
475 config_patch.start()
474 config_patch.start()
476
475
477 @request.addfinalizer
476 @request.addfinalizer
478 def cleanup():
477 def cleanup():
479 config_patch.stop()
478 config_patch.stop()
480
479
481 def test_comment_stays_unflagged_on_unchanged_diff(self, pr_util):
480 def test_comment_stays_unflagged_on_unchanged_diff(self, pr_util):
482 commits = [
481 commits = [
483 {'message': 'a'},
482 {'message': 'a'},
484 {'message': 'b', 'added': [FileNode('file_b', 'test_content\n')]},
483 {'message': 'b', 'added': [FileNode('file_b', 'test_content\n')]},
485 {'message': 'c', 'added': [FileNode('file_c', 'test_content\n')]},
484 {'message': 'c', 'added': [FileNode('file_c', 'test_content\n')]},
486 ]
485 ]
487 pull_request = pr_util.create_pull_request(
486 pull_request = pr_util.create_pull_request(
488 commits=commits, target_head='a', source_head='b', revisions=['b'])
487 commits=commits, target_head='a', source_head='b', revisions=['b'])
489 pr_util.create_inline_comment(file_path='file_b')
488 pr_util.create_inline_comment(file_path='file_b')
490 pr_util.add_one_commit(head='c')
489 pr_util.add_one_commit(head='c')
491
490
492 assert_inline_comments(pull_request, visible=1, outdated=0)
491 assert_inline_comments(pull_request, visible=1, outdated=0)
493
492
494 def test_comment_stays_unflagged_on_change_above(self, pr_util):
493 def test_comment_stays_unflagged_on_change_above(self, pr_util):
495 original_content = ''.join(
494 original_content = ''.join(
496 ['line {}\n'.format(x) for x in range(1, 11)])
495 ['line {}\n'.format(x) for x in range(1, 11)])
497 updated_content = 'new_line_at_top\n' + original_content
496 updated_content = 'new_line_at_top\n' + original_content
498 commits = [
497 commits = [
499 {'message': 'a'},
498 {'message': 'a'},
500 {'message': 'b', 'added': [FileNode('file_b', original_content)]},
499 {'message': 'b', 'added': [FileNode('file_b', original_content)]},
501 {'message': 'c', 'changed': [FileNode('file_b', updated_content)]},
500 {'message': 'c', 'changed': [FileNode('file_b', updated_content)]},
502 ]
501 ]
503 pull_request = pr_util.create_pull_request(
502 pull_request = pr_util.create_pull_request(
504 commits=commits, target_head='a', source_head='b', revisions=['b'])
503 commits=commits, target_head='a', source_head='b', revisions=['b'])
505
504
506 with outdated_comments_patcher():
505 with outdated_comments_patcher():
507 comment = pr_util.create_inline_comment(
506 comment = pr_util.create_inline_comment(
508 line_no=u'n8', file_path='file_b')
507 line_no=u'n8', file_path='file_b')
509 pr_util.add_one_commit(head='c')
508 pr_util.add_one_commit(head='c')
510
509
511 assert_inline_comments(pull_request, visible=1, outdated=0)
510 assert_inline_comments(pull_request, visible=1, outdated=0)
512 assert comment.line_no == u'n9'
511 assert comment.line_no == u'n9'
513
512
514 def test_comment_stays_unflagged_on_change_below(self, pr_util):
513 def test_comment_stays_unflagged_on_change_below(self, pr_util):
515 original_content = ''.join(['line {}\n'.format(x) for x in range(10)])
514 original_content = ''.join(['line {}\n'.format(x) for x in range(10)])
516 updated_content = original_content + 'new_line_at_end\n'
515 updated_content = original_content + 'new_line_at_end\n'
517 commits = [
516 commits = [
518 {'message': 'a'},
517 {'message': 'a'},
519 {'message': 'b', 'added': [FileNode('file_b', original_content)]},
518 {'message': 'b', 'added': [FileNode('file_b', original_content)]},
520 {'message': 'c', 'changed': [FileNode('file_b', updated_content)]},
519 {'message': 'c', 'changed': [FileNode('file_b', updated_content)]},
521 ]
520 ]
522 pull_request = pr_util.create_pull_request(
521 pull_request = pr_util.create_pull_request(
523 commits=commits, target_head='a', source_head='b', revisions=['b'])
522 commits=commits, target_head='a', source_head='b', revisions=['b'])
524 pr_util.create_inline_comment(file_path='file_b')
523 pr_util.create_inline_comment(file_path='file_b')
525 pr_util.add_one_commit(head='c')
524 pr_util.add_one_commit(head='c')
526
525
527 assert_inline_comments(pull_request, visible=1, outdated=0)
526 assert_inline_comments(pull_request, visible=1, outdated=0)
528
527
529 @pytest.mark.parametrize('line_no', ['n4', 'o4', 'n10', 'o9'])
528 @pytest.mark.parametrize('line_no', ['n4', 'o4', 'n10', 'o9'])
530 def test_comment_flagged_on_change_around_context(self, pr_util, line_no):
529 def test_comment_flagged_on_change_around_context(self, pr_util, line_no):
531 base_lines = ['line {}\n'.format(x) for x in range(1, 13)]
530 base_lines = ['line {}\n'.format(x) for x in range(1, 13)]
532 change_lines = list(base_lines)
531 change_lines = list(base_lines)
533 change_lines.insert(6, 'line 6a added\n')
532 change_lines.insert(6, 'line 6a added\n')
534
533
535 # Changes on the last line of sight
534 # Changes on the last line of sight
536 update_lines = list(change_lines)
535 update_lines = list(change_lines)
537 update_lines[0] = 'line 1 changed\n'
536 update_lines[0] = 'line 1 changed\n'
538 update_lines[-1] = 'line 12 changed\n'
537 update_lines[-1] = 'line 12 changed\n'
539
538
540 def file_b(lines):
539 def file_b(lines):
541 return FileNode('file_b', ''.join(lines))
540 return FileNode('file_b', ''.join(lines))
542
541
543 commits = [
542 commits = [
544 {'message': 'a', 'added': [file_b(base_lines)]},
543 {'message': 'a', 'added': [file_b(base_lines)]},
545 {'message': 'b', 'changed': [file_b(change_lines)]},
544 {'message': 'b', 'changed': [file_b(change_lines)]},
546 {'message': 'c', 'changed': [file_b(update_lines)]},
545 {'message': 'c', 'changed': [file_b(update_lines)]},
547 ]
546 ]
548
547
549 pull_request = pr_util.create_pull_request(
548 pull_request = pr_util.create_pull_request(
550 commits=commits, target_head='a', source_head='b', revisions=['b'])
549 commits=commits, target_head='a', source_head='b', revisions=['b'])
551 pr_util.create_inline_comment(line_no=line_no, file_path='file_b')
550 pr_util.create_inline_comment(line_no=line_no, file_path='file_b')
552
551
553 with outdated_comments_patcher():
552 with outdated_comments_patcher():
554 pr_util.add_one_commit(head='c')
553 pr_util.add_one_commit(head='c')
555 assert_inline_comments(pull_request, visible=0, outdated=1)
554 assert_inline_comments(pull_request, visible=0, outdated=1)
556
555
557 @pytest.mark.parametrize("change, content", [
556 @pytest.mark.parametrize("change, content", [
558 ('changed', 'changed\n'),
557 ('changed', 'changed\n'),
559 ('removed', ''),
558 ('removed', ''),
560 ], ids=['changed', 'removed'])
559 ], ids=['changed', 'removed'])
561 def test_comment_flagged_on_change(self, pr_util, change, content):
560 def test_comment_flagged_on_change(self, pr_util, change, content):
562 commits = [
561 commits = [
563 {'message': 'a'},
562 {'message': 'a'},
564 {'message': 'b', 'added': [FileNode('file_b', 'test_content\n')]},
563 {'message': 'b', 'added': [FileNode('file_b', 'test_content\n')]},
565 {'message': 'c', change: [FileNode('file_b', content)]},
564 {'message': 'c', change: [FileNode('file_b', content)]},
566 ]
565 ]
567 pull_request = pr_util.create_pull_request(
566 pull_request = pr_util.create_pull_request(
568 commits=commits, target_head='a', source_head='b', revisions=['b'])
567 commits=commits, target_head='a', source_head='b', revisions=['b'])
569 pr_util.create_inline_comment(file_path='file_b')
568 pr_util.create_inline_comment(file_path='file_b')
570
569
571 with outdated_comments_patcher():
570 with outdated_comments_patcher():
572 pr_util.add_one_commit(head='c')
571 pr_util.add_one_commit(head='c')
573 assert_inline_comments(pull_request, visible=0, outdated=1)
572 assert_inline_comments(pull_request, visible=0, outdated=1)
574
573
575
574
576 class TestUpdateChangedFiles(object):
575 class TestUpdateChangedFiles(object):
577
576
578 def test_no_changes_on_unchanged_diff(self, pr_util):
577 def test_no_changes_on_unchanged_diff(self, pr_util):
579 commits = [
578 commits = [
580 {'message': 'a'},
579 {'message': 'a'},
581 {'message': 'b',
580 {'message': 'b',
582 'added': [FileNode('file_b', 'test_content b\n')]},
581 'added': [FileNode('file_b', 'test_content b\n')]},
583 {'message': 'c',
582 {'message': 'c',
584 'added': [FileNode('file_c', 'test_content c\n')]},
583 'added': [FileNode('file_c', 'test_content c\n')]},
585 ]
584 ]
586 # open a PR from a to b, adding file_b
585 # open a PR from a to b, adding file_b
587 pull_request = pr_util.create_pull_request(
586 pull_request = pr_util.create_pull_request(
588 commits=commits, target_head='a', source_head='b', revisions=['b'],
587 commits=commits, target_head='a', source_head='b', revisions=['b'],
589 name_suffix='per-file-review')
588 name_suffix='per-file-review')
590
589
591 # modify PR adding new file file_c
590 # modify PR adding new file file_c
592 pr_util.add_one_commit(head='c')
591 pr_util.add_one_commit(head='c')
593
592
594 assert_pr_file_changes(
593 assert_pr_file_changes(
595 pull_request,
594 pull_request,
596 added=['file_c'],
595 added=['file_c'],
597 modified=[],
596 modified=[],
598 removed=[])
597 removed=[])
599
598
600 def test_modify_and_undo_modification_diff(self, pr_util):
599 def test_modify_and_undo_modification_diff(self, pr_util):
601 commits = [
600 commits = [
602 {'message': 'a'},
601 {'message': 'a'},
603 {'message': 'b',
602 {'message': 'b',
604 'added': [FileNode('file_b', 'test_content b\n')]},
603 'added': [FileNode('file_b', 'test_content b\n')]},
605 {'message': 'c',
604 {'message': 'c',
606 'changed': [FileNode('file_b', 'test_content b modified\n')]},
605 'changed': [FileNode('file_b', 'test_content b modified\n')]},
607 {'message': 'd',
606 {'message': 'd',
608 'changed': [FileNode('file_b', 'test_content b\n')]},
607 'changed': [FileNode('file_b', 'test_content b\n')]},
609 ]
608 ]
610 # open a PR from a to b, adding file_b
609 # open a PR from a to b, adding file_b
611 pull_request = pr_util.create_pull_request(
610 pull_request = pr_util.create_pull_request(
612 commits=commits, target_head='a', source_head='b', revisions=['b'],
611 commits=commits, target_head='a', source_head='b', revisions=['b'],
613 name_suffix='per-file-review')
612 name_suffix='per-file-review')
614
613
615 # modify PR modifying file file_b
614 # modify PR modifying file file_b
616 pr_util.add_one_commit(head='c')
615 pr_util.add_one_commit(head='c')
617
616
618 assert_pr_file_changes(
617 assert_pr_file_changes(
619 pull_request,
618 pull_request,
620 added=[],
619 added=[],
621 modified=['file_b'],
620 modified=['file_b'],
622 removed=[])
621 removed=[])
623
622
624 # move the head again to d, which rollbacks change,
623 # move the head again to d, which rollbacks change,
625 # meaning we should indicate no changes
624 # meaning we should indicate no changes
626 pr_util.add_one_commit(head='d')
625 pr_util.add_one_commit(head='d')
627
626
628 assert_pr_file_changes(
627 assert_pr_file_changes(
629 pull_request,
628 pull_request,
630 added=[],
629 added=[],
631 modified=[],
630 modified=[],
632 removed=[])
631 removed=[])
633
632
634 def test_updated_all_files_in_pr(self, pr_util):
633 def test_updated_all_files_in_pr(self, pr_util):
635 commits = [
634 commits = [
636 {'message': 'a'},
635 {'message': 'a'},
637 {'message': 'b', 'added': [
636 {'message': 'b', 'added': [
638 FileNode('file_a', 'test_content a\n'),
637 FileNode('file_a', 'test_content a\n'),
639 FileNode('file_b', 'test_content b\n'),
638 FileNode('file_b', 'test_content b\n'),
640 FileNode('file_c', 'test_content c\n')]},
639 FileNode('file_c', 'test_content c\n')]},
641 {'message': 'c', 'changed': [
640 {'message': 'c', 'changed': [
642 FileNode('file_a', 'test_content a changed\n'),
641 FileNode('file_a', 'test_content a changed\n'),
643 FileNode('file_b', 'test_content b changed\n'),
642 FileNode('file_b', 'test_content b changed\n'),
644 FileNode('file_c', 'test_content c changed\n')]},
643 FileNode('file_c', 'test_content c changed\n')]},
645 ]
644 ]
646 # open a PR from a to b, changing 3 files
645 # open a PR from a to b, changing 3 files
647 pull_request = pr_util.create_pull_request(
646 pull_request = pr_util.create_pull_request(
648 commits=commits, target_head='a', source_head='b', revisions=['b'],
647 commits=commits, target_head='a', source_head='b', revisions=['b'],
649 name_suffix='per-file-review')
648 name_suffix='per-file-review')
650
649
651 pr_util.add_one_commit(head='c')
650 pr_util.add_one_commit(head='c')
652
651
653 assert_pr_file_changes(
652 assert_pr_file_changes(
654 pull_request,
653 pull_request,
655 added=[],
654 added=[],
656 modified=['file_a', 'file_b', 'file_c'],
655 modified=['file_a', 'file_b', 'file_c'],
657 removed=[])
656 removed=[])
658
657
659 def test_updated_and_removed_all_files_in_pr(self, pr_util):
658 def test_updated_and_removed_all_files_in_pr(self, pr_util):
660 commits = [
659 commits = [
661 {'message': 'a'},
660 {'message': 'a'},
662 {'message': 'b', 'added': [
661 {'message': 'b', 'added': [
663 FileNode('file_a', 'test_content a\n'),
662 FileNode('file_a', 'test_content a\n'),
664 FileNode('file_b', 'test_content b\n'),
663 FileNode('file_b', 'test_content b\n'),
665 FileNode('file_c', 'test_content c\n')]},
664 FileNode('file_c', 'test_content c\n')]},
666 {'message': 'c', 'removed': [
665 {'message': 'c', 'removed': [
667 FileNode('file_a', 'test_content a changed\n'),
666 FileNode('file_a', 'test_content a changed\n'),
668 FileNode('file_b', 'test_content b changed\n'),
667 FileNode('file_b', 'test_content b changed\n'),
669 FileNode('file_c', 'test_content c changed\n')]},
668 FileNode('file_c', 'test_content c changed\n')]},
670 ]
669 ]
671 # open a PR from a to b, removing 3 files
670 # open a PR from a to b, removing 3 files
672 pull_request = pr_util.create_pull_request(
671 pull_request = pr_util.create_pull_request(
673 commits=commits, target_head='a', source_head='b', revisions=['b'],
672 commits=commits, target_head='a', source_head='b', revisions=['b'],
674 name_suffix='per-file-review')
673 name_suffix='per-file-review')
675
674
676 pr_util.add_one_commit(head='c')
675 pr_util.add_one_commit(head='c')
677
676
678 assert_pr_file_changes(
677 assert_pr_file_changes(
679 pull_request,
678 pull_request,
680 added=[],
679 added=[],
681 modified=[],
680 modified=[],
682 removed=['file_a', 'file_b', 'file_c'])
681 removed=['file_a', 'file_b', 'file_c'])
683
682
684
683
685 def test_update_writes_snapshot_into_pull_request_version(pr_util):
684 def test_update_writes_snapshot_into_pull_request_version(pr_util):
686 model = PullRequestModel()
685 model = PullRequestModel()
687 pull_request = pr_util.create_pull_request()
686 pull_request = pr_util.create_pull_request()
688 pr_util.update_source_repository()
687 pr_util.update_source_repository()
689
688
690 model.update_commits(pull_request)
689 model.update_commits(pull_request)
691
690
692 # Expect that it has a version entry now
691 # Expect that it has a version entry now
693 assert len(model.get_versions(pull_request)) == 1
692 assert len(model.get_versions(pull_request)) == 1
694
693
695
694
696 def test_update_skips_new_version_if_unchanged(pr_util):
695 def test_update_skips_new_version_if_unchanged(pr_util):
697 pull_request = pr_util.create_pull_request()
696 pull_request = pr_util.create_pull_request()
698 model = PullRequestModel()
697 model = PullRequestModel()
699 model.update_commits(pull_request)
698 model.update_commits(pull_request)
700
699
701 # Expect that it still has no versions
700 # Expect that it still has no versions
702 assert len(model.get_versions(pull_request)) == 0
701 assert len(model.get_versions(pull_request)) == 0
703
702
704
703
705 def test_update_assigns_comments_to_the_new_version(pr_util):
704 def test_update_assigns_comments_to_the_new_version(pr_util):
706 model = PullRequestModel()
705 model = PullRequestModel()
707 pull_request = pr_util.create_pull_request()
706 pull_request = pr_util.create_pull_request()
708 comment = pr_util.create_comment()
707 comment = pr_util.create_comment()
709 pr_util.update_source_repository()
708 pr_util.update_source_repository()
710
709
711 model.update_commits(pull_request)
710 model.update_commits(pull_request)
712
711
713 # Expect that the comment is linked to the pr version now
712 # Expect that the comment is linked to the pr version now
714 assert comment.pull_request_version == model.get_versions(pull_request)[0]
713 assert comment.pull_request_version == model.get_versions(pull_request)[0]
715
714
716
715
717 def test_update_adds_a_comment_to_the_pull_request_about_the_change(pr_util):
716 def test_update_adds_a_comment_to_the_pull_request_about_the_change(pr_util):
718 model = PullRequestModel()
717 model = PullRequestModel()
719 pull_request = pr_util.create_pull_request()
718 pull_request = pr_util.create_pull_request()
720 pr_util.update_source_repository()
719 pr_util.update_source_repository()
721 pr_util.update_source_repository()
720 pr_util.update_source_repository()
722
721
723 model.update_commits(pull_request)
722 model.update_commits(pull_request)
724
723
725 # Expect to find a new comment about the change
724 # Expect to find a new comment about the change
726 expected_message = textwrap.dedent(
725 expected_message = textwrap.dedent(
727 """\
726 """\
728 Pull request updated. Auto status change to |under_review|
727 Pull request updated. Auto status change to |under_review|
729
728
730 .. role:: added
729 .. role:: added
731 .. role:: removed
730 .. role:: removed
732 .. parsed-literal::
731 .. parsed-literal::
733
732
734 Changed commits:
733 Changed commits:
735 * :added:`1 added`
734 * :added:`1 added`
736 * :removed:`0 removed`
735 * :removed:`0 removed`
737
736
738 Changed files:
737 Changed files:
739 * `A file_2 <#a_c--92ed3b5f07b4>`_
738 * `A file_2 <#a_c--92ed3b5f07b4>`_
740
739
741 .. |under_review| replace:: *"Under Review"*"""
740 .. |under_review| replace:: *"Under Review"*"""
742 )
741 )
743 pull_request_comments = sorted(
742 pull_request_comments = sorted(
744 pull_request.comments, key=lambda c: c.modified_at)
743 pull_request.comments, key=lambda c: c.modified_at)
745 update_comment = pull_request_comments[-1]
744 update_comment = pull_request_comments[-1]
746 assert update_comment.text == expected_message
745 assert update_comment.text == expected_message
747
746
748
747
749 def test_create_version_from_snapshot_updates_attributes(pr_util):
748 def test_create_version_from_snapshot_updates_attributes(pr_util):
750 pull_request = pr_util.create_pull_request()
749 pull_request = pr_util.create_pull_request()
751
750
752 # Avoiding default values
751 # Avoiding default values
753 pull_request.status = PullRequest.STATUS_CLOSED
752 pull_request.status = PullRequest.STATUS_CLOSED
754 pull_request._last_merge_source_rev = "0" * 40
753 pull_request._last_merge_source_rev = "0" * 40
755 pull_request._last_merge_target_rev = "1" * 40
754 pull_request._last_merge_target_rev = "1" * 40
756 pull_request._last_merge_status = 1
755 pull_request._last_merge_status = 1
757 pull_request.merge_rev = "2" * 40
756 pull_request.merge_rev = "2" * 40
758
757
759 # Remember automatic values
758 # Remember automatic values
760 created_on = pull_request.created_on
759 created_on = pull_request.created_on
761 updated_on = pull_request.updated_on
760 updated_on = pull_request.updated_on
762
761
763 # Create a new version of the pull request
762 # Create a new version of the pull request
764 version = PullRequestModel()._create_version_from_snapshot(pull_request)
763 version = PullRequestModel()._create_version_from_snapshot(pull_request)
765
764
766 # Check attributes
765 # Check attributes
767 assert version.title == pr_util.create_parameters['title']
766 assert version.title == pr_util.create_parameters['title']
768 assert version.description == pr_util.create_parameters['description']
767 assert version.description == pr_util.create_parameters['description']
769 assert version.status == PullRequest.STATUS_CLOSED
768 assert version.status == PullRequest.STATUS_CLOSED
770
769
771 # versions get updated created_on
770 # versions get updated created_on
772 assert version.created_on != created_on
771 assert version.created_on != created_on
773
772
774 assert version.updated_on == updated_on
773 assert version.updated_on == updated_on
775 assert version.user_id == pull_request.user_id
774 assert version.user_id == pull_request.user_id
776 assert version.revisions == pr_util.create_parameters['revisions']
775 assert version.revisions == pr_util.create_parameters['revisions']
777 assert version.source_repo == pr_util.source_repository
776 assert version.source_repo == pr_util.source_repository
778 assert version.source_ref == pr_util.create_parameters['source_ref']
777 assert version.source_ref == pr_util.create_parameters['source_ref']
779 assert version.target_repo == pr_util.target_repository
778 assert version.target_repo == pr_util.target_repository
780 assert version.target_ref == pr_util.create_parameters['target_ref']
779 assert version.target_ref == pr_util.create_parameters['target_ref']
781 assert version._last_merge_source_rev == pull_request._last_merge_source_rev
780 assert version._last_merge_source_rev == pull_request._last_merge_source_rev
782 assert version._last_merge_target_rev == pull_request._last_merge_target_rev
781 assert version._last_merge_target_rev == pull_request._last_merge_target_rev
783 assert version._last_merge_status == pull_request._last_merge_status
782 assert version._last_merge_status == pull_request._last_merge_status
784 assert version.merge_rev == pull_request.merge_rev
783 assert version.merge_rev == pull_request.merge_rev
785 assert version.pull_request == pull_request
784 assert version.pull_request == pull_request
786
785
787
786
788 def test_link_comments_to_version_only_updates_unlinked_comments(pr_util):
787 def test_link_comments_to_version_only_updates_unlinked_comments(pr_util):
789 version1 = pr_util.create_version_of_pull_request()
788 version1 = pr_util.create_version_of_pull_request()
790 comment_linked = pr_util.create_comment(linked_to=version1)
789 comment_linked = pr_util.create_comment(linked_to=version1)
791 comment_unlinked = pr_util.create_comment()
790 comment_unlinked = pr_util.create_comment()
792 version2 = pr_util.create_version_of_pull_request()
791 version2 = pr_util.create_version_of_pull_request()
793
792
794 PullRequestModel()._link_comments_to_version(version2)
793 PullRequestModel()._link_comments_to_version(version2)
795
794
796 # Expect that only the new comment is linked to version2
795 # Expect that only the new comment is linked to version2
797 assert (
796 assert (
798 comment_unlinked.pull_request_version_id ==
797 comment_unlinked.pull_request_version_id ==
799 version2.pull_request_version_id)
798 version2.pull_request_version_id)
800 assert (
799 assert (
801 comment_linked.pull_request_version_id ==
800 comment_linked.pull_request_version_id ==
802 version1.pull_request_version_id)
801 version1.pull_request_version_id)
803 assert (
802 assert (
804 comment_unlinked.pull_request_version_id !=
803 comment_unlinked.pull_request_version_id !=
805 comment_linked.pull_request_version_id)
804 comment_linked.pull_request_version_id)
806
805
807
806
808 def test_calculate_commits():
807 def test_calculate_commits():
809 old_ids = [1, 2, 3]
808 old_ids = [1, 2, 3]
810 new_ids = [1, 3, 4, 5]
809 new_ids = [1, 3, 4, 5]
811 change = PullRequestModel()._calculate_commit_id_changes(old_ids, new_ids)
810 change = PullRequestModel()._calculate_commit_id_changes(old_ids, new_ids)
812 assert change.added == [4, 5]
811 assert change.added == [4, 5]
813 assert change.common == [1, 3]
812 assert change.common == [1, 3]
814 assert change.removed == [2]
813 assert change.removed == [2]
815 assert change.total == [1, 3, 4, 5]
814 assert change.total == [1, 3, 4, 5]
816
815
817
816
818 def assert_inline_comments(pull_request, visible=None, outdated=None):
817 def assert_inline_comments(pull_request, visible=None, outdated=None):
819 if visible is not None:
818 if visible is not None:
820 inline_comments = CommentsModel().get_inline_comments(
819 inline_comments = CommentsModel().get_inline_comments(
821 pull_request.target_repo.repo_id, pull_request=pull_request)
820 pull_request.target_repo.repo_id, pull_request=pull_request)
822 inline_cnt = CommentsModel().get_inline_comments_count(
821 inline_cnt = CommentsModel().get_inline_comments_count(
823 inline_comments)
822 inline_comments)
824 assert inline_cnt == visible
823 assert inline_cnt == visible
825 if outdated is not None:
824 if outdated is not None:
826 outdated_comments = CommentsModel().get_outdated_comments(
825 outdated_comments = CommentsModel().get_outdated_comments(
827 pull_request.target_repo.repo_id, pull_request)
826 pull_request.target_repo.repo_id, pull_request)
828 assert len(outdated_comments) == outdated
827 assert len(outdated_comments) == outdated
829
828
830
829
831 def assert_pr_file_changes(
830 def assert_pr_file_changes(
832 pull_request, added=None, modified=None, removed=None):
831 pull_request, added=None, modified=None, removed=None):
833 pr_versions = PullRequestModel().get_versions(pull_request)
832 pr_versions = PullRequestModel().get_versions(pull_request)
834 # always use first version, ie original PR to calculate changes
833 # always use first version, ie original PR to calculate changes
835 pull_request_version = pr_versions[0]
834 pull_request_version = pr_versions[0]
836 old_diff_data, new_diff_data = PullRequestModel()._generate_update_diffs(
835 old_diff_data, new_diff_data = PullRequestModel()._generate_update_diffs(
837 pull_request, pull_request_version)
836 pull_request, pull_request_version)
838 file_changes = PullRequestModel()._calculate_file_changes(
837 file_changes = PullRequestModel()._calculate_file_changes(
839 old_diff_data, new_diff_data)
838 old_diff_data, new_diff_data)
840
839
841 assert added == file_changes.added, \
840 assert added == file_changes.added, \
842 'expected added:%s vs value:%s' % (added, file_changes.added)
841 'expected added:%s vs value:%s' % (added, file_changes.added)
843 assert modified == file_changes.modified, \
842 assert modified == file_changes.modified, \
844 'expected modified:%s vs value:%s' % (modified, file_changes.modified)
843 'expected modified:%s vs value:%s' % (modified, file_changes.modified)
845 assert removed == file_changes.removed, \
844 assert removed == file_changes.removed, \
846 'expected removed:%s vs value:%s' % (removed, file_changes.removed)
845 'expected removed:%s vs value:%s' % (removed, file_changes.removed)
847
846
848
847
849 def outdated_comments_patcher(use_outdated=True):
848 def outdated_comments_patcher(use_outdated=True):
850 return mock.patch.object(
849 return mock.patch.object(
851 CommentsModel, 'use_outdated_comments',
850 CommentsModel, 'use_outdated_comments',
852 return_value=use_outdated)
851 return_value=use_outdated)
@@ -1,1825 +1,1805 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2017 RhodeCode GmbH
3 # Copyright (C) 2010-2017 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import collections
21 import collections
22 import datetime
22 import datetime
23 import hashlib
23 import hashlib
24 import os
24 import os
25 import re
25 import re
26 import pprint
26 import pprint
27 import shutil
27 import shutil
28 import socket
28 import socket
29 import subprocess32
29 import subprocess32
30 import time
30 import time
31 import uuid
31 import uuid
32 import dateutil.tz
32 import dateutil.tz
33
33
34 import mock
34 import mock
35 import pyramid.testing
35 import pyramid.testing
36 import pytest
36 import pytest
37 import colander
37 import colander
38 import requests
38 import requests
39
39
40 import rhodecode
40 import rhodecode
41 from rhodecode.lib.utils2 import AttributeDict
41 from rhodecode.lib.utils2 import AttributeDict
42 from rhodecode.model.changeset_status import ChangesetStatusModel
42 from rhodecode.model.changeset_status import ChangesetStatusModel
43 from rhodecode.model.comment import CommentsModel
43 from rhodecode.model.comment import CommentsModel
44 from rhodecode.model.db import (
44 from rhodecode.model.db import (
45 PullRequest, Repository, RhodeCodeSetting, ChangesetStatus, RepoGroup,
45 PullRequest, Repository, RhodeCodeSetting, ChangesetStatus, RepoGroup,
46 UserGroup, RepoRhodeCodeUi, RepoRhodeCodeSetting, RhodeCodeUi)
46 UserGroup, RepoRhodeCodeUi, RepoRhodeCodeSetting, RhodeCodeUi)
47 from rhodecode.model.meta import Session
47 from rhodecode.model.meta import Session
48 from rhodecode.model.pull_request import PullRequestModel
48 from rhodecode.model.pull_request import PullRequestModel
49 from rhodecode.model.repo import RepoModel
49 from rhodecode.model.repo import RepoModel
50 from rhodecode.model.repo_group import RepoGroupModel
50 from rhodecode.model.repo_group import RepoGroupModel
51 from rhodecode.model.user import UserModel
51 from rhodecode.model.user import UserModel
52 from rhodecode.model.settings import VcsSettingsModel
52 from rhodecode.model.settings import VcsSettingsModel
53 from rhodecode.model.user_group import UserGroupModel
53 from rhodecode.model.user_group import UserGroupModel
54 from rhodecode.model.integration import IntegrationModel
54 from rhodecode.model.integration import IntegrationModel
55 from rhodecode.integrations import integration_type_registry
55 from rhodecode.integrations import integration_type_registry
56 from rhodecode.integrations.types.base import IntegrationTypeBase
56 from rhodecode.integrations.types.base import IntegrationTypeBase
57 from rhodecode.lib.utils import repo2db_mapper
57 from rhodecode.lib.utils import repo2db_mapper
58 from rhodecode.lib.vcs import create_vcsserver_proxy
58 from rhodecode.lib.vcs import create_vcsserver_proxy
59 from rhodecode.lib.vcs.backends import get_backend
59 from rhodecode.lib.vcs.backends import get_backend
60 from rhodecode.lib.vcs.nodes import FileNode
60 from rhodecode.lib.vcs.nodes import FileNode
61 from rhodecode.tests import (
61 from rhodecode.tests import (
62 login_user_session, get_new_dir, utils, TESTS_TMP_PATH,
62 login_user_session, get_new_dir, utils, TESTS_TMP_PATH,
63 TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR2_LOGIN,
63 TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR2_LOGIN,
64 TEST_USER_REGULAR_PASS)
64 TEST_USER_REGULAR_PASS)
65 from rhodecode.tests.utils import CustomTestApp
65 from rhodecode.tests.utils import CustomTestApp
66 from rhodecode.tests.fixture import Fixture
66 from rhodecode.tests.fixture import Fixture
67
67
68
68
69 def _split_comma(value):
69 def _split_comma(value):
70 return value.split(',')
70 return value.split(',')
71
71
72
72
73 def pytest_addoption(parser):
73 def pytest_addoption(parser):
74 parser.addoption(
74 parser.addoption(
75 '--keep-tmp-path', action='store_true',
75 '--keep-tmp-path', action='store_true',
76 help="Keep the test temporary directories")
76 help="Keep the test temporary directories")
77 parser.addoption(
77 parser.addoption(
78 '--backends', action='store', type=_split_comma,
78 '--backends', action='store', type=_split_comma,
79 default=['git', 'hg', 'svn'],
79 default=['git', 'hg', 'svn'],
80 help="Select which backends to test for backend specific tests.")
80 help="Select which backends to test for backend specific tests.")
81 parser.addoption(
81 parser.addoption(
82 '--dbs', action='store', type=_split_comma,
82 '--dbs', action='store', type=_split_comma,
83 default=['sqlite'],
83 default=['sqlite'],
84 help="Select which database to test for database specific tests. "
84 help="Select which database to test for database specific tests. "
85 "Possible options are sqlite,postgres,mysql")
85 "Possible options are sqlite,postgres,mysql")
86 parser.addoption(
86 parser.addoption(
87 '--appenlight', '--ae', action='store_true',
87 '--appenlight', '--ae', action='store_true',
88 help="Track statistics in appenlight.")
88 help="Track statistics in appenlight.")
89 parser.addoption(
89 parser.addoption(
90 '--appenlight-api-key', '--ae-key',
90 '--appenlight-api-key', '--ae-key',
91 help="API key for Appenlight.")
91 help="API key for Appenlight.")
92 parser.addoption(
92 parser.addoption(
93 '--appenlight-url', '--ae-url',
93 '--appenlight-url', '--ae-url',
94 default="https://ae.rhodecode.com",
94 default="https://ae.rhodecode.com",
95 help="Appenlight service URL, defaults to https://ae.rhodecode.com")
95 help="Appenlight service URL, defaults to https://ae.rhodecode.com")
96 parser.addoption(
96 parser.addoption(
97 '--sqlite-connection-string', action='store',
97 '--sqlite-connection-string', action='store',
98 default='', help="Connection string for the dbs tests with SQLite")
98 default='', help="Connection string for the dbs tests with SQLite")
99 parser.addoption(
99 parser.addoption(
100 '--postgres-connection-string', action='store',
100 '--postgres-connection-string', action='store',
101 default='', help="Connection string for the dbs tests with Postgres")
101 default='', help="Connection string for the dbs tests with Postgres")
102 parser.addoption(
102 parser.addoption(
103 '--mysql-connection-string', action='store',
103 '--mysql-connection-string', action='store',
104 default='', help="Connection string for the dbs tests with MySQL")
104 default='', help="Connection string for the dbs tests with MySQL")
105 parser.addoption(
105 parser.addoption(
106 '--repeat', type=int, default=100,
106 '--repeat', type=int, default=100,
107 help="Number of repetitions in performance tests.")
107 help="Number of repetitions in performance tests.")
108
108
109
109
110 def pytest_configure(config):
110 def pytest_configure(config):
111 # Appy the kombu patch early on, needed for test discovery on Python 2.7.11
111 # Appy the kombu patch early on, needed for test discovery on Python 2.7.11
112 from rhodecode.config import patches
112 from rhodecode.config import patches
113 patches.kombu_1_5_1_python_2_7_11()
113 patches.kombu_1_5_1_python_2_7_11()
114
114
115
115
116 def pytest_collection_modifyitems(session, config, items):
116 def pytest_collection_modifyitems(session, config, items):
117 # nottest marked, compare nose, used for transition from nose to pytest
117 # nottest marked, compare nose, used for transition from nose to pytest
118 remaining = [
118 remaining = [
119 i for i in items if getattr(i.obj, '__test__', True)]
119 i for i in items if getattr(i.obj, '__test__', True)]
120 items[:] = remaining
120 items[:] = remaining
121
121
122
122
123 def pytest_generate_tests(metafunc):
123 def pytest_generate_tests(metafunc):
124 # Support test generation based on --backend parameter
124 # Support test generation based on --backend parameter
125 if 'backend_alias' in metafunc.fixturenames:
125 if 'backend_alias' in metafunc.fixturenames:
126 backends = get_backends_from_metafunc(metafunc)
126 backends = get_backends_from_metafunc(metafunc)
127 scope = None
127 scope = None
128 if not backends:
128 if not backends:
129 pytest.skip("Not enabled for any of selected backends")
129 pytest.skip("Not enabled for any of selected backends")
130 metafunc.parametrize('backend_alias', backends, scope=scope)
130 metafunc.parametrize('backend_alias', backends, scope=scope)
131 elif hasattr(metafunc.function, 'backends'):
131 elif hasattr(metafunc.function, 'backends'):
132 backends = get_backends_from_metafunc(metafunc)
132 backends = get_backends_from_metafunc(metafunc)
133 if not backends:
133 if not backends:
134 pytest.skip("Not enabled for any of selected backends")
134 pytest.skip("Not enabled for any of selected backends")
135
135
136
136
137 def get_backends_from_metafunc(metafunc):
137 def get_backends_from_metafunc(metafunc):
138 requested_backends = set(metafunc.config.getoption('--backends'))
138 requested_backends = set(metafunc.config.getoption('--backends'))
139 if hasattr(metafunc.function, 'backends'):
139 if hasattr(metafunc.function, 'backends'):
140 # Supported backends by this test function, created from
140 # Supported backends by this test function, created from
141 # pytest.mark.backends
141 # pytest.mark.backends
142 backends = metafunc.function.backends.args
142 backends = metafunc.function.backends.args
143 elif hasattr(metafunc.cls, 'backend_alias'):
143 elif hasattr(metafunc.cls, 'backend_alias'):
144 # Support class attribute "backend_alias", this is mainly
144 # Support class attribute "backend_alias", this is mainly
145 # for legacy reasons for tests not yet using pytest.mark.backends
145 # for legacy reasons for tests not yet using pytest.mark.backends
146 backends = [metafunc.cls.backend_alias]
146 backends = [metafunc.cls.backend_alias]
147 else:
147 else:
148 backends = metafunc.config.getoption('--backends')
148 backends = metafunc.config.getoption('--backends')
149 return requested_backends.intersection(backends)
149 return requested_backends.intersection(backends)
150
150
151
151
152 @pytest.fixture(scope='session', autouse=True)
152 @pytest.fixture(scope='session', autouse=True)
153 def activate_example_rcextensions(request):
153 def activate_example_rcextensions(request):
154 """
154 """
155 Patch in an example rcextensions module which verifies passed in kwargs.
155 Patch in an example rcextensions module which verifies passed in kwargs.
156 """
156 """
157 from rhodecode.tests.other import example_rcextensions
157 from rhodecode.tests.other import example_rcextensions
158
158
159 old_extensions = rhodecode.EXTENSIONS
159 old_extensions = rhodecode.EXTENSIONS
160 rhodecode.EXTENSIONS = example_rcextensions
160 rhodecode.EXTENSIONS = example_rcextensions
161
161
162 @request.addfinalizer
162 @request.addfinalizer
163 def cleanup():
163 def cleanup():
164 rhodecode.EXTENSIONS = old_extensions
164 rhodecode.EXTENSIONS = old_extensions
165
165
166
166
167 @pytest.fixture
167 @pytest.fixture
168 def capture_rcextensions():
168 def capture_rcextensions():
169 """
169 """
170 Returns the recorded calls to entry points in rcextensions.
170 Returns the recorded calls to entry points in rcextensions.
171 """
171 """
172 calls = rhodecode.EXTENSIONS.calls
172 calls = rhodecode.EXTENSIONS.calls
173 calls.clear()
173 calls.clear()
174 # Note: At this moment, it is still the empty dict, but that will
174 # Note: At this moment, it is still the empty dict, but that will
175 # be filled during the test run and since it is a reference this
175 # be filled during the test run and since it is a reference this
176 # is enough to make it work.
176 # is enough to make it work.
177 return calls
177 return calls
178
178
179
179
180 @pytest.fixture(scope='session')
180 @pytest.fixture(scope='session')
181 def http_environ_session():
181 def http_environ_session():
182 """
182 """
183 Allow to use "http_environ" in session scope.
183 Allow to use "http_environ" in session scope.
184 """
184 """
185 return http_environ(
185 return http_environ(
186 http_host_stub=http_host_stub())
186 http_host_stub=http_host_stub())
187
187
188
188
189 @pytest.fixture
189 @pytest.fixture
190 def http_host_stub():
190 def http_host_stub():
191 """
191 """
192 Value of HTTP_HOST in the test run.
192 Value of HTTP_HOST in the test run.
193 """
193 """
194 return 'test.example.com:80'
194 return 'test.example.com:80'
195
195
196
196
197 @pytest.fixture
197 @pytest.fixture
198 def http_environ(http_host_stub):
198 def http_environ(http_host_stub):
199 """
199 """
200 HTTP extra environ keys.
200 HTTP extra environ keys.
201
201
202 User by the test application and as well for setting up the pylons
202 User by the test application and as well for setting up the pylons
203 environment. In the case of the fixture "app" it should be possible
203 environment. In the case of the fixture "app" it should be possible
204 to override this for a specific test case.
204 to override this for a specific test case.
205 """
205 """
206 return {
206 return {
207 'SERVER_NAME': http_host_stub.split(':')[0],
207 'SERVER_NAME': http_host_stub.split(':')[0],
208 'SERVER_PORT': http_host_stub.split(':')[1],
208 'SERVER_PORT': http_host_stub.split(':')[1],
209 'HTTP_HOST': http_host_stub,
209 'HTTP_HOST': http_host_stub,
210 }
210 }
211
211
212
212
213 @pytest.fixture(scope='function')
213 @pytest.fixture(scope='function')
214 def app(request, pylonsapp, http_environ):
214 def app(request, pylonsapp, http_environ):
215
215
216
216
217 app = CustomTestApp(
217 app = CustomTestApp(
218 pylonsapp,
218 pylonsapp,
219 extra_environ=http_environ)
219 extra_environ=http_environ)
220 if request.cls:
220 if request.cls:
221 request.cls.app = app
221 request.cls.app = app
222 return app
222 return app
223
223
224
224
225 @pytest.fixture(scope='session')
225 @pytest.fixture(scope='session')
226 def app_settings(pylonsapp, pylons_config):
226 def app_settings(pylonsapp, pylons_config):
227 """
227 """
228 Settings dictionary used to create the app.
228 Settings dictionary used to create the app.
229
229
230 Parses the ini file and passes the result through the sanitize and apply
230 Parses the ini file and passes the result through the sanitize and apply
231 defaults mechanism in `rhodecode.config.middleware`.
231 defaults mechanism in `rhodecode.config.middleware`.
232 """
232 """
233 from paste.deploy.loadwsgi import loadcontext, APP
233 from paste.deploy.loadwsgi import loadcontext, APP
234 from rhodecode.config.middleware import (
234 from rhodecode.config.middleware import (
235 sanitize_settings_and_apply_defaults)
235 sanitize_settings_and_apply_defaults)
236 context = loadcontext(APP, 'config:' + pylons_config)
236 context = loadcontext(APP, 'config:' + pylons_config)
237 settings = sanitize_settings_and_apply_defaults(context.config())
237 settings = sanitize_settings_and_apply_defaults(context.config())
238 return settings
238 return settings
239
239
240
240
241 @pytest.fixture(scope='session')
241 @pytest.fixture(scope='session')
242 def db(app_settings):
242 def db(app_settings):
243 """
243 """
244 Initializes the database connection.
244 Initializes the database connection.
245
245
246 It uses the same settings which are used to create the ``pylonsapp`` or
246 It uses the same settings which are used to create the ``pylonsapp`` or
247 ``app`` fixtures.
247 ``app`` fixtures.
248 """
248 """
249 from rhodecode.config.utils import initialize_database
249 from rhodecode.config.utils import initialize_database
250 initialize_database(app_settings)
250 initialize_database(app_settings)
251
251
252
252
253 LoginData = collections.namedtuple('LoginData', ('csrf_token', 'user'))
253 LoginData = collections.namedtuple('LoginData', ('csrf_token', 'user'))
254
254
255
255
256 def _autologin_user(app, *args):
256 def _autologin_user(app, *args):
257 session = login_user_session(app, *args)
257 session = login_user_session(app, *args)
258 csrf_token = rhodecode.lib.auth.get_csrf_token(session)
258 csrf_token = rhodecode.lib.auth.get_csrf_token(session)
259 return LoginData(csrf_token, session['rhodecode_user'])
259 return LoginData(csrf_token, session['rhodecode_user'])
260
260
261
261
262 @pytest.fixture
262 @pytest.fixture
263 def autologin_user(app):
263 def autologin_user(app):
264 """
264 """
265 Utility fixture which makes sure that the admin user is logged in
265 Utility fixture which makes sure that the admin user is logged in
266 """
266 """
267 return _autologin_user(app)
267 return _autologin_user(app)
268
268
269
269
270 @pytest.fixture
270 @pytest.fixture
271 def autologin_regular_user(app):
271 def autologin_regular_user(app):
272 """
272 """
273 Utility fixture which makes sure that the regular user is logged in
273 Utility fixture which makes sure that the regular user is logged in
274 """
274 """
275 return _autologin_user(
275 return _autologin_user(
276 app, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS)
276 app, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS)
277
277
278
278
279 @pytest.fixture(scope='function')
279 @pytest.fixture(scope='function')
280 def csrf_token(request, autologin_user):
280 def csrf_token(request, autologin_user):
281 return autologin_user.csrf_token
281 return autologin_user.csrf_token
282
282
283
283
284 @pytest.fixture(scope='function')
284 @pytest.fixture(scope='function')
285 def xhr_header(request):
285 def xhr_header(request):
286 return {'HTTP_X_REQUESTED_WITH': 'XMLHttpRequest'}
286 return {'HTTP_X_REQUESTED_WITH': 'XMLHttpRequest'}
287
287
288
288
289 @pytest.fixture
289 @pytest.fixture
290 def real_crypto_backend(monkeypatch):
290 def real_crypto_backend(monkeypatch):
291 """
291 """
292 Switch the production crypto backend on for this test.
292 Switch the production crypto backend on for this test.
293
293
294 During the test run the crypto backend is replaced with a faster
294 During the test run the crypto backend is replaced with a faster
295 implementation based on the MD5 algorithm.
295 implementation based on the MD5 algorithm.
296 """
296 """
297 monkeypatch.setattr(rhodecode, 'is_test', False)
297 monkeypatch.setattr(rhodecode, 'is_test', False)
298
298
299
299
300 @pytest.fixture(scope='class')
300 @pytest.fixture(scope='class')
301 def index_location(request, pylonsapp):
301 def index_location(request, pylonsapp):
302 index_location = pylonsapp.config['app_conf']['search.location']
302 index_location = pylonsapp.config['app_conf']['search.location']
303 if request.cls:
303 if request.cls:
304 request.cls.index_location = index_location
304 request.cls.index_location = index_location
305 return index_location
305 return index_location
306
306
307
307
308 @pytest.fixture(scope='session', autouse=True)
308 @pytest.fixture(scope='session', autouse=True)
309 def tests_tmp_path(request):
309 def tests_tmp_path(request):
310 """
310 """
311 Create temporary directory to be used during the test session.
311 Create temporary directory to be used during the test session.
312 """
312 """
313 if not os.path.exists(TESTS_TMP_PATH):
313 if not os.path.exists(TESTS_TMP_PATH):
314 os.makedirs(TESTS_TMP_PATH)
314 os.makedirs(TESTS_TMP_PATH)
315
315
316 if not request.config.getoption('--keep-tmp-path'):
316 if not request.config.getoption('--keep-tmp-path'):
317 @request.addfinalizer
317 @request.addfinalizer
318 def remove_tmp_path():
318 def remove_tmp_path():
319 shutil.rmtree(TESTS_TMP_PATH)
319 shutil.rmtree(TESTS_TMP_PATH)
320
320
321 return TESTS_TMP_PATH
321 return TESTS_TMP_PATH
322
322
323
323
324 @pytest.fixture(scope='session', autouse=True)
325 def patch_pyro_request_scope_proxy_factory(request):
326 """
327 Patch the pyro proxy factory to always use the same dummy request object
328 when under test. This will return the same pyro proxy on every call.
329 """
330 dummy_request = pyramid.testing.DummyRequest()
331
332 def mocked_call(self, request=None):
333 return self.getProxy(request=dummy_request)
334
335 patcher = mock.patch(
336 'rhodecode.lib.vcs.client.RequestScopeProxyFactory.__call__',
337 new=mocked_call)
338 patcher.start()
339
340 @request.addfinalizer
341 def undo_patching():
342 patcher.stop()
343
344
345 @pytest.fixture
324 @pytest.fixture
346 def test_repo_group(request):
325 def test_repo_group(request):
347 """
326 """
348 Create a temporary repository group, and destroy it after
327 Create a temporary repository group, and destroy it after
349 usage automatically
328 usage automatically
350 """
329 """
351 fixture = Fixture()
330 fixture = Fixture()
352 repogroupid = 'test_repo_group_%s' % int(time.time())
331 repogroupid = 'test_repo_group_%s' % int(time.time())
353 repo_group = fixture.create_repo_group(repogroupid)
332 repo_group = fixture.create_repo_group(repogroupid)
354
333
355 def _cleanup():
334 def _cleanup():
356 fixture.destroy_repo_group(repogroupid)
335 fixture.destroy_repo_group(repogroupid)
357
336
358 request.addfinalizer(_cleanup)
337 request.addfinalizer(_cleanup)
359 return repo_group
338 return repo_group
360
339
361
340
362 @pytest.fixture
341 @pytest.fixture
363 def test_user_group(request):
342 def test_user_group(request):
364 """
343 """
365 Create a temporary user group, and destroy it after
344 Create a temporary user group, and destroy it after
366 usage automatically
345 usage automatically
367 """
346 """
368 fixture = Fixture()
347 fixture = Fixture()
369 usergroupid = 'test_user_group_%s' % int(time.time())
348 usergroupid = 'test_user_group_%s' % int(time.time())
370 user_group = fixture.create_user_group(usergroupid)
349 user_group = fixture.create_user_group(usergroupid)
371
350
372 def _cleanup():
351 def _cleanup():
373 fixture.destroy_user_group(user_group)
352 fixture.destroy_user_group(user_group)
374
353
375 request.addfinalizer(_cleanup)
354 request.addfinalizer(_cleanup)
376 return user_group
355 return user_group
377
356
378
357
379 @pytest.fixture(scope='session')
358 @pytest.fixture(scope='session')
380 def test_repo(request):
359 def test_repo(request):
381 container = TestRepoContainer()
360 container = TestRepoContainer()
382 request.addfinalizer(container._cleanup)
361 request.addfinalizer(container._cleanup)
383 return container
362 return container
384
363
385
364
386 class TestRepoContainer(object):
365 class TestRepoContainer(object):
387 """
366 """
388 Container for test repositories which are used read only.
367 Container for test repositories which are used read only.
389
368
390 Repositories will be created on demand and re-used during the lifetime
369 Repositories will be created on demand and re-used during the lifetime
391 of this object.
370 of this object.
392
371
393 Usage to get the svn test repository "minimal"::
372 Usage to get the svn test repository "minimal"::
394
373
395 test_repo = TestContainer()
374 test_repo = TestContainer()
396 repo = test_repo('minimal', 'svn')
375 repo = test_repo('minimal', 'svn')
397
376
398 """
377 """
399
378
400 dump_extractors = {
379 dump_extractors = {
401 'git': utils.extract_git_repo_from_dump,
380 'git': utils.extract_git_repo_from_dump,
402 'hg': utils.extract_hg_repo_from_dump,
381 'hg': utils.extract_hg_repo_from_dump,
403 'svn': utils.extract_svn_repo_from_dump,
382 'svn': utils.extract_svn_repo_from_dump,
404 }
383 }
405
384
406 def __init__(self):
385 def __init__(self):
407 self._cleanup_repos = []
386 self._cleanup_repos = []
408 self._fixture = Fixture()
387 self._fixture = Fixture()
409 self._repos = {}
388 self._repos = {}
410
389
411 def __call__(self, dump_name, backend_alias):
390 def __call__(self, dump_name, backend_alias):
412 key = (dump_name, backend_alias)
391 key = (dump_name, backend_alias)
413 if key not in self._repos:
392 if key not in self._repos:
414 repo = self._create_repo(dump_name, backend_alias)
393 repo = self._create_repo(dump_name, backend_alias)
415 self._repos[key] = repo.repo_id
394 self._repos[key] = repo.repo_id
416 return Repository.get(self._repos[key])
395 return Repository.get(self._repos[key])
417
396
418 def _create_repo(self, dump_name, backend_alias):
397 def _create_repo(self, dump_name, backend_alias):
419 repo_name = '%s-%s' % (backend_alias, dump_name)
398 repo_name = '%s-%s' % (backend_alias, dump_name)
420 backend_class = get_backend(backend_alias)
399 backend_class = get_backend(backend_alias)
421 dump_extractor = self.dump_extractors[backend_alias]
400 dump_extractor = self.dump_extractors[backend_alias]
422 repo_path = dump_extractor(dump_name, repo_name)
401 repo_path = dump_extractor(dump_name, repo_name)
423 vcs_repo = backend_class(repo_path)
402 vcs_repo = backend_class(repo_path)
424 repo2db_mapper({repo_name: vcs_repo})
403 repo2db_mapper({repo_name: vcs_repo})
425 repo = RepoModel().get_by_repo_name(repo_name)
404 repo = RepoModel().get_by_repo_name(repo_name)
426 self._cleanup_repos.append(repo_name)
405 self._cleanup_repos.append(repo_name)
427 return repo
406 return repo
428
407
429 def _cleanup(self):
408 def _cleanup(self):
430 for repo_name in reversed(self._cleanup_repos):
409 for repo_name in reversed(self._cleanup_repos):
431 self._fixture.destroy_repo(repo_name)
410 self._fixture.destroy_repo(repo_name)
432
411
433
412
434 @pytest.fixture
413 @pytest.fixture
435 def backend(request, backend_alias, pylonsapp, test_repo):
414 def backend(request, backend_alias, pylonsapp, test_repo):
436 """
415 """
437 Parametrized fixture which represents a single backend implementation.
416 Parametrized fixture which represents a single backend implementation.
438
417
439 It respects the option `--backends` to focus the test run on specific
418 It respects the option `--backends` to focus the test run on specific
440 backend implementations.
419 backend implementations.
441
420
442 It also supports `pytest.mark.xfail_backends` to mark tests as failing
421 It also supports `pytest.mark.xfail_backends` to mark tests as failing
443 for specific backends. This is intended as a utility for incremental
422 for specific backends. This is intended as a utility for incremental
444 development of a new backend implementation.
423 development of a new backend implementation.
445 """
424 """
446 if backend_alias not in request.config.getoption('--backends'):
425 if backend_alias not in request.config.getoption('--backends'):
447 pytest.skip("Backend %s not selected." % (backend_alias, ))
426 pytest.skip("Backend %s not selected." % (backend_alias, ))
448
427
449 utils.check_xfail_backends(request.node, backend_alias)
428 utils.check_xfail_backends(request.node, backend_alias)
450 utils.check_skip_backends(request.node, backend_alias)
429 utils.check_skip_backends(request.node, backend_alias)
451
430
452 repo_name = 'vcs_test_%s' % (backend_alias, )
431 repo_name = 'vcs_test_%s' % (backend_alias, )
453 backend = Backend(
432 backend = Backend(
454 alias=backend_alias,
433 alias=backend_alias,
455 repo_name=repo_name,
434 repo_name=repo_name,
456 test_name=request.node.name,
435 test_name=request.node.name,
457 test_repo_container=test_repo)
436 test_repo_container=test_repo)
458 request.addfinalizer(backend.cleanup)
437 request.addfinalizer(backend.cleanup)
459 return backend
438 return backend
460
439
461
440
462 @pytest.fixture
441 @pytest.fixture
463 def backend_git(request, pylonsapp, test_repo):
442 def backend_git(request, pylonsapp, test_repo):
464 return backend(request, 'git', pylonsapp, test_repo)
443 return backend(request, 'git', pylonsapp, test_repo)
465
444
466
445
467 @pytest.fixture
446 @pytest.fixture
468 def backend_hg(request, pylonsapp, test_repo):
447 def backend_hg(request, pylonsapp, test_repo):
469 return backend(request, 'hg', pylonsapp, test_repo)
448 return backend(request, 'hg', pylonsapp, test_repo)
470
449
471
450
472 @pytest.fixture
451 @pytest.fixture
473 def backend_svn(request, pylonsapp, test_repo):
452 def backend_svn(request, pylonsapp, test_repo):
474 return backend(request, 'svn', pylonsapp, test_repo)
453 return backend(request, 'svn', pylonsapp, test_repo)
475
454
476
455
477 @pytest.fixture
456 @pytest.fixture
478 def backend_random(backend_git):
457 def backend_random(backend_git):
479 """
458 """
480 Use this to express that your tests need "a backend.
459 Use this to express that your tests need "a backend.
481
460
482 A few of our tests need a backend, so that we can run the code. This
461 A few of our tests need a backend, so that we can run the code. This
483 fixture is intended to be used for such cases. It will pick one of the
462 fixture is intended to be used for such cases. It will pick one of the
484 backends and run the tests.
463 backends and run the tests.
485
464
486 The fixture `backend` would run the test multiple times for each
465 The fixture `backend` would run the test multiple times for each
487 available backend which is a pure waste of time if the test is
466 available backend which is a pure waste of time if the test is
488 independent of the backend type.
467 independent of the backend type.
489 """
468 """
490 # TODO: johbo: Change this to pick a random backend
469 # TODO: johbo: Change this to pick a random backend
491 return backend_git
470 return backend_git
492
471
493
472
494 @pytest.fixture
473 @pytest.fixture
495 def backend_stub(backend_git):
474 def backend_stub(backend_git):
496 """
475 """
497 Use this to express that your tests need a backend stub
476 Use this to express that your tests need a backend stub
498
477
499 TODO: mikhail: Implement a real stub logic instead of returning
478 TODO: mikhail: Implement a real stub logic instead of returning
500 a git backend
479 a git backend
501 """
480 """
502 return backend_git
481 return backend_git
503
482
504
483
505 @pytest.fixture
484 @pytest.fixture
506 def repo_stub(backend_stub):
485 def repo_stub(backend_stub):
507 """
486 """
508 Use this to express that your tests need a repository stub
487 Use this to express that your tests need a repository stub
509 """
488 """
510 return backend_stub.create_repo()
489 return backend_stub.create_repo()
511
490
512
491
513 class Backend(object):
492 class Backend(object):
514 """
493 """
515 Represents the test configuration for one supported backend
494 Represents the test configuration for one supported backend
516
495
517 Provides easy access to different test repositories based on
496 Provides easy access to different test repositories based on
518 `__getitem__`. Such repositories will only be created once per test
497 `__getitem__`. Such repositories will only be created once per test
519 session.
498 session.
520 """
499 """
521
500
522 invalid_repo_name = re.compile(r'[^0-9a-zA-Z]+')
501 invalid_repo_name = re.compile(r'[^0-9a-zA-Z]+')
523 _master_repo = None
502 _master_repo = None
524 _commit_ids = {}
503 _commit_ids = {}
525
504
526 def __init__(self, alias, repo_name, test_name, test_repo_container):
505 def __init__(self, alias, repo_name, test_name, test_repo_container):
527 self.alias = alias
506 self.alias = alias
528 self.repo_name = repo_name
507 self.repo_name = repo_name
529 self._cleanup_repos = []
508 self._cleanup_repos = []
530 self._test_name = test_name
509 self._test_name = test_name
531 self._test_repo_container = test_repo_container
510 self._test_repo_container = test_repo_container
532 # TODO: johbo: Used as a delegate interim. Not yet sure if Backend or
511 # TODO: johbo: Used as a delegate interim. Not yet sure if Backend or
533 # Fixture will survive in the end.
512 # Fixture will survive in the end.
534 self._fixture = Fixture()
513 self._fixture = Fixture()
535
514
536 def __getitem__(self, key):
515 def __getitem__(self, key):
537 return self._test_repo_container(key, self.alias)
516 return self._test_repo_container(key, self.alias)
538
517
539 @property
518 @property
540 def repo(self):
519 def repo(self):
541 """
520 """
542 Returns the "current" repository. This is the vcs_test repo or the
521 Returns the "current" repository. This is the vcs_test repo or the
543 last repo which has been created with `create_repo`.
522 last repo which has been created with `create_repo`.
544 """
523 """
545 from rhodecode.model.db import Repository
524 from rhodecode.model.db import Repository
546 return Repository.get_by_repo_name(self.repo_name)
525 return Repository.get_by_repo_name(self.repo_name)
547
526
548 @property
527 @property
549 def default_branch_name(self):
528 def default_branch_name(self):
550 VcsRepository = get_backend(self.alias)
529 VcsRepository = get_backend(self.alias)
551 return VcsRepository.DEFAULT_BRANCH_NAME
530 return VcsRepository.DEFAULT_BRANCH_NAME
552
531
553 @property
532 @property
554 def default_head_id(self):
533 def default_head_id(self):
555 """
534 """
556 Returns the default head id of the underlying backend.
535 Returns the default head id of the underlying backend.
557
536
558 This will be the default branch name in case the backend does have a
537 This will be the default branch name in case the backend does have a
559 default branch. In the other cases it will point to a valid head
538 default branch. In the other cases it will point to a valid head
560 which can serve as the base to create a new commit on top of it.
539 which can serve as the base to create a new commit on top of it.
561 """
540 """
562 vcsrepo = self.repo.scm_instance()
541 vcsrepo = self.repo.scm_instance()
563 head_id = (
542 head_id = (
564 vcsrepo.DEFAULT_BRANCH_NAME or
543 vcsrepo.DEFAULT_BRANCH_NAME or
565 vcsrepo.commit_ids[-1])
544 vcsrepo.commit_ids[-1])
566 return head_id
545 return head_id
567
546
568 @property
547 @property
569 def commit_ids(self):
548 def commit_ids(self):
570 """
549 """
571 Returns the list of commits for the last created repository
550 Returns the list of commits for the last created repository
572 """
551 """
573 return self._commit_ids
552 return self._commit_ids
574
553
575 def create_master_repo(self, commits):
554 def create_master_repo(self, commits):
576 """
555 """
577 Create a repository and remember it as a template.
556 Create a repository and remember it as a template.
578
557
579 This allows to easily create derived repositories to construct
558 This allows to easily create derived repositories to construct
580 more complex scenarios for diff, compare and pull requests.
559 more complex scenarios for diff, compare and pull requests.
581
560
582 Returns a commit map which maps from commit message to raw_id.
561 Returns a commit map which maps from commit message to raw_id.
583 """
562 """
584 self._master_repo = self.create_repo(commits=commits)
563 self._master_repo = self.create_repo(commits=commits)
585 return self._commit_ids
564 return self._commit_ids
586
565
587 def create_repo(
566 def create_repo(
588 self, commits=None, number_of_commits=0, heads=None,
567 self, commits=None, number_of_commits=0, heads=None,
589 name_suffix=u'', **kwargs):
568 name_suffix=u'', **kwargs):
590 """
569 """
591 Create a repository and record it for later cleanup.
570 Create a repository and record it for later cleanup.
592
571
593 :param commits: Optional. A sequence of dict instances.
572 :param commits: Optional. A sequence of dict instances.
594 Will add a commit per entry to the new repository.
573 Will add a commit per entry to the new repository.
595 :param number_of_commits: Optional. If set to a number, this number of
574 :param number_of_commits: Optional. If set to a number, this number of
596 commits will be added to the new repository.
575 commits will be added to the new repository.
597 :param heads: Optional. Can be set to a sequence of of commit
576 :param heads: Optional. Can be set to a sequence of of commit
598 names which shall be pulled in from the master repository.
577 names which shall be pulled in from the master repository.
599
578
600 """
579 """
601 self.repo_name = self._next_repo_name() + name_suffix
580 self.repo_name = self._next_repo_name() + name_suffix
602 repo = self._fixture.create_repo(
581 repo = self._fixture.create_repo(
603 self.repo_name, repo_type=self.alias, **kwargs)
582 self.repo_name, repo_type=self.alias, **kwargs)
604 self._cleanup_repos.append(repo.repo_name)
583 self._cleanup_repos.append(repo.repo_name)
605
584
606 commits = commits or [
585 commits = commits or [
607 {'message': 'Commit %s of %s' % (x, self.repo_name)}
586 {'message': 'Commit %s of %s' % (x, self.repo_name)}
608 for x in xrange(number_of_commits)]
587 for x in xrange(number_of_commits)]
609 self._add_commits_to_repo(repo.scm_instance(), commits)
588 self._add_commits_to_repo(repo.scm_instance(), commits)
610 if heads:
589 if heads:
611 self.pull_heads(repo, heads)
590 self.pull_heads(repo, heads)
612
591
613 return repo
592 return repo
614
593
615 def pull_heads(self, repo, heads):
594 def pull_heads(self, repo, heads):
616 """
595 """
617 Make sure that repo contains all commits mentioned in `heads`
596 Make sure that repo contains all commits mentioned in `heads`
618 """
597 """
619 vcsmaster = self._master_repo.scm_instance()
598 vcsmaster = self._master_repo.scm_instance()
620 vcsrepo = repo.scm_instance()
599 vcsrepo = repo.scm_instance()
621 vcsrepo.config.clear_section('hooks')
600 vcsrepo.config.clear_section('hooks')
622 commit_ids = [self._commit_ids[h] for h in heads]
601 commit_ids = [self._commit_ids[h] for h in heads]
623 vcsrepo.pull(vcsmaster.path, commit_ids=commit_ids)
602 vcsrepo.pull(vcsmaster.path, commit_ids=commit_ids)
624
603
625 def create_fork(self):
604 def create_fork(self):
626 repo_to_fork = self.repo_name
605 repo_to_fork = self.repo_name
627 self.repo_name = self._next_repo_name()
606 self.repo_name = self._next_repo_name()
628 repo = self._fixture.create_fork(repo_to_fork, self.repo_name)
607 repo = self._fixture.create_fork(repo_to_fork, self.repo_name)
629 self._cleanup_repos.append(self.repo_name)
608 self._cleanup_repos.append(self.repo_name)
630 return repo
609 return repo
631
610
632 def new_repo_name(self, suffix=u''):
611 def new_repo_name(self, suffix=u''):
633 self.repo_name = self._next_repo_name() + suffix
612 self.repo_name = self._next_repo_name() + suffix
634 self._cleanup_repos.append(self.repo_name)
613 self._cleanup_repos.append(self.repo_name)
635 return self.repo_name
614 return self.repo_name
636
615
637 def _next_repo_name(self):
616 def _next_repo_name(self):
638 return u"%s_%s" % (
617 return u"%s_%s" % (
639 self.invalid_repo_name.sub(u'_', self._test_name),
618 self.invalid_repo_name.sub(u'_', self._test_name),
640 len(self._cleanup_repos))
619 len(self._cleanup_repos))
641
620
642 def ensure_file(self, filename, content='Test content\n'):
621 def ensure_file(self, filename, content='Test content\n'):
643 assert self._cleanup_repos, "Avoid writing into vcs_test repos"
622 assert self._cleanup_repos, "Avoid writing into vcs_test repos"
644 commits = [
623 commits = [
645 {'added': [
624 {'added': [
646 FileNode(filename, content=content),
625 FileNode(filename, content=content),
647 ]},
626 ]},
648 ]
627 ]
649 self._add_commits_to_repo(self.repo.scm_instance(), commits)
628 self._add_commits_to_repo(self.repo.scm_instance(), commits)
650
629
651 def enable_downloads(self):
630 def enable_downloads(self):
652 repo = self.repo
631 repo = self.repo
653 repo.enable_downloads = True
632 repo.enable_downloads = True
654 Session().add(repo)
633 Session().add(repo)
655 Session().commit()
634 Session().commit()
656
635
657 def cleanup(self):
636 def cleanup(self):
658 for repo_name in reversed(self._cleanup_repos):
637 for repo_name in reversed(self._cleanup_repos):
659 self._fixture.destroy_repo(repo_name)
638 self._fixture.destroy_repo(repo_name)
660
639
661 def _add_commits_to_repo(self, repo, commits):
640 def _add_commits_to_repo(self, repo, commits):
662 commit_ids = _add_commits_to_repo(repo, commits)
641 commit_ids = _add_commits_to_repo(repo, commits)
663 if not commit_ids:
642 if not commit_ids:
664 return
643 return
665 self._commit_ids = commit_ids
644 self._commit_ids = commit_ids
666
645
667 # Creating refs for Git to allow fetching them from remote repository
646 # Creating refs for Git to allow fetching them from remote repository
668 if self.alias == 'git':
647 if self.alias == 'git':
669 refs = {}
648 refs = {}
670 for message in self._commit_ids:
649 for message in self._commit_ids:
671 # TODO: mikhail: do more special chars replacements
650 # TODO: mikhail: do more special chars replacements
672 ref_name = 'refs/test-refs/{}'.format(
651 ref_name = 'refs/test-refs/{}'.format(
673 message.replace(' ', ''))
652 message.replace(' ', ''))
674 refs[ref_name] = self._commit_ids[message]
653 refs[ref_name] = self._commit_ids[message]
675 self._create_refs(repo, refs)
654 self._create_refs(repo, refs)
676
655
677 def _create_refs(self, repo, refs):
656 def _create_refs(self, repo, refs):
678 for ref_name in refs:
657 for ref_name in refs:
679 repo.set_refs(ref_name, refs[ref_name])
658 repo.set_refs(ref_name, refs[ref_name])
680
659
681
660
682 @pytest.fixture
661 @pytest.fixture
683 def vcsbackend(request, backend_alias, tests_tmp_path, pylonsapp, test_repo):
662 def vcsbackend(request, backend_alias, tests_tmp_path, pylonsapp, test_repo):
684 """
663 """
685 Parametrized fixture which represents a single vcs backend implementation.
664 Parametrized fixture which represents a single vcs backend implementation.
686
665
687 See the fixture `backend` for more details. This one implements the same
666 See the fixture `backend` for more details. This one implements the same
688 concept, but on vcs level. So it does not provide model instances etc.
667 concept, but on vcs level. So it does not provide model instances etc.
689
668
690 Parameters are generated dynamically, see :func:`pytest_generate_tests`
669 Parameters are generated dynamically, see :func:`pytest_generate_tests`
691 for how this works.
670 for how this works.
692 """
671 """
693 if backend_alias not in request.config.getoption('--backends'):
672 if backend_alias not in request.config.getoption('--backends'):
694 pytest.skip("Backend %s not selected." % (backend_alias, ))
673 pytest.skip("Backend %s not selected." % (backend_alias, ))
695
674
696 utils.check_xfail_backends(request.node, backend_alias)
675 utils.check_xfail_backends(request.node, backend_alias)
697 utils.check_skip_backends(request.node, backend_alias)
676 utils.check_skip_backends(request.node, backend_alias)
698
677
699 repo_name = 'vcs_test_%s' % (backend_alias, )
678 repo_name = 'vcs_test_%s' % (backend_alias, )
700 repo_path = os.path.join(tests_tmp_path, repo_name)
679 repo_path = os.path.join(tests_tmp_path, repo_name)
701 backend = VcsBackend(
680 backend = VcsBackend(
702 alias=backend_alias,
681 alias=backend_alias,
703 repo_path=repo_path,
682 repo_path=repo_path,
704 test_name=request.node.name,
683 test_name=request.node.name,
705 test_repo_container=test_repo)
684 test_repo_container=test_repo)
706 request.addfinalizer(backend.cleanup)
685 request.addfinalizer(backend.cleanup)
707 return backend
686 return backend
708
687
709
688
710 @pytest.fixture
689 @pytest.fixture
711 def vcsbackend_git(request, tests_tmp_path, pylonsapp, test_repo):
690 def vcsbackend_git(request, tests_tmp_path, pylonsapp, test_repo):
712 return vcsbackend(request, 'git', tests_tmp_path, pylonsapp, test_repo)
691 return vcsbackend(request, 'git', tests_tmp_path, pylonsapp, test_repo)
713
692
714
693
715 @pytest.fixture
694 @pytest.fixture
716 def vcsbackend_hg(request, tests_tmp_path, pylonsapp, test_repo):
695 def vcsbackend_hg(request, tests_tmp_path, pylonsapp, test_repo):
717 return vcsbackend(request, 'hg', tests_tmp_path, pylonsapp, test_repo)
696 return vcsbackend(request, 'hg', tests_tmp_path, pylonsapp, test_repo)
718
697
719
698
720 @pytest.fixture
699 @pytest.fixture
721 def vcsbackend_svn(request, tests_tmp_path, pylonsapp, test_repo):
700 def vcsbackend_svn(request, tests_tmp_path, pylonsapp, test_repo):
722 return vcsbackend(request, 'svn', tests_tmp_path, pylonsapp, test_repo)
701 return vcsbackend(request, 'svn', tests_tmp_path, pylonsapp, test_repo)
723
702
724
703
725 @pytest.fixture
704 @pytest.fixture
726 def vcsbackend_random(vcsbackend_git):
705 def vcsbackend_random(vcsbackend_git):
727 """
706 """
728 Use this to express that your tests need "a vcsbackend".
707 Use this to express that your tests need "a vcsbackend".
729
708
730 The fixture `vcsbackend` would run the test multiple times for each
709 The fixture `vcsbackend` would run the test multiple times for each
731 available vcs backend which is a pure waste of time if the test is
710 available vcs backend which is a pure waste of time if the test is
732 independent of the vcs backend type.
711 independent of the vcs backend type.
733 """
712 """
734 # TODO: johbo: Change this to pick a random backend
713 # TODO: johbo: Change this to pick a random backend
735 return vcsbackend_git
714 return vcsbackend_git
736
715
737
716
738 @pytest.fixture
717 @pytest.fixture
739 def vcsbackend_stub(vcsbackend_git):
718 def vcsbackend_stub(vcsbackend_git):
740 """
719 """
741 Use this to express that your test just needs a stub of a vcsbackend.
720 Use this to express that your test just needs a stub of a vcsbackend.
742
721
743 Plan is to eventually implement an in-memory stub to speed tests up.
722 Plan is to eventually implement an in-memory stub to speed tests up.
744 """
723 """
745 return vcsbackend_git
724 return vcsbackend_git
746
725
747
726
748 class VcsBackend(object):
727 class VcsBackend(object):
749 """
728 """
750 Represents the test configuration for one supported vcs backend.
729 Represents the test configuration for one supported vcs backend.
751 """
730 """
752
731
753 invalid_repo_name = re.compile(r'[^0-9a-zA-Z]+')
732 invalid_repo_name = re.compile(r'[^0-9a-zA-Z]+')
754
733
755 def __init__(self, alias, repo_path, test_name, test_repo_container):
734 def __init__(self, alias, repo_path, test_name, test_repo_container):
756 self.alias = alias
735 self.alias = alias
757 self._repo_path = repo_path
736 self._repo_path = repo_path
758 self._cleanup_repos = []
737 self._cleanup_repos = []
759 self._test_name = test_name
738 self._test_name = test_name
760 self._test_repo_container = test_repo_container
739 self._test_repo_container = test_repo_container
761
740
762 def __getitem__(self, key):
741 def __getitem__(self, key):
763 return self._test_repo_container(key, self.alias).scm_instance()
742 return self._test_repo_container(key, self.alias).scm_instance()
764
743
765 @property
744 @property
766 def repo(self):
745 def repo(self):
767 """
746 """
768 Returns the "current" repository. This is the vcs_test repo of the last
747 Returns the "current" repository. This is the vcs_test repo of the last
769 repo which has been created.
748 repo which has been created.
770 """
749 """
771 Repository = get_backend(self.alias)
750 Repository = get_backend(self.alias)
772 return Repository(self._repo_path)
751 return Repository(self._repo_path)
773
752
774 @property
753 @property
775 def backend(self):
754 def backend(self):
776 """
755 """
777 Returns the backend implementation class.
756 Returns the backend implementation class.
778 """
757 """
779 return get_backend(self.alias)
758 return get_backend(self.alias)
780
759
781 def create_repo(self, commits=None, number_of_commits=0, _clone_repo=None):
760 def create_repo(self, commits=None, number_of_commits=0, _clone_repo=None):
782 repo_name = self._next_repo_name()
761 repo_name = self._next_repo_name()
783 self._repo_path = get_new_dir(repo_name)
762 self._repo_path = get_new_dir(repo_name)
784 repo_class = get_backend(self.alias)
763 repo_class = get_backend(self.alias)
785 src_url = None
764 src_url = None
786 if _clone_repo:
765 if _clone_repo:
787 src_url = _clone_repo.path
766 src_url = _clone_repo.path
788 repo = repo_class(self._repo_path, create=True, src_url=src_url)
767 repo = repo_class(self._repo_path, create=True, src_url=src_url)
789 self._cleanup_repos.append(repo)
768 self._cleanup_repos.append(repo)
790
769
791 commits = commits or [
770 commits = commits or [
792 {'message': 'Commit %s of %s' % (x, repo_name)}
771 {'message': 'Commit %s of %s' % (x, repo_name)}
793 for x in xrange(number_of_commits)]
772 for x in xrange(number_of_commits)]
794 _add_commits_to_repo(repo, commits)
773 _add_commits_to_repo(repo, commits)
795 return repo
774 return repo
796
775
797 def clone_repo(self, repo):
776 def clone_repo(self, repo):
798 return self.create_repo(_clone_repo=repo)
777 return self.create_repo(_clone_repo=repo)
799
778
800 def cleanup(self):
779 def cleanup(self):
801 for repo in self._cleanup_repos:
780 for repo in self._cleanup_repos:
802 shutil.rmtree(repo.path)
781 shutil.rmtree(repo.path)
803
782
804 def new_repo_path(self):
783 def new_repo_path(self):
805 repo_name = self._next_repo_name()
784 repo_name = self._next_repo_name()
806 self._repo_path = get_new_dir(repo_name)
785 self._repo_path = get_new_dir(repo_name)
807 return self._repo_path
786 return self._repo_path
808
787
809 def _next_repo_name(self):
788 def _next_repo_name(self):
810 return "%s_%s" % (
789 return "%s_%s" % (
811 self.invalid_repo_name.sub('_', self._test_name),
790 self.invalid_repo_name.sub('_', self._test_name),
812 len(self._cleanup_repos))
791 len(self._cleanup_repos))
813
792
814 def add_file(self, repo, filename, content='Test content\n'):
793 def add_file(self, repo, filename, content='Test content\n'):
815 imc = repo.in_memory_commit
794 imc = repo.in_memory_commit
816 imc.add(FileNode(filename, content=content))
795 imc.add(FileNode(filename, content=content))
817 imc.commit(
796 imc.commit(
818 message=u'Automatic commit from vcsbackend fixture',
797 message=u'Automatic commit from vcsbackend fixture',
819 author=u'Automatic')
798 author=u'Automatic')
820
799
821 def ensure_file(self, filename, content='Test content\n'):
800 def ensure_file(self, filename, content='Test content\n'):
822 assert self._cleanup_repos, "Avoid writing into vcs_test repos"
801 assert self._cleanup_repos, "Avoid writing into vcs_test repos"
823 self.add_file(self.repo, filename, content)
802 self.add_file(self.repo, filename, content)
824
803
825
804
826 def _add_commits_to_repo(vcs_repo, commits):
805 def _add_commits_to_repo(vcs_repo, commits):
827 commit_ids = {}
806 commit_ids = {}
828 if not commits:
807 if not commits:
829 return commit_ids
808 return commit_ids
830
809
831 imc = vcs_repo.in_memory_commit
810 imc = vcs_repo.in_memory_commit
832 commit = None
811 commit = None
833
812
834 for idx, commit in enumerate(commits):
813 for idx, commit in enumerate(commits):
835 message = unicode(commit.get('message', 'Commit %s' % idx))
814 message = unicode(commit.get('message', 'Commit %s' % idx))
836
815
837 for node in commit.get('added', []):
816 for node in commit.get('added', []):
838 imc.add(FileNode(node.path, content=node.content))
817 imc.add(FileNode(node.path, content=node.content))
839 for node in commit.get('changed', []):
818 for node in commit.get('changed', []):
840 imc.change(FileNode(node.path, content=node.content))
819 imc.change(FileNode(node.path, content=node.content))
841 for node in commit.get('removed', []):
820 for node in commit.get('removed', []):
842 imc.remove(FileNode(node.path))
821 imc.remove(FileNode(node.path))
843
822
844 parents = [
823 parents = [
845 vcs_repo.get_commit(commit_id=commit_ids[p])
824 vcs_repo.get_commit(commit_id=commit_ids[p])
846 for p in commit.get('parents', [])]
825 for p in commit.get('parents', [])]
847
826
848 operations = ('added', 'changed', 'removed')
827 operations = ('added', 'changed', 'removed')
849 if not any((commit.get(o) for o in operations)):
828 if not any((commit.get(o) for o in operations)):
850 imc.add(FileNode('file_%s' % idx, content=message))
829 imc.add(FileNode('file_%s' % idx, content=message))
851
830
852 commit = imc.commit(
831 commit = imc.commit(
853 message=message,
832 message=message,
854 author=unicode(commit.get('author', 'Automatic')),
833 author=unicode(commit.get('author', 'Automatic')),
855 date=commit.get('date'),
834 date=commit.get('date'),
856 branch=commit.get('branch'),
835 branch=commit.get('branch'),
857 parents=parents)
836 parents=parents)
858
837
859 commit_ids[commit.message] = commit.raw_id
838 commit_ids[commit.message] = commit.raw_id
860
839
861 return commit_ids
840 return commit_ids
862
841
863
842
864 @pytest.fixture
843 @pytest.fixture
865 def reposerver(request):
844 def reposerver(request):
866 """
845 """
867 Allows to serve a backend repository
846 Allows to serve a backend repository
868 """
847 """
869
848
870 repo_server = RepoServer()
849 repo_server = RepoServer()
871 request.addfinalizer(repo_server.cleanup)
850 request.addfinalizer(repo_server.cleanup)
872 return repo_server
851 return repo_server
873
852
874
853
875 class RepoServer(object):
854 class RepoServer(object):
876 """
855 """
877 Utility to serve a local repository for the duration of a test case.
856 Utility to serve a local repository for the duration of a test case.
878
857
879 Supports only Subversion so far.
858 Supports only Subversion so far.
880 """
859 """
881
860
882 url = None
861 url = None
883
862
884 def __init__(self):
863 def __init__(self):
885 self._cleanup_servers = []
864 self._cleanup_servers = []
886
865
887 def serve(self, vcsrepo):
866 def serve(self, vcsrepo):
888 if vcsrepo.alias != 'svn':
867 if vcsrepo.alias != 'svn':
889 raise TypeError("Backend %s not supported" % vcsrepo.alias)
868 raise TypeError("Backend %s not supported" % vcsrepo.alias)
890
869
891 proc = subprocess32.Popen(
870 proc = subprocess32.Popen(
892 ['svnserve', '-d', '--foreground', '--listen-host', 'localhost',
871 ['svnserve', '-d', '--foreground', '--listen-host', 'localhost',
893 '--root', vcsrepo.path])
872 '--root', vcsrepo.path])
894 self._cleanup_servers.append(proc)
873 self._cleanup_servers.append(proc)
895 self.url = 'svn://localhost'
874 self.url = 'svn://localhost'
896
875
897 def cleanup(self):
876 def cleanup(self):
898 for proc in self._cleanup_servers:
877 for proc in self._cleanup_servers:
899 proc.terminate()
878 proc.terminate()
900
879
901
880
902 @pytest.fixture
881 @pytest.fixture
903 def pr_util(backend, request):
882 def pr_util(backend, request):
904 """
883 """
905 Utility for tests of models and for functional tests around pull requests.
884 Utility for tests of models and for functional tests around pull requests.
906
885
907 It gives an instance of :class:`PRTestUtility` which provides various
886 It gives an instance of :class:`PRTestUtility` which provides various
908 utility methods around one pull request.
887 utility methods around one pull request.
909
888
910 This fixture uses `backend` and inherits its parameterization.
889 This fixture uses `backend` and inherits its parameterization.
911 """
890 """
912
891
913 util = PRTestUtility(backend)
892 util = PRTestUtility(backend)
914
893
915 @request.addfinalizer
894 @request.addfinalizer
916 def cleanup():
895 def cleanup():
917 util.cleanup()
896 util.cleanup()
918
897
919 return util
898 return util
920
899
921
900
922 class PRTestUtility(object):
901 class PRTestUtility(object):
923
902
924 pull_request = None
903 pull_request = None
925 pull_request_id = None
904 pull_request_id = None
926 mergeable_patcher = None
905 mergeable_patcher = None
927 mergeable_mock = None
906 mergeable_mock = None
928 notification_patcher = None
907 notification_patcher = None
929
908
930 def __init__(self, backend):
909 def __init__(self, backend):
931 self.backend = backend
910 self.backend = backend
932
911
933 def create_pull_request(
912 def create_pull_request(
934 self, commits=None, target_head=None, source_head=None,
913 self, commits=None, target_head=None, source_head=None,
935 revisions=None, approved=False, author=None, mergeable=False,
914 revisions=None, approved=False, author=None, mergeable=False,
936 enable_notifications=True, name_suffix=u'', reviewers=None,
915 enable_notifications=True, name_suffix=u'', reviewers=None,
937 title=u"Test", description=u"Description"):
916 title=u"Test", description=u"Description"):
938 self.set_mergeable(mergeable)
917 self.set_mergeable(mergeable)
939 if not enable_notifications:
918 if not enable_notifications:
940 # mock notification side effect
919 # mock notification side effect
941 self.notification_patcher = mock.patch(
920 self.notification_patcher = mock.patch(
942 'rhodecode.model.notification.NotificationModel.create')
921 'rhodecode.model.notification.NotificationModel.create')
943 self.notification_patcher.start()
922 self.notification_patcher.start()
944
923
945 if not self.pull_request:
924 if not self.pull_request:
946 if not commits:
925 if not commits:
947 commits = [
926 commits = [
948 {'message': 'c1'},
927 {'message': 'c1'},
949 {'message': 'c2'},
928 {'message': 'c2'},
950 {'message': 'c3'},
929 {'message': 'c3'},
951 ]
930 ]
952 target_head = 'c1'
931 target_head = 'c1'
953 source_head = 'c2'
932 source_head = 'c2'
954 revisions = ['c2']
933 revisions = ['c2']
955
934
956 self.commit_ids = self.backend.create_master_repo(commits)
935 self.commit_ids = self.backend.create_master_repo(commits)
957 self.target_repository = self.backend.create_repo(
936 self.target_repository = self.backend.create_repo(
958 heads=[target_head], name_suffix=name_suffix)
937 heads=[target_head], name_suffix=name_suffix)
959 self.source_repository = self.backend.create_repo(
938 self.source_repository = self.backend.create_repo(
960 heads=[source_head], name_suffix=name_suffix)
939 heads=[source_head], name_suffix=name_suffix)
961 self.author = author or UserModel().get_by_username(
940 self.author = author or UserModel().get_by_username(
962 TEST_USER_ADMIN_LOGIN)
941 TEST_USER_ADMIN_LOGIN)
963
942
964 model = PullRequestModel()
943 model = PullRequestModel()
965 self.create_parameters = {
944 self.create_parameters = {
966 'created_by': self.author,
945 'created_by': self.author,
967 'source_repo': self.source_repository.repo_name,
946 'source_repo': self.source_repository.repo_name,
968 'source_ref': self._default_branch_reference(source_head),
947 'source_ref': self._default_branch_reference(source_head),
969 'target_repo': self.target_repository.repo_name,
948 'target_repo': self.target_repository.repo_name,
970 'target_ref': self._default_branch_reference(target_head),
949 'target_ref': self._default_branch_reference(target_head),
971 'revisions': [self.commit_ids[r] for r in revisions],
950 'revisions': [self.commit_ids[r] for r in revisions],
972 'reviewers': reviewers or self._get_reviewers(),
951 'reviewers': reviewers or self._get_reviewers(),
973 'title': title,
952 'title': title,
974 'description': description,
953 'description': description,
975 }
954 }
976 self.pull_request = model.create(**self.create_parameters)
955 self.pull_request = model.create(**self.create_parameters)
977 assert model.get_versions(self.pull_request) == []
956 assert model.get_versions(self.pull_request) == []
978
957
979 self.pull_request_id = self.pull_request.pull_request_id
958 self.pull_request_id = self.pull_request.pull_request_id
980
959
981 if approved:
960 if approved:
982 self.approve()
961 self.approve()
983
962
984 Session().add(self.pull_request)
963 Session().add(self.pull_request)
985 Session().commit()
964 Session().commit()
986
965
987 return self.pull_request
966 return self.pull_request
988
967
989 def approve(self):
968 def approve(self):
990 self.create_status_votes(
969 self.create_status_votes(
991 ChangesetStatus.STATUS_APPROVED,
970 ChangesetStatus.STATUS_APPROVED,
992 *self.pull_request.reviewers)
971 *self.pull_request.reviewers)
993
972
994 def close(self):
973 def close(self):
995 PullRequestModel().close_pull_request(self.pull_request, self.author)
974 PullRequestModel().close_pull_request(self.pull_request, self.author)
996
975
997 def _default_branch_reference(self, commit_message):
976 def _default_branch_reference(self, commit_message):
998 reference = '%s:%s:%s' % (
977 reference = '%s:%s:%s' % (
999 'branch',
978 'branch',
1000 self.backend.default_branch_name,
979 self.backend.default_branch_name,
1001 self.commit_ids[commit_message])
980 self.commit_ids[commit_message])
1002 return reference
981 return reference
1003
982
1004 def _get_reviewers(self):
983 def _get_reviewers(self):
1005 model = UserModel()
984 model = UserModel()
1006 return [
985 return [
1007 model.get_by_username(TEST_USER_REGULAR_LOGIN),
986 model.get_by_username(TEST_USER_REGULAR_LOGIN),
1008 model.get_by_username(TEST_USER_REGULAR2_LOGIN),
987 model.get_by_username(TEST_USER_REGULAR2_LOGIN),
1009 ]
988 ]
1010
989
1011 def update_source_repository(self, head=None):
990 def update_source_repository(self, head=None):
1012 heads = [head or 'c3']
991 heads = [head or 'c3']
1013 self.backend.pull_heads(self.source_repository, heads=heads)
992 self.backend.pull_heads(self.source_repository, heads=heads)
1014
993
1015 def add_one_commit(self, head=None):
994 def add_one_commit(self, head=None):
1016 self.update_source_repository(head=head)
995 self.update_source_repository(head=head)
1017 old_commit_ids = set(self.pull_request.revisions)
996 old_commit_ids = set(self.pull_request.revisions)
1018 PullRequestModel().update_commits(self.pull_request)
997 PullRequestModel().update_commits(self.pull_request)
1019 commit_ids = set(self.pull_request.revisions)
998 commit_ids = set(self.pull_request.revisions)
1020 new_commit_ids = commit_ids - old_commit_ids
999 new_commit_ids = commit_ids - old_commit_ids
1021 assert len(new_commit_ids) == 1
1000 assert len(new_commit_ids) == 1
1022 return new_commit_ids.pop()
1001 return new_commit_ids.pop()
1023
1002
1024 def remove_one_commit(self):
1003 def remove_one_commit(self):
1025 assert len(self.pull_request.revisions) == 2
1004 assert len(self.pull_request.revisions) == 2
1026 source_vcs = self.source_repository.scm_instance()
1005 source_vcs = self.source_repository.scm_instance()
1027 removed_commit_id = source_vcs.commit_ids[-1]
1006 removed_commit_id = source_vcs.commit_ids[-1]
1028
1007
1029 # TODO: johbo: Git and Mercurial have an inconsistent vcs api here,
1008 # TODO: johbo: Git and Mercurial have an inconsistent vcs api here,
1030 # remove the if once that's sorted out.
1009 # remove the if once that's sorted out.
1031 if self.backend.alias == "git":
1010 if self.backend.alias == "git":
1032 kwargs = {'branch_name': self.backend.default_branch_name}
1011 kwargs = {'branch_name': self.backend.default_branch_name}
1033 else:
1012 else:
1034 kwargs = {}
1013 kwargs = {}
1035 source_vcs.strip(removed_commit_id, **kwargs)
1014 source_vcs.strip(removed_commit_id, **kwargs)
1036
1015
1037 PullRequestModel().update_commits(self.pull_request)
1016 PullRequestModel().update_commits(self.pull_request)
1038 assert len(self.pull_request.revisions) == 1
1017 assert len(self.pull_request.revisions) == 1
1039 return removed_commit_id
1018 return removed_commit_id
1040
1019
1041 def create_comment(self, linked_to=None):
1020 def create_comment(self, linked_to=None):
1042 comment = CommentsModel().create(
1021 comment = CommentsModel().create(
1043 text=u"Test comment",
1022 text=u"Test comment",
1044 repo=self.target_repository.repo_name,
1023 repo=self.target_repository.repo_name,
1045 user=self.author,
1024 user=self.author,
1046 pull_request=self.pull_request)
1025 pull_request=self.pull_request)
1047 assert comment.pull_request_version_id is None
1026 assert comment.pull_request_version_id is None
1048
1027
1049 if linked_to:
1028 if linked_to:
1050 PullRequestModel()._link_comments_to_version(linked_to)
1029 PullRequestModel()._link_comments_to_version(linked_to)
1051
1030
1052 return comment
1031 return comment
1053
1032
1054 def create_inline_comment(
1033 def create_inline_comment(
1055 self, linked_to=None, line_no=u'n1', file_path='file_1'):
1034 self, linked_to=None, line_no=u'n1', file_path='file_1'):
1056 comment = CommentsModel().create(
1035 comment = CommentsModel().create(
1057 text=u"Test comment",
1036 text=u"Test comment",
1058 repo=self.target_repository.repo_name,
1037 repo=self.target_repository.repo_name,
1059 user=self.author,
1038 user=self.author,
1060 line_no=line_no,
1039 line_no=line_no,
1061 f_path=file_path,
1040 f_path=file_path,
1062 pull_request=self.pull_request)
1041 pull_request=self.pull_request)
1063 assert comment.pull_request_version_id is None
1042 assert comment.pull_request_version_id is None
1064
1043
1065 if linked_to:
1044 if linked_to:
1066 PullRequestModel()._link_comments_to_version(linked_to)
1045 PullRequestModel()._link_comments_to_version(linked_to)
1067
1046
1068 return comment
1047 return comment
1069
1048
1070 def create_version_of_pull_request(self):
1049 def create_version_of_pull_request(self):
1071 pull_request = self.create_pull_request()
1050 pull_request = self.create_pull_request()
1072 version = PullRequestModel()._create_version_from_snapshot(
1051 version = PullRequestModel()._create_version_from_snapshot(
1073 pull_request)
1052 pull_request)
1074 return version
1053 return version
1075
1054
1076 def create_status_votes(self, status, *reviewers):
1055 def create_status_votes(self, status, *reviewers):
1077 for reviewer in reviewers:
1056 for reviewer in reviewers:
1078 ChangesetStatusModel().set_status(
1057 ChangesetStatusModel().set_status(
1079 repo=self.pull_request.target_repo,
1058 repo=self.pull_request.target_repo,
1080 status=status,
1059 status=status,
1081 user=reviewer.user_id,
1060 user=reviewer.user_id,
1082 pull_request=self.pull_request)
1061 pull_request=self.pull_request)
1083
1062
1084 def set_mergeable(self, value):
1063 def set_mergeable(self, value):
1085 if not self.mergeable_patcher:
1064 if not self.mergeable_patcher:
1086 self.mergeable_patcher = mock.patch.object(
1065 self.mergeable_patcher = mock.patch.object(
1087 VcsSettingsModel, 'get_general_settings')
1066 VcsSettingsModel, 'get_general_settings')
1088 self.mergeable_mock = self.mergeable_patcher.start()
1067 self.mergeable_mock = self.mergeable_patcher.start()
1089 self.mergeable_mock.return_value = {
1068 self.mergeable_mock.return_value = {
1090 'rhodecode_pr_merge_enabled': value}
1069 'rhodecode_pr_merge_enabled': value}
1091
1070
1092 def cleanup(self):
1071 def cleanup(self):
1093 # In case the source repository is already cleaned up, the pull
1072 # In case the source repository is already cleaned up, the pull
1094 # request will already be deleted.
1073 # request will already be deleted.
1095 pull_request = PullRequest().get(self.pull_request_id)
1074 pull_request = PullRequest().get(self.pull_request_id)
1096 if pull_request:
1075 if pull_request:
1097 PullRequestModel().delete(pull_request)
1076 PullRequestModel().delete(pull_request)
1098 Session().commit()
1077 Session().commit()
1099
1078
1100 if self.notification_patcher:
1079 if self.notification_patcher:
1101 self.notification_patcher.stop()
1080 self.notification_patcher.stop()
1102
1081
1103 if self.mergeable_patcher:
1082 if self.mergeable_patcher:
1104 self.mergeable_patcher.stop()
1083 self.mergeable_patcher.stop()
1105
1084
1106
1085
1107 @pytest.fixture
1086 @pytest.fixture
1108 def user_admin(pylonsapp):
1087 def user_admin(pylonsapp):
1109 """
1088 """
1110 Provides the default admin test user as an instance of `db.User`.
1089 Provides the default admin test user as an instance of `db.User`.
1111 """
1090 """
1112 user = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
1091 user = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
1113 return user
1092 return user
1114
1093
1115
1094
1116 @pytest.fixture
1095 @pytest.fixture
1117 def user_regular(pylonsapp):
1096 def user_regular(pylonsapp):
1118 """
1097 """
1119 Provides the default regular test user as an instance of `db.User`.
1098 Provides the default regular test user as an instance of `db.User`.
1120 """
1099 """
1121 user = UserModel().get_by_username(TEST_USER_REGULAR_LOGIN)
1100 user = UserModel().get_by_username(TEST_USER_REGULAR_LOGIN)
1122 return user
1101 return user
1123
1102
1124
1103
1125 @pytest.fixture
1104 @pytest.fixture
1126 def user_util(request, pylonsapp):
1105 def user_util(request, pylonsapp):
1127 """
1106 """
1128 Provides a wired instance of `UserUtility` with integrated cleanup.
1107 Provides a wired instance of `UserUtility` with integrated cleanup.
1129 """
1108 """
1130 utility = UserUtility(test_name=request.node.name)
1109 utility = UserUtility(test_name=request.node.name)
1131 request.addfinalizer(utility.cleanup)
1110 request.addfinalizer(utility.cleanup)
1132 return utility
1111 return utility
1133
1112
1134
1113
1135 # TODO: johbo: Split this up into utilities per domain or something similar
1114 # TODO: johbo: Split this up into utilities per domain or something similar
1136 class UserUtility(object):
1115 class UserUtility(object):
1137
1116
1138 def __init__(self, test_name="test"):
1117 def __init__(self, test_name="test"):
1139 self._test_name = self._sanitize_name(test_name)
1118 self._test_name = self._sanitize_name(test_name)
1140 self.fixture = Fixture()
1119 self.fixture = Fixture()
1141 self.repo_group_ids = []
1120 self.repo_group_ids = []
1142 self.repos_ids = []
1121 self.repos_ids = []
1143 self.user_ids = []
1122 self.user_ids = []
1144 self.user_group_ids = []
1123 self.user_group_ids = []
1145 self.user_repo_permission_ids = []
1124 self.user_repo_permission_ids = []
1146 self.user_group_repo_permission_ids = []
1125 self.user_group_repo_permission_ids = []
1147 self.user_repo_group_permission_ids = []
1126 self.user_repo_group_permission_ids = []
1148 self.user_group_repo_group_permission_ids = []
1127 self.user_group_repo_group_permission_ids = []
1149 self.user_user_group_permission_ids = []
1128 self.user_user_group_permission_ids = []
1150 self.user_group_user_group_permission_ids = []
1129 self.user_group_user_group_permission_ids = []
1151 self.user_permissions = []
1130 self.user_permissions = []
1152
1131
1153 def _sanitize_name(self, name):
1132 def _sanitize_name(self, name):
1154 for char in ['[', ']']:
1133 for char in ['[', ']']:
1155 name = name.replace(char, '_')
1134 name = name.replace(char, '_')
1156 return name
1135 return name
1157
1136
1158 def create_repo_group(
1137 def create_repo_group(
1159 self, owner=TEST_USER_ADMIN_LOGIN, auto_cleanup=True):
1138 self, owner=TEST_USER_ADMIN_LOGIN, auto_cleanup=True):
1160 group_name = "{prefix}_repogroup_{count}".format(
1139 group_name = "{prefix}_repogroup_{count}".format(
1161 prefix=self._test_name,
1140 prefix=self._test_name,
1162 count=len(self.repo_group_ids))
1141 count=len(self.repo_group_ids))
1163 repo_group = self.fixture.create_repo_group(
1142 repo_group = self.fixture.create_repo_group(
1164 group_name, cur_user=owner)
1143 group_name, cur_user=owner)
1165 if auto_cleanup:
1144 if auto_cleanup:
1166 self.repo_group_ids.append(repo_group.group_id)
1145 self.repo_group_ids.append(repo_group.group_id)
1167 return repo_group
1146 return repo_group
1168
1147
1169 def create_repo(self, owner=TEST_USER_ADMIN_LOGIN, parent=None, auto_cleanup=True):
1148 def create_repo(self, owner=TEST_USER_ADMIN_LOGIN, parent=None, auto_cleanup=True):
1170 repo_name = "{prefix}_repository_{count}".format(
1149 repo_name = "{prefix}_repository_{count}".format(
1171 prefix=self._test_name,
1150 prefix=self._test_name,
1172 count=len(self.repos_ids))
1151 count=len(self.repos_ids))
1173
1152
1174 repository = self.fixture.create_repo(
1153 repository = self.fixture.create_repo(
1175 repo_name, cur_user=owner, repo_group=parent)
1154 repo_name, cur_user=owner, repo_group=parent)
1176 if auto_cleanup:
1155 if auto_cleanup:
1177 self.repos_ids.append(repository.repo_id)
1156 self.repos_ids.append(repository.repo_id)
1178 return repository
1157 return repository
1179
1158
1180 def create_user(self, auto_cleanup=True, **kwargs):
1159 def create_user(self, auto_cleanup=True, **kwargs):
1181 user_name = "{prefix}_user_{count}".format(
1160 user_name = "{prefix}_user_{count}".format(
1182 prefix=self._test_name,
1161 prefix=self._test_name,
1183 count=len(self.user_ids))
1162 count=len(self.user_ids))
1184 user = self.fixture.create_user(user_name, **kwargs)
1163 user = self.fixture.create_user(user_name, **kwargs)
1185 if auto_cleanup:
1164 if auto_cleanup:
1186 self.user_ids.append(user.user_id)
1165 self.user_ids.append(user.user_id)
1187 return user
1166 return user
1188
1167
1189 def create_user_with_group(self):
1168 def create_user_with_group(self):
1190 user = self.create_user()
1169 user = self.create_user()
1191 user_group = self.create_user_group(members=[user])
1170 user_group = self.create_user_group(members=[user])
1192 return user, user_group
1171 return user, user_group
1193
1172
1194 def create_user_group(self, members=None, auto_cleanup=True, **kwargs):
1173 def create_user_group(self, members=None, auto_cleanup=True, **kwargs):
1195 group_name = "{prefix}_usergroup_{count}".format(
1174 group_name = "{prefix}_usergroup_{count}".format(
1196 prefix=self._test_name,
1175 prefix=self._test_name,
1197 count=len(self.user_group_ids))
1176 count=len(self.user_group_ids))
1198 user_group = self.fixture.create_user_group(group_name, **kwargs)
1177 user_group = self.fixture.create_user_group(group_name, **kwargs)
1199 if auto_cleanup:
1178 if auto_cleanup:
1200 self.user_group_ids.append(user_group.users_group_id)
1179 self.user_group_ids.append(user_group.users_group_id)
1201 if members:
1180 if members:
1202 for user in members:
1181 for user in members:
1203 UserGroupModel().add_user_to_group(user_group, user)
1182 UserGroupModel().add_user_to_group(user_group, user)
1204 return user_group
1183 return user_group
1205
1184
1206 def grant_user_permission(self, user_name, permission_name):
1185 def grant_user_permission(self, user_name, permission_name):
1207 self._inherit_default_user_permissions(user_name, False)
1186 self._inherit_default_user_permissions(user_name, False)
1208 self.user_permissions.append((user_name, permission_name))
1187 self.user_permissions.append((user_name, permission_name))
1209
1188
1210 def grant_user_permission_to_repo_group(
1189 def grant_user_permission_to_repo_group(
1211 self, repo_group, user, permission_name):
1190 self, repo_group, user, permission_name):
1212 permission = RepoGroupModel().grant_user_permission(
1191 permission = RepoGroupModel().grant_user_permission(
1213 repo_group, user, permission_name)
1192 repo_group, user, permission_name)
1214 self.user_repo_group_permission_ids.append(
1193 self.user_repo_group_permission_ids.append(
1215 (repo_group.group_id, user.user_id))
1194 (repo_group.group_id, user.user_id))
1216 return permission
1195 return permission
1217
1196
1218 def grant_user_group_permission_to_repo_group(
1197 def grant_user_group_permission_to_repo_group(
1219 self, repo_group, user_group, permission_name):
1198 self, repo_group, user_group, permission_name):
1220 permission = RepoGroupModel().grant_user_group_permission(
1199 permission = RepoGroupModel().grant_user_group_permission(
1221 repo_group, user_group, permission_name)
1200 repo_group, user_group, permission_name)
1222 self.user_group_repo_group_permission_ids.append(
1201 self.user_group_repo_group_permission_ids.append(
1223 (repo_group.group_id, user_group.users_group_id))
1202 (repo_group.group_id, user_group.users_group_id))
1224 return permission
1203 return permission
1225
1204
1226 def grant_user_permission_to_repo(
1205 def grant_user_permission_to_repo(
1227 self, repo, user, permission_name):
1206 self, repo, user, permission_name):
1228 permission = RepoModel().grant_user_permission(
1207 permission = RepoModel().grant_user_permission(
1229 repo, user, permission_name)
1208 repo, user, permission_name)
1230 self.user_repo_permission_ids.append(
1209 self.user_repo_permission_ids.append(
1231 (repo.repo_id, user.user_id))
1210 (repo.repo_id, user.user_id))
1232 return permission
1211 return permission
1233
1212
1234 def grant_user_group_permission_to_repo(
1213 def grant_user_group_permission_to_repo(
1235 self, repo, user_group, permission_name):
1214 self, repo, user_group, permission_name):
1236 permission = RepoModel().grant_user_group_permission(
1215 permission = RepoModel().grant_user_group_permission(
1237 repo, user_group, permission_name)
1216 repo, user_group, permission_name)
1238 self.user_group_repo_permission_ids.append(
1217 self.user_group_repo_permission_ids.append(
1239 (repo.repo_id, user_group.users_group_id))
1218 (repo.repo_id, user_group.users_group_id))
1240 return permission
1219 return permission
1241
1220
1242 def grant_user_permission_to_user_group(
1221 def grant_user_permission_to_user_group(
1243 self, target_user_group, user, permission_name):
1222 self, target_user_group, user, permission_name):
1244 permission = UserGroupModel().grant_user_permission(
1223 permission = UserGroupModel().grant_user_permission(
1245 target_user_group, user, permission_name)
1224 target_user_group, user, permission_name)
1246 self.user_user_group_permission_ids.append(
1225 self.user_user_group_permission_ids.append(
1247 (target_user_group.users_group_id, user.user_id))
1226 (target_user_group.users_group_id, user.user_id))
1248 return permission
1227 return permission
1249
1228
1250 def grant_user_group_permission_to_user_group(
1229 def grant_user_group_permission_to_user_group(
1251 self, target_user_group, user_group, permission_name):
1230 self, target_user_group, user_group, permission_name):
1252 permission = UserGroupModel().grant_user_group_permission(
1231 permission = UserGroupModel().grant_user_group_permission(
1253 target_user_group, user_group, permission_name)
1232 target_user_group, user_group, permission_name)
1254 self.user_group_user_group_permission_ids.append(
1233 self.user_group_user_group_permission_ids.append(
1255 (target_user_group.users_group_id, user_group.users_group_id))
1234 (target_user_group.users_group_id, user_group.users_group_id))
1256 return permission
1235 return permission
1257
1236
1258 def revoke_user_permission(self, user_name, permission_name):
1237 def revoke_user_permission(self, user_name, permission_name):
1259 self._inherit_default_user_permissions(user_name, True)
1238 self._inherit_default_user_permissions(user_name, True)
1260 UserModel().revoke_perm(user_name, permission_name)
1239 UserModel().revoke_perm(user_name, permission_name)
1261
1240
1262 def _inherit_default_user_permissions(self, user_name, value):
1241 def _inherit_default_user_permissions(self, user_name, value):
1263 user = UserModel().get_by_username(user_name)
1242 user = UserModel().get_by_username(user_name)
1264 user.inherit_default_permissions = value
1243 user.inherit_default_permissions = value
1265 Session().add(user)
1244 Session().add(user)
1266 Session().commit()
1245 Session().commit()
1267
1246
1268 def cleanup(self):
1247 def cleanup(self):
1269 self._cleanup_permissions()
1248 self._cleanup_permissions()
1270 self._cleanup_repos()
1249 self._cleanup_repos()
1271 self._cleanup_repo_groups()
1250 self._cleanup_repo_groups()
1272 self._cleanup_user_groups()
1251 self._cleanup_user_groups()
1273 self._cleanup_users()
1252 self._cleanup_users()
1274
1253
1275 def _cleanup_permissions(self):
1254 def _cleanup_permissions(self):
1276 if self.user_permissions:
1255 if self.user_permissions:
1277 for user_name, permission_name in self.user_permissions:
1256 for user_name, permission_name in self.user_permissions:
1278 self.revoke_user_permission(user_name, permission_name)
1257 self.revoke_user_permission(user_name, permission_name)
1279
1258
1280 for permission in self.user_repo_permission_ids:
1259 for permission in self.user_repo_permission_ids:
1281 RepoModel().revoke_user_permission(*permission)
1260 RepoModel().revoke_user_permission(*permission)
1282
1261
1283 for permission in self.user_group_repo_permission_ids:
1262 for permission in self.user_group_repo_permission_ids:
1284 RepoModel().revoke_user_group_permission(*permission)
1263 RepoModel().revoke_user_group_permission(*permission)
1285
1264
1286 for permission in self.user_repo_group_permission_ids:
1265 for permission in self.user_repo_group_permission_ids:
1287 RepoGroupModel().revoke_user_permission(*permission)
1266 RepoGroupModel().revoke_user_permission(*permission)
1288
1267
1289 for permission in self.user_group_repo_group_permission_ids:
1268 for permission in self.user_group_repo_group_permission_ids:
1290 RepoGroupModel().revoke_user_group_permission(*permission)
1269 RepoGroupModel().revoke_user_group_permission(*permission)
1291
1270
1292 for permission in self.user_user_group_permission_ids:
1271 for permission in self.user_user_group_permission_ids:
1293 UserGroupModel().revoke_user_permission(*permission)
1272 UserGroupModel().revoke_user_permission(*permission)
1294
1273
1295 for permission in self.user_group_user_group_permission_ids:
1274 for permission in self.user_group_user_group_permission_ids:
1296 UserGroupModel().revoke_user_group_permission(*permission)
1275 UserGroupModel().revoke_user_group_permission(*permission)
1297
1276
1298 def _cleanup_repo_groups(self):
1277 def _cleanup_repo_groups(self):
1299 def _repo_group_compare(first_group_id, second_group_id):
1278 def _repo_group_compare(first_group_id, second_group_id):
1300 """
1279 """
1301 Gives higher priority to the groups with the most complex paths
1280 Gives higher priority to the groups with the most complex paths
1302 """
1281 """
1303 first_group = RepoGroup.get(first_group_id)
1282 first_group = RepoGroup.get(first_group_id)
1304 second_group = RepoGroup.get(second_group_id)
1283 second_group = RepoGroup.get(second_group_id)
1305 first_group_parts = (
1284 first_group_parts = (
1306 len(first_group.group_name.split('/')) if first_group else 0)
1285 len(first_group.group_name.split('/')) if first_group else 0)
1307 second_group_parts = (
1286 second_group_parts = (
1308 len(second_group.group_name.split('/')) if second_group else 0)
1287 len(second_group.group_name.split('/')) if second_group else 0)
1309 return cmp(second_group_parts, first_group_parts)
1288 return cmp(second_group_parts, first_group_parts)
1310
1289
1311 sorted_repo_group_ids = sorted(
1290 sorted_repo_group_ids = sorted(
1312 self.repo_group_ids, cmp=_repo_group_compare)
1291 self.repo_group_ids, cmp=_repo_group_compare)
1313 for repo_group_id in sorted_repo_group_ids:
1292 for repo_group_id in sorted_repo_group_ids:
1314 self.fixture.destroy_repo_group(repo_group_id)
1293 self.fixture.destroy_repo_group(repo_group_id)
1315
1294
1316 def _cleanup_repos(self):
1295 def _cleanup_repos(self):
1317 sorted_repos_ids = sorted(self.repos_ids)
1296 sorted_repos_ids = sorted(self.repos_ids)
1318 for repo_id in sorted_repos_ids:
1297 for repo_id in sorted_repos_ids:
1319 self.fixture.destroy_repo(repo_id)
1298 self.fixture.destroy_repo(repo_id)
1320
1299
1321 def _cleanup_user_groups(self):
1300 def _cleanup_user_groups(self):
1322 def _user_group_compare(first_group_id, second_group_id):
1301 def _user_group_compare(first_group_id, second_group_id):
1323 """
1302 """
1324 Gives higher priority to the groups with the most complex paths
1303 Gives higher priority to the groups with the most complex paths
1325 """
1304 """
1326 first_group = UserGroup.get(first_group_id)
1305 first_group = UserGroup.get(first_group_id)
1327 second_group = UserGroup.get(second_group_id)
1306 second_group = UserGroup.get(second_group_id)
1328 first_group_parts = (
1307 first_group_parts = (
1329 len(first_group.users_group_name.split('/'))
1308 len(first_group.users_group_name.split('/'))
1330 if first_group else 0)
1309 if first_group else 0)
1331 second_group_parts = (
1310 second_group_parts = (
1332 len(second_group.users_group_name.split('/'))
1311 len(second_group.users_group_name.split('/'))
1333 if second_group else 0)
1312 if second_group else 0)
1334 return cmp(second_group_parts, first_group_parts)
1313 return cmp(second_group_parts, first_group_parts)
1335
1314
1336 sorted_user_group_ids = sorted(
1315 sorted_user_group_ids = sorted(
1337 self.user_group_ids, cmp=_user_group_compare)
1316 self.user_group_ids, cmp=_user_group_compare)
1338 for user_group_id in sorted_user_group_ids:
1317 for user_group_id in sorted_user_group_ids:
1339 self.fixture.destroy_user_group(user_group_id)
1318 self.fixture.destroy_user_group(user_group_id)
1340
1319
1341 def _cleanup_users(self):
1320 def _cleanup_users(self):
1342 for user_id in self.user_ids:
1321 for user_id in self.user_ids:
1343 self.fixture.destroy_user(user_id)
1322 self.fixture.destroy_user(user_id)
1344
1323
1345
1324
1346 # TODO: Think about moving this into a pytest-pyro package and make it a
1325 # TODO: Think about moving this into a pytest-pyro package and make it a
1347 # pytest plugin
1326 # pytest plugin
1348 @pytest.hookimpl(tryfirst=True, hookwrapper=True)
1327 @pytest.hookimpl(tryfirst=True, hookwrapper=True)
1349 def pytest_runtest_makereport(item, call):
1328 def pytest_runtest_makereport(item, call):
1350 """
1329 """
1351 Adding the remote traceback if the exception has this information.
1330 Adding the remote traceback if the exception has this information.
1352
1331
1353 Pyro4 attaches this information as the attribute `_vcs_server_traceback`
1332 VCSServer attaches this information as the attribute `_vcs_server_traceback`
1354 to the exception instance.
1333 to the exception instance.
1355 """
1334 """
1356 outcome = yield
1335 outcome = yield
1357 report = outcome.get_result()
1336 report = outcome.get_result()
1358 if call.excinfo:
1337 if call.excinfo:
1359 _add_vcsserver_remote_traceback(report, call.excinfo.value)
1338 _add_vcsserver_remote_traceback(report, call.excinfo.value)
1360
1339
1361
1340
1362 def _add_vcsserver_remote_traceback(report, exc):
1341 def _add_vcsserver_remote_traceback(report, exc):
1363 vcsserver_traceback = getattr(exc, '_vcs_server_traceback', None)
1342 vcsserver_traceback = getattr(exc, '_vcs_server_traceback', None)
1364
1343
1365 if vcsserver_traceback:
1344 if vcsserver_traceback:
1366 section = 'VCSServer remote traceback ' + report.when
1345 section = 'VCSServer remote traceback ' + report.when
1367 report.sections.append((section, vcsserver_traceback))
1346 report.sections.append((section, vcsserver_traceback))
1368
1347
1369
1348
1370 @pytest.fixture(scope='session')
1349 @pytest.fixture(scope='session')
1371 def testrun():
1350 def testrun():
1372 return {
1351 return {
1373 'uuid': uuid.uuid4(),
1352 'uuid': uuid.uuid4(),
1374 'start': datetime.datetime.utcnow().isoformat(),
1353 'start': datetime.datetime.utcnow().isoformat(),
1375 'timestamp': int(time.time()),
1354 'timestamp': int(time.time()),
1376 }
1355 }
1377
1356
1378
1357
1379 @pytest.fixture(autouse=True)
1358 @pytest.fixture(autouse=True)
1380 def collect_appenlight_stats(request, testrun):
1359 def collect_appenlight_stats(request, testrun):
1381 """
1360 """
1382 This fixture reports memory consumtion of single tests.
1361 This fixture reports memory consumtion of single tests.
1383
1362
1384 It gathers data based on `psutil` and sends them to Appenlight. The option
1363 It gathers data based on `psutil` and sends them to Appenlight. The option
1385 ``--ae`` has te be used to enable this fixture and the API key for your
1364 ``--ae`` has te be used to enable this fixture and the API key for your
1386 application has to be provided in ``--ae-key``.
1365 application has to be provided in ``--ae-key``.
1387 """
1366 """
1388 try:
1367 try:
1389 # cygwin cannot have yet psutil support.
1368 # cygwin cannot have yet psutil support.
1390 import psutil
1369 import psutil
1391 except ImportError:
1370 except ImportError:
1392 return
1371 return
1393
1372
1394 if not request.config.getoption('--appenlight'):
1373 if not request.config.getoption('--appenlight'):
1395 return
1374 return
1396 else:
1375 else:
1397 # Only request the pylonsapp fixture if appenlight tracking is
1376 # Only request the pylonsapp fixture if appenlight tracking is
1398 # enabled. This will speed up a test run of unit tests by 2 to 3
1377 # enabled. This will speed up a test run of unit tests by 2 to 3
1399 # seconds if appenlight is not enabled.
1378 # seconds if appenlight is not enabled.
1400 pylonsapp = request.getfuncargvalue("pylonsapp")
1379 pylonsapp = request.getfuncargvalue("pylonsapp")
1401 url = '{}/api/logs'.format(request.config.getoption('--appenlight-url'))
1380 url = '{}/api/logs'.format(request.config.getoption('--appenlight-url'))
1402 client = AppenlightClient(
1381 client = AppenlightClient(
1403 url=url,
1382 url=url,
1404 api_key=request.config.getoption('--appenlight-api-key'),
1383 api_key=request.config.getoption('--appenlight-api-key'),
1405 namespace=request.node.nodeid,
1384 namespace=request.node.nodeid,
1406 request=str(testrun['uuid']),
1385 request=str(testrun['uuid']),
1407 testrun=testrun)
1386 testrun=testrun)
1408
1387
1409 client.collect({
1388 client.collect({
1410 'message': "Starting",
1389 'message': "Starting",
1411 })
1390 })
1412
1391
1413 server_and_port = pylonsapp.config['vcs.server']
1392 server_and_port = pylonsapp.config['vcs.server']
1414 server = create_vcsserver_proxy(server_and_port)
1393 protocol = pylonsapp.config['vcs.server.protocol']
1394 server = create_vcsserver_proxy(server_and_port, protocol)
1415 with server:
1395 with server:
1416 vcs_pid = server.get_pid()
1396 vcs_pid = server.get_pid()
1417 server.run_gc()
1397 server.run_gc()
1418 vcs_process = psutil.Process(vcs_pid)
1398 vcs_process = psutil.Process(vcs_pid)
1419 mem = vcs_process.memory_info()
1399 mem = vcs_process.memory_info()
1420 client.tag_before('vcsserver.rss', mem.rss)
1400 client.tag_before('vcsserver.rss', mem.rss)
1421 client.tag_before('vcsserver.vms', mem.vms)
1401 client.tag_before('vcsserver.vms', mem.vms)
1422
1402
1423 test_process = psutil.Process()
1403 test_process = psutil.Process()
1424 mem = test_process.memory_info()
1404 mem = test_process.memory_info()
1425 client.tag_before('test.rss', mem.rss)
1405 client.tag_before('test.rss', mem.rss)
1426 client.tag_before('test.vms', mem.vms)
1406 client.tag_before('test.vms', mem.vms)
1427
1407
1428 client.tag_before('time', time.time())
1408 client.tag_before('time', time.time())
1429
1409
1430 @request.addfinalizer
1410 @request.addfinalizer
1431 def send_stats():
1411 def send_stats():
1432 client.tag_after('time', time.time())
1412 client.tag_after('time', time.time())
1433 with server:
1413 with server:
1434 gc_stats = server.run_gc()
1414 gc_stats = server.run_gc()
1435 for tag, value in gc_stats.items():
1415 for tag, value in gc_stats.items():
1436 client.tag_after(tag, value)
1416 client.tag_after(tag, value)
1437 mem = vcs_process.memory_info()
1417 mem = vcs_process.memory_info()
1438 client.tag_after('vcsserver.rss', mem.rss)
1418 client.tag_after('vcsserver.rss', mem.rss)
1439 client.tag_after('vcsserver.vms', mem.vms)
1419 client.tag_after('vcsserver.vms', mem.vms)
1440
1420
1441 mem = test_process.memory_info()
1421 mem = test_process.memory_info()
1442 client.tag_after('test.rss', mem.rss)
1422 client.tag_after('test.rss', mem.rss)
1443 client.tag_after('test.vms', mem.vms)
1423 client.tag_after('test.vms', mem.vms)
1444
1424
1445 client.collect({
1425 client.collect({
1446 'message': "Finished",
1426 'message': "Finished",
1447 })
1427 })
1448 client.send_stats()
1428 client.send_stats()
1449
1429
1450 return client
1430 return client
1451
1431
1452
1432
1453 class AppenlightClient():
1433 class AppenlightClient():
1454
1434
1455 url_template = '{url}?protocol_version=0.5'
1435 url_template = '{url}?protocol_version=0.5'
1456
1436
1457 def __init__(
1437 def __init__(
1458 self, url, api_key, add_server=True, add_timestamp=True,
1438 self, url, api_key, add_server=True, add_timestamp=True,
1459 namespace=None, request=None, testrun=None):
1439 namespace=None, request=None, testrun=None):
1460 self.url = self.url_template.format(url=url)
1440 self.url = self.url_template.format(url=url)
1461 self.api_key = api_key
1441 self.api_key = api_key
1462 self.add_server = add_server
1442 self.add_server = add_server
1463 self.add_timestamp = add_timestamp
1443 self.add_timestamp = add_timestamp
1464 self.namespace = namespace
1444 self.namespace = namespace
1465 self.request = request
1445 self.request = request
1466 self.server = socket.getfqdn(socket.gethostname())
1446 self.server = socket.getfqdn(socket.gethostname())
1467 self.tags_before = {}
1447 self.tags_before = {}
1468 self.tags_after = {}
1448 self.tags_after = {}
1469 self.stats = []
1449 self.stats = []
1470 self.testrun = testrun or {}
1450 self.testrun = testrun or {}
1471
1451
1472 def tag_before(self, tag, value):
1452 def tag_before(self, tag, value):
1473 self.tags_before[tag] = value
1453 self.tags_before[tag] = value
1474
1454
1475 def tag_after(self, tag, value):
1455 def tag_after(self, tag, value):
1476 self.tags_after[tag] = value
1456 self.tags_after[tag] = value
1477
1457
1478 def collect(self, data):
1458 def collect(self, data):
1479 if self.add_server:
1459 if self.add_server:
1480 data.setdefault('server', self.server)
1460 data.setdefault('server', self.server)
1481 if self.add_timestamp:
1461 if self.add_timestamp:
1482 data.setdefault('date', datetime.datetime.utcnow().isoformat())
1462 data.setdefault('date', datetime.datetime.utcnow().isoformat())
1483 if self.namespace:
1463 if self.namespace:
1484 data.setdefault('namespace', self.namespace)
1464 data.setdefault('namespace', self.namespace)
1485 if self.request:
1465 if self.request:
1486 data.setdefault('request', self.request)
1466 data.setdefault('request', self.request)
1487 self.stats.append(data)
1467 self.stats.append(data)
1488
1468
1489 def send_stats(self):
1469 def send_stats(self):
1490 tags = [
1470 tags = [
1491 ('testrun', self.request),
1471 ('testrun', self.request),
1492 ('testrun.start', self.testrun['start']),
1472 ('testrun.start', self.testrun['start']),
1493 ('testrun.timestamp', self.testrun['timestamp']),
1473 ('testrun.timestamp', self.testrun['timestamp']),
1494 ('test', self.namespace),
1474 ('test', self.namespace),
1495 ]
1475 ]
1496 for key, value in self.tags_before.items():
1476 for key, value in self.tags_before.items():
1497 tags.append((key + '.before', value))
1477 tags.append((key + '.before', value))
1498 try:
1478 try:
1499 delta = self.tags_after[key] - value
1479 delta = self.tags_after[key] - value
1500 tags.append((key + '.delta', delta))
1480 tags.append((key + '.delta', delta))
1501 except Exception:
1481 except Exception:
1502 pass
1482 pass
1503 for key, value in self.tags_after.items():
1483 for key, value in self.tags_after.items():
1504 tags.append((key + '.after', value))
1484 tags.append((key + '.after', value))
1505 self.collect({
1485 self.collect({
1506 'message': "Collected tags",
1486 'message': "Collected tags",
1507 'tags': tags,
1487 'tags': tags,
1508 })
1488 })
1509
1489
1510 response = requests.post(
1490 response = requests.post(
1511 self.url,
1491 self.url,
1512 headers={
1492 headers={
1513 'X-appenlight-api-key': self.api_key},
1493 'X-appenlight-api-key': self.api_key},
1514 json=self.stats,
1494 json=self.stats,
1515 )
1495 )
1516
1496
1517 if not response.status_code == 200:
1497 if not response.status_code == 200:
1518 pprint.pprint(self.stats)
1498 pprint.pprint(self.stats)
1519 print response.headers
1499 print response.headers
1520 print response.text
1500 print response.text
1521 raise Exception('Sending to appenlight failed')
1501 raise Exception('Sending to appenlight failed')
1522
1502
1523
1503
1524 @pytest.fixture
1504 @pytest.fixture
1525 def gist_util(request, pylonsapp):
1505 def gist_util(request, pylonsapp):
1526 """
1506 """
1527 Provides a wired instance of `GistUtility` with integrated cleanup.
1507 Provides a wired instance of `GistUtility` with integrated cleanup.
1528 """
1508 """
1529 utility = GistUtility()
1509 utility = GistUtility()
1530 request.addfinalizer(utility.cleanup)
1510 request.addfinalizer(utility.cleanup)
1531 return utility
1511 return utility
1532
1512
1533
1513
1534 class GistUtility(object):
1514 class GistUtility(object):
1535 def __init__(self):
1515 def __init__(self):
1536 self.fixture = Fixture()
1516 self.fixture = Fixture()
1537 self.gist_ids = []
1517 self.gist_ids = []
1538
1518
1539 def create_gist(self, **kwargs):
1519 def create_gist(self, **kwargs):
1540 gist = self.fixture.create_gist(**kwargs)
1520 gist = self.fixture.create_gist(**kwargs)
1541 self.gist_ids.append(gist.gist_id)
1521 self.gist_ids.append(gist.gist_id)
1542 return gist
1522 return gist
1543
1523
1544 def cleanup(self):
1524 def cleanup(self):
1545 for id_ in self.gist_ids:
1525 for id_ in self.gist_ids:
1546 self.fixture.destroy_gists(str(id_))
1526 self.fixture.destroy_gists(str(id_))
1547
1527
1548
1528
1549 @pytest.fixture
1529 @pytest.fixture
1550 def enabled_backends(request):
1530 def enabled_backends(request):
1551 backends = request.config.option.backends
1531 backends = request.config.option.backends
1552 return backends[:]
1532 return backends[:]
1553
1533
1554
1534
1555 @pytest.fixture
1535 @pytest.fixture
1556 def settings_util(request):
1536 def settings_util(request):
1557 """
1537 """
1558 Provides a wired instance of `SettingsUtility` with integrated cleanup.
1538 Provides a wired instance of `SettingsUtility` with integrated cleanup.
1559 """
1539 """
1560 utility = SettingsUtility()
1540 utility = SettingsUtility()
1561 request.addfinalizer(utility.cleanup)
1541 request.addfinalizer(utility.cleanup)
1562 return utility
1542 return utility
1563
1543
1564
1544
1565 class SettingsUtility(object):
1545 class SettingsUtility(object):
1566 def __init__(self):
1546 def __init__(self):
1567 self.rhodecode_ui_ids = []
1547 self.rhodecode_ui_ids = []
1568 self.rhodecode_setting_ids = []
1548 self.rhodecode_setting_ids = []
1569 self.repo_rhodecode_ui_ids = []
1549 self.repo_rhodecode_ui_ids = []
1570 self.repo_rhodecode_setting_ids = []
1550 self.repo_rhodecode_setting_ids = []
1571
1551
1572 def create_repo_rhodecode_ui(
1552 def create_repo_rhodecode_ui(
1573 self, repo, section, value, key=None, active=True, cleanup=True):
1553 self, repo, section, value, key=None, active=True, cleanup=True):
1574 key = key or hashlib.sha1(
1554 key = key or hashlib.sha1(
1575 '{}{}{}'.format(section, value, repo.repo_id)).hexdigest()
1555 '{}{}{}'.format(section, value, repo.repo_id)).hexdigest()
1576
1556
1577 setting = RepoRhodeCodeUi()
1557 setting = RepoRhodeCodeUi()
1578 setting.repository_id = repo.repo_id
1558 setting.repository_id = repo.repo_id
1579 setting.ui_section = section
1559 setting.ui_section = section
1580 setting.ui_value = value
1560 setting.ui_value = value
1581 setting.ui_key = key
1561 setting.ui_key = key
1582 setting.ui_active = active
1562 setting.ui_active = active
1583 Session().add(setting)
1563 Session().add(setting)
1584 Session().commit()
1564 Session().commit()
1585
1565
1586 if cleanup:
1566 if cleanup:
1587 self.repo_rhodecode_ui_ids.append(setting.ui_id)
1567 self.repo_rhodecode_ui_ids.append(setting.ui_id)
1588 return setting
1568 return setting
1589
1569
1590 def create_rhodecode_ui(
1570 def create_rhodecode_ui(
1591 self, section, value, key=None, active=True, cleanup=True):
1571 self, section, value, key=None, active=True, cleanup=True):
1592 key = key or hashlib.sha1('{}{}'.format(section, value)).hexdigest()
1572 key = key or hashlib.sha1('{}{}'.format(section, value)).hexdigest()
1593
1573
1594 setting = RhodeCodeUi()
1574 setting = RhodeCodeUi()
1595 setting.ui_section = section
1575 setting.ui_section = section
1596 setting.ui_value = value
1576 setting.ui_value = value
1597 setting.ui_key = key
1577 setting.ui_key = key
1598 setting.ui_active = active
1578 setting.ui_active = active
1599 Session().add(setting)
1579 Session().add(setting)
1600 Session().commit()
1580 Session().commit()
1601
1581
1602 if cleanup:
1582 if cleanup:
1603 self.rhodecode_ui_ids.append(setting.ui_id)
1583 self.rhodecode_ui_ids.append(setting.ui_id)
1604 return setting
1584 return setting
1605
1585
1606 def create_repo_rhodecode_setting(
1586 def create_repo_rhodecode_setting(
1607 self, repo, name, value, type_, cleanup=True):
1587 self, repo, name, value, type_, cleanup=True):
1608 setting = RepoRhodeCodeSetting(
1588 setting = RepoRhodeCodeSetting(
1609 repo.repo_id, key=name, val=value, type=type_)
1589 repo.repo_id, key=name, val=value, type=type_)
1610 Session().add(setting)
1590 Session().add(setting)
1611 Session().commit()
1591 Session().commit()
1612
1592
1613 if cleanup:
1593 if cleanup:
1614 self.repo_rhodecode_setting_ids.append(setting.app_settings_id)
1594 self.repo_rhodecode_setting_ids.append(setting.app_settings_id)
1615 return setting
1595 return setting
1616
1596
1617 def create_rhodecode_setting(self, name, value, type_, cleanup=True):
1597 def create_rhodecode_setting(self, name, value, type_, cleanup=True):
1618 setting = RhodeCodeSetting(key=name, val=value, type=type_)
1598 setting = RhodeCodeSetting(key=name, val=value, type=type_)
1619 Session().add(setting)
1599 Session().add(setting)
1620 Session().commit()
1600 Session().commit()
1621
1601
1622 if cleanup:
1602 if cleanup:
1623 self.rhodecode_setting_ids.append(setting.app_settings_id)
1603 self.rhodecode_setting_ids.append(setting.app_settings_id)
1624
1604
1625 return setting
1605 return setting
1626
1606
1627 def cleanup(self):
1607 def cleanup(self):
1628 for id_ in self.rhodecode_ui_ids:
1608 for id_ in self.rhodecode_ui_ids:
1629 setting = RhodeCodeUi.get(id_)
1609 setting = RhodeCodeUi.get(id_)
1630 Session().delete(setting)
1610 Session().delete(setting)
1631
1611
1632 for id_ in self.rhodecode_setting_ids:
1612 for id_ in self.rhodecode_setting_ids:
1633 setting = RhodeCodeSetting.get(id_)
1613 setting = RhodeCodeSetting.get(id_)
1634 Session().delete(setting)
1614 Session().delete(setting)
1635
1615
1636 for id_ in self.repo_rhodecode_ui_ids:
1616 for id_ in self.repo_rhodecode_ui_ids:
1637 setting = RepoRhodeCodeUi.get(id_)
1617 setting = RepoRhodeCodeUi.get(id_)
1638 Session().delete(setting)
1618 Session().delete(setting)
1639
1619
1640 for id_ in self.repo_rhodecode_setting_ids:
1620 for id_ in self.repo_rhodecode_setting_ids:
1641 setting = RepoRhodeCodeSetting.get(id_)
1621 setting = RepoRhodeCodeSetting.get(id_)
1642 Session().delete(setting)
1622 Session().delete(setting)
1643
1623
1644 Session().commit()
1624 Session().commit()
1645
1625
1646
1626
1647 @pytest.fixture
1627 @pytest.fixture
1648 def no_notifications(request):
1628 def no_notifications(request):
1649 notification_patcher = mock.patch(
1629 notification_patcher = mock.patch(
1650 'rhodecode.model.notification.NotificationModel.create')
1630 'rhodecode.model.notification.NotificationModel.create')
1651 notification_patcher.start()
1631 notification_patcher.start()
1652 request.addfinalizer(notification_patcher.stop)
1632 request.addfinalizer(notification_patcher.stop)
1653
1633
1654
1634
1655 @pytest.fixture
1635 @pytest.fixture
1656 def silence_action_logger(request):
1636 def silence_action_logger(request):
1657 notification_patcher = mock.patch(
1637 notification_patcher = mock.patch(
1658 'rhodecode.lib.utils.action_logger')
1638 'rhodecode.lib.utils.action_logger')
1659 notification_patcher.start()
1639 notification_patcher.start()
1660 request.addfinalizer(notification_patcher.stop)
1640 request.addfinalizer(notification_patcher.stop)
1661
1641
1662
1642
1663 @pytest.fixture(scope='session')
1643 @pytest.fixture(scope='session')
1664 def repeat(request):
1644 def repeat(request):
1665 """
1645 """
1666 The number of repetitions is based on this fixture.
1646 The number of repetitions is based on this fixture.
1667
1647
1668 Slower calls may divide it by 10 or 100. It is chosen in a way so that the
1648 Slower calls may divide it by 10 or 100. It is chosen in a way so that the
1669 tests are not too slow in our default test suite.
1649 tests are not too slow in our default test suite.
1670 """
1650 """
1671 return request.config.getoption('--repeat')
1651 return request.config.getoption('--repeat')
1672
1652
1673
1653
1674 @pytest.fixture
1654 @pytest.fixture
1675 def rhodecode_fixtures():
1655 def rhodecode_fixtures():
1676 return Fixture()
1656 return Fixture()
1677
1657
1678
1658
1679 @pytest.fixture
1659 @pytest.fixture
1680 def request_stub():
1660 def request_stub():
1681 """
1661 """
1682 Stub request object.
1662 Stub request object.
1683 """
1663 """
1684 request = pyramid.testing.DummyRequest()
1664 request = pyramid.testing.DummyRequest()
1685 request.scheme = 'https'
1665 request.scheme = 'https'
1686 return request
1666 return request
1687
1667
1688
1668
1689 @pytest.fixture
1669 @pytest.fixture
1690 def config_stub(request, request_stub):
1670 def config_stub(request, request_stub):
1691 """
1671 """
1692 Set up pyramid.testing and return the Configurator.
1672 Set up pyramid.testing and return the Configurator.
1693 """
1673 """
1694 config = pyramid.testing.setUp(request=request_stub)
1674 config = pyramid.testing.setUp(request=request_stub)
1695
1675
1696 @request.addfinalizer
1676 @request.addfinalizer
1697 def cleanup():
1677 def cleanup():
1698 pyramid.testing.tearDown()
1678 pyramid.testing.tearDown()
1699
1679
1700 return config
1680 return config
1701
1681
1702
1682
1703 @pytest.fixture
1683 @pytest.fixture
1704 def StubIntegrationType():
1684 def StubIntegrationType():
1705 class _StubIntegrationType(IntegrationTypeBase):
1685 class _StubIntegrationType(IntegrationTypeBase):
1706 """ Test integration type class """
1686 """ Test integration type class """
1707
1687
1708 key = 'test'
1688 key = 'test'
1709 display_name = 'Test integration type'
1689 display_name = 'Test integration type'
1710 description = 'A test integration type for testing'
1690 description = 'A test integration type for testing'
1711 icon = 'test_icon_html_image'
1691 icon = 'test_icon_html_image'
1712
1692
1713 def __init__(self, settings):
1693 def __init__(self, settings):
1714 super(_StubIntegrationType, self).__init__(settings)
1694 super(_StubIntegrationType, self).__init__(settings)
1715 self.sent_events = [] # for testing
1695 self.sent_events = [] # for testing
1716
1696
1717 def send_event(self, event):
1697 def send_event(self, event):
1718 self.sent_events.append(event)
1698 self.sent_events.append(event)
1719
1699
1720 def settings_schema(self):
1700 def settings_schema(self):
1721 class SettingsSchema(colander.Schema):
1701 class SettingsSchema(colander.Schema):
1722 test_string_field = colander.SchemaNode(
1702 test_string_field = colander.SchemaNode(
1723 colander.String(),
1703 colander.String(),
1724 missing=colander.required,
1704 missing=colander.required,
1725 title='test string field',
1705 title='test string field',
1726 )
1706 )
1727 test_int_field = colander.SchemaNode(
1707 test_int_field = colander.SchemaNode(
1728 colander.Int(),
1708 colander.Int(),
1729 title='some integer setting',
1709 title='some integer setting',
1730 )
1710 )
1731 return SettingsSchema()
1711 return SettingsSchema()
1732
1712
1733
1713
1734 integration_type_registry.register_integration_type(_StubIntegrationType)
1714 integration_type_registry.register_integration_type(_StubIntegrationType)
1735 return _StubIntegrationType
1715 return _StubIntegrationType
1736
1716
1737 @pytest.fixture
1717 @pytest.fixture
1738 def stub_integration_settings():
1718 def stub_integration_settings():
1739 return {
1719 return {
1740 'test_string_field': 'some data',
1720 'test_string_field': 'some data',
1741 'test_int_field': 100,
1721 'test_int_field': 100,
1742 }
1722 }
1743
1723
1744
1724
1745 @pytest.fixture
1725 @pytest.fixture
1746 def repo_integration_stub(request, repo_stub, StubIntegrationType,
1726 def repo_integration_stub(request, repo_stub, StubIntegrationType,
1747 stub_integration_settings):
1727 stub_integration_settings):
1748 integration = IntegrationModel().create(
1728 integration = IntegrationModel().create(
1749 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1729 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1750 name='test repo integration',
1730 name='test repo integration',
1751 repo=repo_stub, repo_group=None, child_repos_only=None)
1731 repo=repo_stub, repo_group=None, child_repos_only=None)
1752
1732
1753 @request.addfinalizer
1733 @request.addfinalizer
1754 def cleanup():
1734 def cleanup():
1755 IntegrationModel().delete(integration)
1735 IntegrationModel().delete(integration)
1756
1736
1757 return integration
1737 return integration
1758
1738
1759
1739
1760 @pytest.fixture
1740 @pytest.fixture
1761 def repogroup_integration_stub(request, test_repo_group, StubIntegrationType,
1741 def repogroup_integration_stub(request, test_repo_group, StubIntegrationType,
1762 stub_integration_settings):
1742 stub_integration_settings):
1763 integration = IntegrationModel().create(
1743 integration = IntegrationModel().create(
1764 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1744 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1765 name='test repogroup integration',
1745 name='test repogroup integration',
1766 repo=None, repo_group=test_repo_group, child_repos_only=True)
1746 repo=None, repo_group=test_repo_group, child_repos_only=True)
1767
1747
1768 @request.addfinalizer
1748 @request.addfinalizer
1769 def cleanup():
1749 def cleanup():
1770 IntegrationModel().delete(integration)
1750 IntegrationModel().delete(integration)
1771
1751
1772 return integration
1752 return integration
1773
1753
1774
1754
1775 @pytest.fixture
1755 @pytest.fixture
1776 def repogroup_recursive_integration_stub(request, test_repo_group,
1756 def repogroup_recursive_integration_stub(request, test_repo_group,
1777 StubIntegrationType, stub_integration_settings):
1757 StubIntegrationType, stub_integration_settings):
1778 integration = IntegrationModel().create(
1758 integration = IntegrationModel().create(
1779 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1759 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1780 name='test recursive repogroup integration',
1760 name='test recursive repogroup integration',
1781 repo=None, repo_group=test_repo_group, child_repos_only=False)
1761 repo=None, repo_group=test_repo_group, child_repos_only=False)
1782
1762
1783 @request.addfinalizer
1763 @request.addfinalizer
1784 def cleanup():
1764 def cleanup():
1785 IntegrationModel().delete(integration)
1765 IntegrationModel().delete(integration)
1786
1766
1787 return integration
1767 return integration
1788
1768
1789
1769
1790 @pytest.fixture
1770 @pytest.fixture
1791 def global_integration_stub(request, StubIntegrationType,
1771 def global_integration_stub(request, StubIntegrationType,
1792 stub_integration_settings):
1772 stub_integration_settings):
1793 integration = IntegrationModel().create(
1773 integration = IntegrationModel().create(
1794 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1774 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1795 name='test global integration',
1775 name='test global integration',
1796 repo=None, repo_group=None, child_repos_only=None)
1776 repo=None, repo_group=None, child_repos_only=None)
1797
1777
1798 @request.addfinalizer
1778 @request.addfinalizer
1799 def cleanup():
1779 def cleanup():
1800 IntegrationModel().delete(integration)
1780 IntegrationModel().delete(integration)
1801
1781
1802 return integration
1782 return integration
1803
1783
1804
1784
1805 @pytest.fixture
1785 @pytest.fixture
1806 def root_repos_integration_stub(request, StubIntegrationType,
1786 def root_repos_integration_stub(request, StubIntegrationType,
1807 stub_integration_settings):
1787 stub_integration_settings):
1808 integration = IntegrationModel().create(
1788 integration = IntegrationModel().create(
1809 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1789 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1810 name='test global integration',
1790 name='test global integration',
1811 repo=None, repo_group=None, child_repos_only=True)
1791 repo=None, repo_group=None, child_repos_only=True)
1812
1792
1813 @request.addfinalizer
1793 @request.addfinalizer
1814 def cleanup():
1794 def cleanup():
1815 IntegrationModel().delete(integration)
1795 IntegrationModel().delete(integration)
1816
1796
1817 return integration
1797 return integration
1818
1798
1819
1799
1820 @pytest.fixture
1800 @pytest.fixture
1821 def local_dt_to_utc():
1801 def local_dt_to_utc():
1822 def _factory(dt):
1802 def _factory(dt):
1823 return dt.replace(tzinfo=dateutil.tz.tzlocal()).astimezone(
1803 return dt.replace(tzinfo=dateutil.tz.tzlocal()).astimezone(
1824 dateutil.tz.tzutc()).replace(tzinfo=None)
1804 dateutil.tz.tzutc()).replace(tzinfo=None)
1825 return _factory
1805 return _factory
@@ -1,471 +1,422 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2017 RhodeCode GmbH
3 # Copyright (C) 2010-2017 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import json
21 import json
22 import logging.config
22 import logging.config
23 import os
23 import os
24 import platform
24 import platform
25 import socket
25 import socket
26 import subprocess32
26 import subprocess32
27 import time
27 import time
28 from urllib2 import urlopen, URLError
28 from urllib2 import urlopen, URLError
29
29
30 import configobj
30 import configobj
31 import pylons
31 import pylons
32 import pytest
32 import pytest
33 import webob
33 import webob
34 from beaker.session import SessionObject
34 from beaker.session import SessionObject
35 from paste.deploy import loadapp
35 from paste.deploy import loadapp
36 from pylons.i18n.translation import _get_translator
36 from pylons.i18n.translation import _get_translator
37 from pylons.util import ContextObj
37 from pylons.util import ContextObj
38 from Pyro4.errors import CommunicationError
39 from routes.util import URLGenerator
38 from routes.util import URLGenerator
40
39
41 from rhodecode.lib import vcs
40 from rhodecode.lib import vcs
42 from rhodecode.tests.fixture import TestINI
41 from rhodecode.tests.fixture import TestINI
43 import rhodecode
42 import rhodecode
44
43
45
44
46 def _parse_json(value):
45 def _parse_json(value):
47 return json.loads(value) if value else None
46 return json.loads(value) if value else None
48
47
49
48
50 def pytest_addoption(parser):
49 def pytest_addoption(parser):
51 parser.addoption(
50 parser.addoption(
52 '--test-loglevel', dest='test_loglevel',
51 '--test-loglevel', dest='test_loglevel',
53 help="Set default Logging level for tests, warn (default), info, debug")
52 help="Set default Logging level for tests, warn (default), info, debug")
54 group = parser.getgroup('pylons')
53 group = parser.getgroup('pylons')
55 group.addoption(
54 group.addoption(
56 '--with-pylons', dest='pylons_config',
55 '--with-pylons', dest='pylons_config',
57 help="Set up a Pylons environment with the specified config file.")
56 help="Set up a Pylons environment with the specified config file.")
58 group.addoption(
57 group.addoption(
59 '--pylons-config-override', action='store', type=_parse_json,
58 '--pylons-config-override', action='store', type=_parse_json,
60 default=None, dest='pylons_config_override', help=(
59 default=None, dest='pylons_config_override', help=(
61 "Overrides the .ini file settings. Should be specified in JSON"
60 "Overrides the .ini file settings. Should be specified in JSON"
62 " format, e.g. '{\"section\": {\"parameter\": \"value\", ...}}'"
61 " format, e.g. '{\"section\": {\"parameter\": \"value\", ...}}'"
63 )
62 )
64 )
63 )
65 parser.addini(
64 parser.addini(
66 'pylons_config',
65 'pylons_config',
67 "Set up a Pylons environment with the specified config file.")
66 "Set up a Pylons environment with the specified config file.")
68
67
69 vcsgroup = parser.getgroup('vcs')
68 vcsgroup = parser.getgroup('vcs')
70 vcsgroup.addoption(
69 vcsgroup.addoption(
71 '--without-vcsserver', dest='with_vcsserver', action='store_false',
70 '--without-vcsserver', dest='with_vcsserver', action='store_false',
72 help="Do not start the VCSServer in a background process.")
71 help="Do not start the VCSServer in a background process.")
73 vcsgroup.addoption(
72 vcsgroup.addoption(
74 '--with-vcsserver', dest='vcsserver_config_pyro4',
75 help="Start the VCSServer with the specified config file.")
76 vcsgroup.addoption(
77 '--with-vcsserver-http', dest='vcsserver_config_http',
73 '--with-vcsserver-http', dest='vcsserver_config_http',
78 help="Start the HTTP VCSServer with the specified config file.")
74 help="Start the HTTP VCSServer with the specified config file.")
79 vcsgroup.addoption(
75 vcsgroup.addoption(
80 '--vcsserver-protocol', dest='vcsserver_protocol',
76 '--vcsserver-protocol', dest='vcsserver_protocol',
81 help="Start the VCSServer with HTTP / Pyro4 protocol support.")
77 help="Start the VCSServer with HTTP protocol support.")
82 vcsgroup.addoption(
78 vcsgroup.addoption(
83 '--vcsserver-config-override', action='store', type=_parse_json,
79 '--vcsserver-config-override', action='store', type=_parse_json,
84 default=None, dest='vcsserver_config_override', help=(
80 default=None, dest='vcsserver_config_override', help=(
85 "Overrides the .ini file settings for the VCSServer. "
81 "Overrides the .ini file settings for the VCSServer. "
86 "Should be specified in JSON "
82 "Should be specified in JSON "
87 "format, e.g. '{\"section\": {\"parameter\": \"value\", ...}}'"
83 "format, e.g. '{\"section\": {\"parameter\": \"value\", ...}}'"
88 )
84 )
89 )
85 )
90 vcsgroup.addoption(
86 vcsgroup.addoption(
91 '--vcsserver-port', action='store', type=int,
87 '--vcsserver-port', action='store', type=int,
92 default=None, help=(
88 default=None, help=(
93 "Allows to set the port of the vcsserver. Useful when testing "
89 "Allows to set the port of the vcsserver. Useful when testing "
94 "against an already running server and random ports cause "
90 "against an already running server and random ports cause "
95 "trouble."))
91 "trouble."))
96 parser.addini(
92 parser.addini(
97 'vcsserver_config_pyro4',
98 "Start the VCSServer with the specified config file.")
99 parser.addini(
100 'vcsserver_config_http',
93 'vcsserver_config_http',
101 "Start the HTTP VCSServer with the specified config file.")
94 "Start the HTTP VCSServer with the specified config file.")
102 parser.addini(
95 parser.addini(
103 'vcsserver_protocol',
96 'vcsserver_protocol',
104 "Start the VCSServer with HTTP / Pyro4 protocol support.")
97 "Start the VCSServer with HTTP protocol support.")
105
98
106
99
107 @pytest.fixture(scope='session')
100 @pytest.fixture(scope='session')
108 def vcsserver(request, vcsserver_port, vcsserver_factory):
101 def vcsserver(request, vcsserver_port, vcsserver_factory):
109 """
102 """
110 Session scope VCSServer.
103 Session scope VCSServer.
111
104
112 Tests wich need the VCSServer have to rely on this fixture in order
105 Tests wich need the VCSServer have to rely on this fixture in order
113 to ensure it will be running.
106 to ensure it will be running.
114
107
115 For specific needs, the fixture vcsserver_factory can be used. It allows to
108 For specific needs, the fixture vcsserver_factory can be used. It allows to
116 adjust the configuration file for the test run.
109 adjust the configuration file for the test run.
117
110
118 Command line args:
111 Command line args:
119
112
120 --without-vcsserver: Allows to switch this fixture off. You have to
113 --without-vcsserver: Allows to switch this fixture off. You have to
121 manually start the server.
114 manually start the server.
122
115
123 --vcsserver-port: Will expect the VCSServer to listen on this port.
116 --vcsserver-port: Will expect the VCSServer to listen on this port.
124 """
117 """
125
118
126 if not request.config.getoption('with_vcsserver'):
119 if not request.config.getoption('with_vcsserver'):
127 return None
120 return None
128
121
129 use_http = _use_vcs_http_server(request.config)
122 use_http = _use_vcs_http_server(request.config)
130 return vcsserver_factory(
123 return vcsserver_factory(
131 request, use_http=use_http, vcsserver_port=vcsserver_port)
124 request, use_http=use_http, vcsserver_port=vcsserver_port)
132
125
133
126
134 @pytest.fixture(scope='session')
127 @pytest.fixture(scope='session')
135 def vcsserver_factory(tmpdir_factory):
128 def vcsserver_factory(tmpdir_factory):
136 """
129 """
137 Use this if you need a running vcsserver with a special configuration.
130 Use this if you need a running vcsserver with a special configuration.
138 """
131 """
139
132
140 def factory(request, use_http=True, overrides=(), vcsserver_port=None):
133 def factory(request, use_http=True, overrides=(), vcsserver_port=None):
141
134
142 if vcsserver_port is None:
135 if vcsserver_port is None:
143 vcsserver_port = get_available_port()
136 vcsserver_port = get_available_port()
144
137
145 overrides = list(overrides)
138 overrides = list(overrides)
146 if use_http:
139 if use_http:
147 overrides.append({'server:main': {'port': vcsserver_port}})
140 overrides.append({'server:main': {'port': vcsserver_port}})
148 else:
141 else:
149 overrides.append({'DEFAULT': {'port': vcsserver_port}})
142 overrides.append({'DEFAULT': {'port': vcsserver_port}})
150
143
151 if is_cygwin():
144 if is_cygwin():
152 platform_override = {'DEFAULT': {
145 platform_override = {'DEFAULT': {
153 'beaker.cache.repo_object.type': 'nocache'}}
146 'beaker.cache.repo_object.type': 'nocache'}}
154 overrides.append(platform_override)
147 overrides.append(platform_override)
155
148
156 option_name = (
149 option_name = 'vcsserver_config_http' if use_http else ''
157 'vcsserver_config_http' if use_http else 'vcsserver_config_pyro4')
158 override_option_name = 'vcsserver_config_override'
150 override_option_name = 'vcsserver_config_override'
159 config_file = get_config(
151 config_file = get_config(
160 request.config, option_name=option_name,
152 request.config, option_name=option_name,
161 override_option_name=override_option_name, overrides=overrides,
153 override_option_name=override_option_name, overrides=overrides,
162 basetemp=tmpdir_factory.getbasetemp().strpath,
154 basetemp=tmpdir_factory.getbasetemp().strpath,
163 prefix='test_vcs_')
155 prefix='test_vcs_')
164
156
165 print "Using the VCSServer configuration", config_file
157 print("Using the VCSServer configuration:{}".format(config_file))
166 ServerClass = HttpVCSServer if use_http else Pyro4VCSServer
158 ServerClass = HttpVCSServer if use_http else None
167 server = ServerClass(config_file)
159 server = ServerClass(config_file)
168 server.start()
160 server.start()
169
161
170 @request.addfinalizer
162 @request.addfinalizer
171 def cleanup():
163 def cleanup():
172 server.shutdown()
164 server.shutdown()
173
165
174 server.wait_until_ready()
166 server.wait_until_ready()
175 return server
167 return server
176
168
177 return factory
169 return factory
178
170
179
171
180 def is_cygwin():
172 def is_cygwin():
181 return 'cygwin' in platform.system().lower()
173 return 'cygwin' in platform.system().lower()
182
174
183
175
184 def _use_vcs_http_server(config):
176 def _use_vcs_http_server(config):
185 protocol_option = 'vcsserver_protocol'
177 protocol_option = 'vcsserver_protocol'
186 protocol = (
178 protocol = (
187 config.getoption(protocol_option) or
179 config.getoption(protocol_option) or
188 config.getini(protocol_option) or
180 config.getini(protocol_option) or
189 'http')
181 'http')
190 return protocol == 'http'
182 return protocol == 'http'
191
183
192
184
193 def _use_log_level(config):
185 def _use_log_level(config):
194 level = config.getoption('test_loglevel') or 'warn'
186 level = config.getoption('test_loglevel') or 'warn'
195 return level.upper()
187 return level.upper()
196
188
197
189
198 class VCSServer(object):
190 class VCSServer(object):
199 """
191 """
200 Represents a running VCSServer instance.
192 Represents a running VCSServer instance.
201 """
193 """
202
194
203 _args = []
195 _args = []
204
196
205 def start(self):
197 def start(self):
206 print("Starting the VCSServer: {}".format(self._args))
198 print("Starting the VCSServer: {}".format(self._args))
207 self.process = subprocess32.Popen(self._args)
199 self.process = subprocess32.Popen(self._args)
208
200
209 def wait_until_ready(self, timeout=30):
201 def wait_until_ready(self, timeout=30):
210 raise NotImplementedError()
202 raise NotImplementedError()
211
203
212 def shutdown(self):
204 def shutdown(self):
213 self.process.kill()
205 self.process.kill()
214
206
215
207
216 class Pyro4VCSServer(VCSServer):
217 def __init__(self, config_file):
218 """
219 :param config_file: The config file to start the server with
220 """
221
222 config_data = configobj.ConfigObj(config_file)
223 self._config = config_data['DEFAULT']
224
225 args = ['vcsserver', '--config', config_file]
226 self._args = args
227
228 def wait_until_ready(self, timeout=30):
229 remote_server = vcs.create_vcsserver_proxy(
230 self.server_and_port, 'pyro4')
231 start = time.time()
232 with remote_server:
233 while time.time() - start < timeout:
234 try:
235 remote_server.ping()
236 break
237 except CommunicationError:
238 time.sleep(0.2)
239 else:
240 pytest.exit(
241 "Starting the VCSServer failed or took more than {} "
242 "seconds.".format(timeout))
243
244 @property
245 def server_and_port(self):
246 return '{host}:{port}'.format(**self._config)
247
248
249 class HttpVCSServer(VCSServer):
208 class HttpVCSServer(VCSServer):
250 """
209 """
251 Represents a running VCSServer instance.
210 Represents a running VCSServer instance.
252 """
211 """
253 def __init__(self, config_file):
212 def __init__(self, config_file):
254 config_data = configobj.ConfigObj(config_file)
213 config_data = configobj.ConfigObj(config_file)
255 self._config = config_data['server:main']
214 self._config = config_data['server:main']
256
215
257 args = ['pserve', config_file]
216 args = ['pserve', config_file]
258 self._args = args
217 self._args = args
259
218
260 @property
219 @property
261 def http_url(self):
220 def http_url(self):
262 template = 'http://{host}:{port}/'
221 template = 'http://{host}:{port}/'
263 return template.format(**self._config)
222 return template.format(**self._config)
264
223
265 def start(self):
224 def start(self):
266 self.process = subprocess32.Popen(self._args)
225 self.process = subprocess32.Popen(self._args)
267
226
268 def wait_until_ready(self, timeout=30):
227 def wait_until_ready(self, timeout=30):
269 host = self._config['host']
228 host = self._config['host']
270 port = self._config['port']
229 port = self._config['port']
271 status_url = 'http://{host}:{port}/status'.format(host=host, port=port)
230 status_url = 'http://{host}:{port}/status'.format(host=host, port=port)
272 start = time.time()
231 start = time.time()
273
232
274 while time.time() - start < timeout:
233 while time.time() - start < timeout:
275 try:
234 try:
276 urlopen(status_url)
235 urlopen(status_url)
277 break
236 break
278 except URLError:
237 except URLError:
279 time.sleep(0.2)
238 time.sleep(0.2)
280 else:
239 else:
281 pytest.exit(
240 pytest.exit(
282 "Starting the VCSServer failed or took more than {} "
241 "Starting the VCSServer failed or took more than {} "
283 "seconds. cmd: `{}`".format(timeout, ' '.join(self._args)))
242 "seconds. cmd: `{}`".format(timeout, ' '.join(self._args)))
284
243
285 def shutdown(self):
244 def shutdown(self):
286 self.process.kill()
245 self.process.kill()
287
246
288
247
289 @pytest.fixture(scope='session')
248 @pytest.fixture(scope='session')
290 def pylons_config(request, tmpdir_factory, rcserver_port, vcsserver_port):
249 def pylons_config(request, tmpdir_factory, rcserver_port, vcsserver_port):
291 option_name = 'pylons_config'
250 option_name = 'pylons_config'
292 log_level = _use_log_level(request.config)
251 log_level = _use_log_level(request.config)
293
252
294 overrides = [
253 overrides = [
295 {'server:main': {'port': rcserver_port}},
254 {'server:main': {'port': rcserver_port}},
296 {'app:main': {
255 {'app:main': {
297 'vcs.server': 'localhost:%s' % vcsserver_port,
256 'vcs.server': 'localhost:%s' % vcsserver_port,
298 # johbo: We will always start the VCSServer on our own based on the
257 # johbo: We will always start the VCSServer on our own based on the
299 # fixtures of the test cases. For the test run it must always be
258 # fixtures of the test cases. For the test run it must always be
300 # off in the INI file.
259 # off in the INI file.
301 'vcs.start_server': 'false',
260 'vcs.start_server': 'false',
302 }},
261 }},
303
262
304 {'handler_console': {
263 {'handler_console': {
305 'class ': 'StreamHandler',
264 'class ': 'StreamHandler',
306 'args ': '(sys.stderr,)',
265 'args ': '(sys.stderr,)',
307 'level': log_level,
266 'level': log_level,
308 }},
267 }},
309
268
310 ]
269 ]
311 if _use_vcs_http_server(request.config):
270 if _use_vcs_http_server(request.config):
312 overrides.append({
271 overrides.append({
313 'app:main': {
272 'app:main': {
314 'vcs.server.protocol': 'http',
273 'vcs.server.protocol': 'http',
315 'vcs.scm_app_implementation': 'http',
274 'vcs.scm_app_implementation': 'http',
316 'vcs.hooks.protocol': 'http',
275 'vcs.hooks.protocol': 'http',
317 }
276 }
318 })
277 })
319 else:
320 overrides.append({
321 'app:main': {
322 'vcs.server.protocol': 'pyro4',
323 'vcs.scm_app_implementation': 'pyro4',
324 'vcs.hooks.protocol': 'pyro4',
325 }
326 })
327
278
328 filename = get_config(
279 filename = get_config(
329 request.config, option_name=option_name,
280 request.config, option_name=option_name,
330 override_option_name='{}_override'.format(option_name),
281 override_option_name='{}_override'.format(option_name),
331 overrides=overrides,
282 overrides=overrides,
332 basetemp=tmpdir_factory.getbasetemp().strpath,
283 basetemp=tmpdir_factory.getbasetemp().strpath,
333 prefix='test_rce_')
284 prefix='test_rce_')
334 return filename
285 return filename
335
286
336
287
337 @pytest.fixture(scope='session')
288 @pytest.fixture(scope='session')
338 def rcserver_port(request):
289 def rcserver_port(request):
339 port = get_available_port()
290 port = get_available_port()
340 print 'Using rcserver port %s' % (port, )
291 print('Using rcserver port {}'.format(port))
341 return port
292 return port
342
293
343
294
344 @pytest.fixture(scope='session')
295 @pytest.fixture(scope='session')
345 def vcsserver_port(request):
296 def vcsserver_port(request):
346 port = request.config.getoption('--vcsserver-port')
297 port = request.config.getoption('--vcsserver-port')
347 if port is None:
298 if port is None:
348 port = get_available_port()
299 port = get_available_port()
349 print 'Using vcsserver port %s' % (port, )
300 print('Using vcsserver port {}'.format(port))
350 return port
301 return port
351
302
352
303
353 def get_available_port():
304 def get_available_port():
354 family = socket.AF_INET
305 family = socket.AF_INET
355 socktype = socket.SOCK_STREAM
306 socktype = socket.SOCK_STREAM
356 host = '127.0.0.1'
307 host = '127.0.0.1'
357
308
358 mysocket = socket.socket(family, socktype)
309 mysocket = socket.socket(family, socktype)
359 mysocket.bind((host, 0))
310 mysocket.bind((host, 0))
360 port = mysocket.getsockname()[1]
311 port = mysocket.getsockname()[1]
361 mysocket.close()
312 mysocket.close()
362 del mysocket
313 del mysocket
363 return port
314 return port
364
315
365
316
366 @pytest.fixture(scope='session')
317 @pytest.fixture(scope='session')
367 def available_port_factory():
318 def available_port_factory():
368 """
319 """
369 Returns a callable which returns free port numbers.
320 Returns a callable which returns free port numbers.
370 """
321 """
371 return get_available_port
322 return get_available_port
372
323
373
324
374 @pytest.fixture
325 @pytest.fixture
375 def available_port(available_port_factory):
326 def available_port(available_port_factory):
376 """
327 """
377 Gives you one free port for the current test.
328 Gives you one free port for the current test.
378
329
379 Uses "available_port_factory" to retrieve the port.
330 Uses "available_port_factory" to retrieve the port.
380 """
331 """
381 return available_port_factory()
332 return available_port_factory()
382
333
383
334
384 @pytest.fixture(scope='session')
335 @pytest.fixture(scope='session')
385 def pylonsapp(pylons_config, vcsserver, http_environ_session):
336 def pylonsapp(pylons_config, vcsserver, http_environ_session):
386 print "Using the RhodeCode configuration", pylons_config
337 print("Using the RhodeCode configuration:{}".format(pylons_config))
387 logging.config.fileConfig(
338 logging.config.fileConfig(
388 pylons_config, disable_existing_loggers=False)
339 pylons_config, disable_existing_loggers=False)
389 app = _setup_pylons_environment(pylons_config, http_environ_session)
340 app = _setup_pylons_environment(pylons_config, http_environ_session)
390 return app
341 return app
391
342
392
343
393 @pytest.fixture(scope='session')
344 @pytest.fixture(scope='session')
394 def testini_factory(tmpdir_factory, pylons_config):
345 def testini_factory(tmpdir_factory, pylons_config):
395 """
346 """
396 Factory to create an INI file based on TestINI.
347 Factory to create an INI file based on TestINI.
397
348
398 It will make sure to place the INI file in the correct directory.
349 It will make sure to place the INI file in the correct directory.
399 """
350 """
400 basetemp = tmpdir_factory.getbasetemp().strpath
351 basetemp = tmpdir_factory.getbasetemp().strpath
401 return TestIniFactory(basetemp, pylons_config)
352 return TestIniFactory(basetemp, pylons_config)
402
353
403
354
404 class TestIniFactory(object):
355 class TestIniFactory(object):
405
356
406 def __init__(self, basetemp, template_ini):
357 def __init__(self, basetemp, template_ini):
407 self._basetemp = basetemp
358 self._basetemp = basetemp
408 self._template_ini = template_ini
359 self._template_ini = template_ini
409
360
410 def __call__(self, ini_params, new_file_prefix='test'):
361 def __call__(self, ini_params, new_file_prefix='test'):
411 ini_file = TestINI(
362 ini_file = TestINI(
412 self._template_ini, ini_params=ini_params,
363 self._template_ini, ini_params=ini_params,
413 new_file_prefix=new_file_prefix, dir=self._basetemp)
364 new_file_prefix=new_file_prefix, dir=self._basetemp)
414 result = ini_file.create()
365 result = ini_file.create()
415 return result
366 return result
416
367
417
368
418 def get_config(
369 def get_config(
419 config, option_name, override_option_name, overrides=None,
370 config, option_name, override_option_name, overrides=None,
420 basetemp=None, prefix='test'):
371 basetemp=None, prefix='test'):
421 """
372 """
422 Find a configuration file and apply overrides for the given `prefix`.
373 Find a configuration file and apply overrides for the given `prefix`.
423 """
374 """
424 config_file = (
375 config_file = (
425 config.getoption(option_name) or config.getini(option_name))
376 config.getoption(option_name) or config.getini(option_name))
426 if not config_file:
377 if not config_file:
427 pytest.exit(
378 pytest.exit(
428 "Configuration error, could not extract {}.".format(option_name))
379 "Configuration error, could not extract {}.".format(option_name))
429
380
430 overrides = overrides or []
381 overrides = overrides or []
431 config_override = config.getoption(override_option_name)
382 config_override = config.getoption(override_option_name)
432 if config_override:
383 if config_override:
433 overrides.append(config_override)
384 overrides.append(config_override)
434 temp_ini_file = TestINI(
385 temp_ini_file = TestINI(
435 config_file, ini_params=overrides, new_file_prefix=prefix,
386 config_file, ini_params=overrides, new_file_prefix=prefix,
436 dir=basetemp)
387 dir=basetemp)
437
388
438 return temp_ini_file.create()
389 return temp_ini_file.create()
439
390
440
391
441 def _setup_pylons_environment(pylons_config, http_environ):
392 def _setup_pylons_environment(pylons_config, http_environ):
442 current_path = os.getcwd()
393 current_path = os.getcwd()
443 pylonsapp = loadapp(
394 pylonsapp = loadapp(
444 'config:' + pylons_config, relative_to=current_path)
395 'config:' + pylons_config, relative_to=current_path)
445
396
446 # Using rhodecode.CONFIG which is assigned during "load_environment".
397 # Using rhodecode.CONFIG which is assigned during "load_environment".
447 # The indirect approach is used, because "pylonsapp" may actually be
398 # The indirect approach is used, because "pylonsapp" may actually be
448 # the Pyramid application.
399 # the Pyramid application.
449 pylonsapp_config = rhodecode.CONFIG
400 pylonsapp_config = rhodecode.CONFIG
450 _init_stack(pylonsapp_config, environ=http_environ)
401 _init_stack(pylonsapp_config, environ=http_environ)
451
402
452 # For compatibility add the attribute "config" which would be
403 # For compatibility add the attribute "config" which would be
453 # present on the Pylons application.
404 # present on the Pylons application.
454 pylonsapp.config = pylonsapp_config
405 pylonsapp.config = pylonsapp_config
455 return pylonsapp
406 return pylonsapp
456
407
457
408
458 def _init_stack(config=None, environ=None):
409 def _init_stack(config=None, environ=None):
459 if not config:
410 if not config:
460 config = pylons.test.pylonsapp.config
411 config = pylons.test.pylonsapp.config
461 if not environ:
412 if not environ:
462 environ = {}
413 environ = {}
463 pylons.url._push_object(URLGenerator(config['routes.map'], environ or {}))
414 pylons.url._push_object(URLGenerator(config['routes.map'], environ or {}))
464 pylons.app_globals._push_object(config['pylons.app_globals'])
415 pylons.app_globals._push_object(config['pylons.app_globals'])
465 pylons.config._push_object(config)
416 pylons.config._push_object(config)
466 pylons.tmpl_context._push_object(ContextObj())
417 pylons.tmpl_context._push_object(ContextObj())
467 # Initialize a translator for tests that utilize i18n
418 # Initialize a translator for tests that utilize i18n
468 translator = _get_translator(pylons.config.get('lang'))
419 translator = _get_translator(pylons.config.get('lang'))
469 pylons.translator._push_object(translator)
420 pylons.translator._push_object(translator)
470 pylons.session._push_object(SessionObject(environ or {}))
421 pylons.session._push_object(SessionObject(environ or {}))
471 pylons.request._push_object(webob.Request.blank('', environ=environ))
422 pylons.request._push_object(webob.Request.blank('', environ=environ))
@@ -1,716 +1,707 b''
1
1
2
2
3 ################################################################################
3 ################################################################################
4 ## RHODECODE ENTERPRISE CONFIGURATION ##
4 ## RHODECODE ENTERPRISE CONFIGURATION ##
5 # The %(here)s variable will be replaced with the parent directory of this file#
5 # The %(here)s variable will be replaced with the parent directory of this file#
6 ################################################################################
6 ################################################################################
7
7
8 [DEFAULT]
8 [DEFAULT]
9 debug = true
9 debug = true
10
10
11 ################################################################################
11 ################################################################################
12 ## EMAIL CONFIGURATION ##
12 ## EMAIL CONFIGURATION ##
13 ## Uncomment and replace with the email address which should receive ##
13 ## Uncomment and replace with the email address which should receive ##
14 ## any error reports after an application crash ##
14 ## any error reports after an application crash ##
15 ## Additionally these settings will be used by the RhodeCode mailing system ##
15 ## Additionally these settings will be used by the RhodeCode mailing system ##
16 ################################################################################
16 ################################################################################
17
17
18 ## prefix all emails subjects with given prefix, helps filtering out emails
18 ## prefix all emails subjects with given prefix, helps filtering out emails
19 #email_prefix = [RhodeCode]
19 #email_prefix = [RhodeCode]
20
20
21 ## email FROM address all mails will be sent
21 ## email FROM address all mails will be sent
22 #app_email_from = rhodecode-noreply@localhost
22 #app_email_from = rhodecode-noreply@localhost
23
23
24 ## Uncomment and replace with the address which should receive any error report
24 ## Uncomment and replace with the address which should receive any error report
25 ## note: using appenlight for error handling doesn't need this to be uncommented
25 ## note: using appenlight for error handling doesn't need this to be uncommented
26 #email_to = admin@localhost
26 #email_to = admin@localhost
27
27
28 ## in case of Application errors, sent an error email form
28 ## in case of Application errors, sent an error email form
29 #error_email_from = rhodecode_error@localhost
29 #error_email_from = rhodecode_error@localhost
30
30
31 ## additional error message to be send in case of server crash
31 ## additional error message to be send in case of server crash
32 #error_message =
32 #error_message =
33
33
34
34
35 #smtp_server = mail.server.com
35 #smtp_server = mail.server.com
36 #smtp_username =
36 #smtp_username =
37 #smtp_password =
37 #smtp_password =
38 #smtp_port =
38 #smtp_port =
39 #smtp_use_tls = false
39 #smtp_use_tls = false
40 #smtp_use_ssl = true
40 #smtp_use_ssl = true
41 ## Specify available auth parameters here (e.g. LOGIN PLAIN CRAM-MD5, etc.)
41 ## Specify available auth parameters here (e.g. LOGIN PLAIN CRAM-MD5, etc.)
42 #smtp_auth =
42 #smtp_auth =
43
43
44 [server:main]
44 [server:main]
45 ## COMMON ##
45 ## COMMON ##
46 host = 0.0.0.0
46 host = 0.0.0.0
47 port = 5000
47 port = 5000
48
48
49 ##################################
49 ##################################
50 ## WAITRESS WSGI SERVER ##
50 ## WAITRESS WSGI SERVER ##
51 ## Recommended for Development ##
51 ## Recommended for Development ##
52 ##################################
52 ##################################
53
53
54 use = egg:waitress#main
54 use = egg:waitress#main
55 ## number of worker threads
55 ## number of worker threads
56 threads = 5
56 threads = 5
57 ## MAX BODY SIZE 100GB
57 ## MAX BODY SIZE 100GB
58 max_request_body_size = 107374182400
58 max_request_body_size = 107374182400
59 ## Use poll instead of select, fixes file descriptors limits problems.
59 ## Use poll instead of select, fixes file descriptors limits problems.
60 ## May not work on old windows systems.
60 ## May not work on old windows systems.
61 asyncore_use_poll = true
61 asyncore_use_poll = true
62
62
63
63
64 ##########################
64 ##########################
65 ## GUNICORN WSGI SERVER ##
65 ## GUNICORN WSGI SERVER ##
66 ##########################
66 ##########################
67 ## run with gunicorn --log-config <inifile.ini> --paste <inifile.ini>
67 ## run with gunicorn --log-config <inifile.ini> --paste <inifile.ini>
68
68
69 #use = egg:gunicorn#main
69 #use = egg:gunicorn#main
70 ## Sets the number of process workers. You must set `instance_id = *`
70 ## Sets the number of process workers. You must set `instance_id = *`
71 ## when this option is set to more than one worker, recommended
71 ## when this option is set to more than one worker, recommended
72 ## value is (2 * NUMBER_OF_CPUS + 1), eg 2CPU = 5 workers
72 ## value is (2 * NUMBER_OF_CPUS + 1), eg 2CPU = 5 workers
73 ## The `instance_id = *` must be set in the [app:main] section below
73 ## The `instance_id = *` must be set in the [app:main] section below
74 #workers = 2
74 #workers = 2
75 ## number of threads for each of the worker, must be set to 1 for gevent
75 ## number of threads for each of the worker, must be set to 1 for gevent
76 ## generally recommened to be at 1
76 ## generally recommened to be at 1
77 #threads = 1
77 #threads = 1
78 ## process name
78 ## process name
79 #proc_name = rhodecode
79 #proc_name = rhodecode
80 ## type of worker class, one of sync, gevent
80 ## type of worker class, one of sync, gevent
81 ## recommended for bigger setup is using of of other than sync one
81 ## recommended for bigger setup is using of of other than sync one
82 #worker_class = sync
82 #worker_class = sync
83 ## The maximum number of simultaneous clients. Valid only for Gevent
83 ## The maximum number of simultaneous clients. Valid only for Gevent
84 #worker_connections = 10
84 #worker_connections = 10
85 ## max number of requests that worker will handle before being gracefully
85 ## max number of requests that worker will handle before being gracefully
86 ## restarted, could prevent memory leaks
86 ## restarted, could prevent memory leaks
87 #max_requests = 1000
87 #max_requests = 1000
88 #max_requests_jitter = 30
88 #max_requests_jitter = 30
89 ## amount of time a worker can spend with handling a request before it
89 ## amount of time a worker can spend with handling a request before it
90 ## gets killed and restarted. Set to 6hrs
90 ## gets killed and restarted. Set to 6hrs
91 #timeout = 21600
91 #timeout = 21600
92
92
93 ## UWSGI ##
93 ## UWSGI ##
94 ## run with uwsgi --ini-paste-logged <inifile.ini>
94 ## run with uwsgi --ini-paste-logged <inifile.ini>
95 #[uwsgi]
95 #[uwsgi]
96 #socket = /tmp/uwsgi.sock
96 #socket = /tmp/uwsgi.sock
97 #master = true
97 #master = true
98 #http = 127.0.0.1:5000
98 #http = 127.0.0.1:5000
99
99
100 ## set as deamon and redirect all output to file
100 ## set as deamon and redirect all output to file
101 #daemonize = ./uwsgi_rhodecode.log
101 #daemonize = ./uwsgi_rhodecode.log
102
102
103 ## master process PID
103 ## master process PID
104 #pidfile = ./uwsgi_rhodecode.pid
104 #pidfile = ./uwsgi_rhodecode.pid
105
105
106 ## stats server with workers statistics, use uwsgitop
106 ## stats server with workers statistics, use uwsgitop
107 ## for monitoring, `uwsgitop 127.0.0.1:1717`
107 ## for monitoring, `uwsgitop 127.0.0.1:1717`
108 #stats = 127.0.0.1:1717
108 #stats = 127.0.0.1:1717
109 #memory-report = true
109 #memory-report = true
110
110
111 ## log 5XX errors
111 ## log 5XX errors
112 #log-5xx = true
112 #log-5xx = true
113
113
114 ## Set the socket listen queue size.
114 ## Set the socket listen queue size.
115 #listen = 256
115 #listen = 256
116
116
117 ## Gracefully Reload workers after the specified amount of managed requests
117 ## Gracefully Reload workers after the specified amount of managed requests
118 ## (avoid memory leaks).
118 ## (avoid memory leaks).
119 #max-requests = 1000
119 #max-requests = 1000
120
120
121 ## enable large buffers
121 ## enable large buffers
122 #buffer-size=65535
122 #buffer-size=65535
123
123
124 ## socket and http timeouts ##
124 ## socket and http timeouts ##
125 #http-timeout=3600
125 #http-timeout=3600
126 #socket-timeout=3600
126 #socket-timeout=3600
127
127
128 ## Log requests slower than the specified number of milliseconds.
128 ## Log requests slower than the specified number of milliseconds.
129 #log-slow = 10
129 #log-slow = 10
130
130
131 ## Exit if no app can be loaded.
131 ## Exit if no app can be loaded.
132 #need-app = true
132 #need-app = true
133
133
134 ## Set lazy mode (load apps in workers instead of master).
134 ## Set lazy mode (load apps in workers instead of master).
135 #lazy = true
135 #lazy = true
136
136
137 ## scaling ##
137 ## scaling ##
138 ## set cheaper algorithm to use, if not set default will be used
138 ## set cheaper algorithm to use, if not set default will be used
139 #cheaper-algo = spare
139 #cheaper-algo = spare
140
140
141 ## minimum number of workers to keep at all times
141 ## minimum number of workers to keep at all times
142 #cheaper = 1
142 #cheaper = 1
143
143
144 ## number of workers to spawn at startup
144 ## number of workers to spawn at startup
145 #cheaper-initial = 1
145 #cheaper-initial = 1
146
146
147 ## maximum number of workers that can be spawned
147 ## maximum number of workers that can be spawned
148 #workers = 4
148 #workers = 4
149
149
150 ## how many workers should be spawned at a time
150 ## how many workers should be spawned at a time
151 #cheaper-step = 1
151 #cheaper-step = 1
152
152
153 ## prefix middleware for RhodeCode.
153 ## prefix middleware for RhodeCode.
154 ## recommended when using proxy setup.
154 ## recommended when using proxy setup.
155 ## allows to set RhodeCode under a prefix in server.
155 ## allows to set RhodeCode under a prefix in server.
156 ## eg https://server.com/<prefix>. Enable `filter-with =` option below as well.
156 ## eg https://server.com/<prefix>. Enable `filter-with =` option below as well.
157 ## optionally set prefix like: `prefix = /<your-prefix>`
157 ## optionally set prefix like: `prefix = /<your-prefix>`
158 [filter:proxy-prefix]
158 [filter:proxy-prefix]
159 use = egg:PasteDeploy#prefix
159 use = egg:PasteDeploy#prefix
160 prefix = /
160 prefix = /
161
161
162 [app:main]
162 [app:main]
163 is_test = True
163 is_test = True
164 use = egg:rhodecode-enterprise-ce
164 use = egg:rhodecode-enterprise-ce
165
165
166 ## enable proxy prefix middleware, defined above
166 ## enable proxy prefix middleware, defined above
167 #filter-with = proxy-prefix
167 #filter-with = proxy-prefix
168
168
169
169
170 ## RHODECODE PLUGINS ##
170 ## RHODECODE PLUGINS ##
171 rhodecode.includes = rhodecode.api
171 rhodecode.includes = rhodecode.api
172
172
173 # api prefix url
173 # api prefix url
174 rhodecode.api.url = /_admin/api
174 rhodecode.api.url = /_admin/api
175
175
176
176
177 ## END RHODECODE PLUGINS ##
177 ## END RHODECODE PLUGINS ##
178
178
179 ## encryption key used to encrypt social plugin tokens,
179 ## encryption key used to encrypt social plugin tokens,
180 ## remote_urls with credentials etc, if not set it defaults to
180 ## remote_urls with credentials etc, if not set it defaults to
181 ## `beaker.session.secret`
181 ## `beaker.session.secret`
182 #rhodecode.encrypted_values.secret =
182 #rhodecode.encrypted_values.secret =
183
183
184 ## decryption strict mode (enabled by default). It controls if decryption raises
184 ## decryption strict mode (enabled by default). It controls if decryption raises
185 ## `SignatureVerificationError` in case of wrong key, or damaged encryption data.
185 ## `SignatureVerificationError` in case of wrong key, or damaged encryption data.
186 #rhodecode.encrypted_values.strict = false
186 #rhodecode.encrypted_values.strict = false
187
187
188 ## return gzipped responses from Rhodecode (static files/application)
188 ## return gzipped responses from Rhodecode (static files/application)
189 gzip_responses = false
189 gzip_responses = false
190
190
191 ## autogenerate javascript routes file on startup
191 ## autogenerate javascript routes file on startup
192 generate_js_files = false
192 generate_js_files = false
193
193
194 ## Optional Languages
194 ## Optional Languages
195 ## en(default), be, de, es, fr, it, ja, pl, pt, ru, zh
195 ## en(default), be, de, es, fr, it, ja, pl, pt, ru, zh
196 lang = en
196 lang = en
197
197
198 ## perform a full repository scan on each server start, this should be
198 ## perform a full repository scan on each server start, this should be
199 ## set to false after first startup, to allow faster server restarts.
199 ## set to false after first startup, to allow faster server restarts.
200 startup.import_repos = true
200 startup.import_repos = true
201
201
202 ## Uncomment and set this path to use archive download cache.
202 ## Uncomment and set this path to use archive download cache.
203 ## Once enabled, generated archives will be cached at this location
203 ## Once enabled, generated archives will be cached at this location
204 ## and served from the cache during subsequent requests for the same archive of
204 ## and served from the cache during subsequent requests for the same archive of
205 ## the repository.
205 ## the repository.
206 #archive_cache_dir = /tmp/tarballcache
206 #archive_cache_dir = /tmp/tarballcache
207
207
208 ## change this to unique ID for security
208 ## change this to unique ID for security
209 app_instance_uuid = rc-production
209 app_instance_uuid = rc-production
210
210
211 ## cut off limit for large diffs (size in bytes)
211 ## cut off limit for large diffs (size in bytes)
212 cut_off_limit_diff = 1024000
212 cut_off_limit_diff = 1024000
213 cut_off_limit_file = 256000
213 cut_off_limit_file = 256000
214
214
215 ## use cache version of scm repo everywhere
215 ## use cache version of scm repo everywhere
216 vcs_full_cache = false
216 vcs_full_cache = false
217
217
218 ## force https in RhodeCode, fixes https redirects, assumes it's always https
218 ## force https in RhodeCode, fixes https redirects, assumes it's always https
219 ## Normally this is controlled by proper http flags sent from http server
219 ## Normally this is controlled by proper http flags sent from http server
220 force_https = false
220 force_https = false
221
221
222 ## use Strict-Transport-Security headers
222 ## use Strict-Transport-Security headers
223 use_htsts = false
223 use_htsts = false
224
224
225 ## number of commits stats will parse on each iteration
225 ## number of commits stats will parse on each iteration
226 commit_parse_limit = 25
226 commit_parse_limit = 25
227
227
228 ## git rev filter option, --all is the default filter, if you need to
228 ## git rev filter option, --all is the default filter, if you need to
229 ## hide all refs in changelog switch this to --branches --tags
229 ## hide all refs in changelog switch this to --branches --tags
230 git_rev_filter = --all
230 git_rev_filter = --all
231
231
232 # Set to true if your repos are exposed using the dumb protocol
232 # Set to true if your repos are exposed using the dumb protocol
233 git_update_server_info = false
233 git_update_server_info = false
234
234
235 ## RSS/ATOM feed options
235 ## RSS/ATOM feed options
236 rss_cut_off_limit = 256000
236 rss_cut_off_limit = 256000
237 rss_items_per_page = 10
237 rss_items_per_page = 10
238 rss_include_diff = false
238 rss_include_diff = false
239
239
240 ## gist URL alias, used to create nicer urls for gist. This should be an
240 ## gist URL alias, used to create nicer urls for gist. This should be an
241 ## url that does rewrites to _admin/gists/<gistid>.
241 ## url that does rewrites to _admin/gists/<gistid>.
242 ## example: http://gist.rhodecode.org/{gistid}. Empty means use the internal
242 ## example: http://gist.rhodecode.org/{gistid}. Empty means use the internal
243 ## RhodeCode url, ie. http[s]://rhodecode.server/_admin/gists/<gistid>
243 ## RhodeCode url, ie. http[s]://rhodecode.server/_admin/gists/<gistid>
244 gist_alias_url =
244 gist_alias_url =
245
245
246 ## List of controllers (using glob pattern syntax) that AUTH TOKENS could be
246 ## List of controllers (using glob pattern syntax) that AUTH TOKENS could be
247 ## used for access.
247 ## used for access.
248 ## Adding ?auth_token = <token> to the url authenticates this request as if it
248 ## Adding ?auth_token = <token> to the url authenticates this request as if it
249 ## came from the the logged in user who own this authentication token.
249 ## came from the the logged in user who own this authentication token.
250 ##
250 ##
251 ## Syntax is <ControllerClass>:<function_pattern>.
251 ## Syntax is <ControllerClass>:<function_pattern>.
252 ## To enable access to raw_files put `FilesController:raw`.
252 ## To enable access to raw_files put `FilesController:raw`.
253 ## To enable access to patches add `ChangesetController:changeset_patch`.
253 ## To enable access to patches add `ChangesetController:changeset_patch`.
254 ## The list should be "," separated and on a single line.
254 ## The list should be "," separated and on a single line.
255 ##
255 ##
256 ## Recommended controllers to enable:
256 ## Recommended controllers to enable:
257 # ChangesetController:changeset_patch,
257 # ChangesetController:changeset_patch,
258 # ChangesetController:changeset_raw,
258 # ChangesetController:changeset_raw,
259 # FilesController:raw,
259 # FilesController:raw,
260 # FilesController:archivefile,
260 # FilesController:archivefile,
261 # GistsController:*,
261 # GistsController:*,
262 api_access_controllers_whitelist =
262 api_access_controllers_whitelist =
263
263
264 ## default encoding used to convert from and to unicode
264 ## default encoding used to convert from and to unicode
265 ## can be also a comma separated list of encoding in case of mixed encodings
265 ## can be also a comma separated list of encoding in case of mixed encodings
266 default_encoding = UTF-8
266 default_encoding = UTF-8
267
267
268 ## instance-id prefix
268 ## instance-id prefix
269 ## a prefix key for this instance used for cache invalidation when running
269 ## a prefix key for this instance used for cache invalidation when running
270 ## multiple instances of rhodecode, make sure it's globally unique for
270 ## multiple instances of rhodecode, make sure it's globally unique for
271 ## all running rhodecode instances. Leave empty if you don't use it
271 ## all running rhodecode instances. Leave empty if you don't use it
272 instance_id =
272 instance_id =
273
273
274 ## Fallback authentication plugin. Set this to a plugin ID to force the usage
274 ## Fallback authentication plugin. Set this to a plugin ID to force the usage
275 ## of an authentication plugin also if it is disabled by it's settings.
275 ## of an authentication plugin also if it is disabled by it's settings.
276 ## This could be useful if you are unable to log in to the system due to broken
276 ## This could be useful if you are unable to log in to the system due to broken
277 ## authentication settings. Then you can enable e.g. the internal rhodecode auth
277 ## authentication settings. Then you can enable e.g. the internal rhodecode auth
278 ## module to log in again and fix the settings.
278 ## module to log in again and fix the settings.
279 ##
279 ##
280 ## Available builtin plugin IDs (hash is part of the ID):
280 ## Available builtin plugin IDs (hash is part of the ID):
281 ## egg:rhodecode-enterprise-ce#rhodecode
281 ## egg:rhodecode-enterprise-ce#rhodecode
282 ## egg:rhodecode-enterprise-ce#pam
282 ## egg:rhodecode-enterprise-ce#pam
283 ## egg:rhodecode-enterprise-ce#ldap
283 ## egg:rhodecode-enterprise-ce#ldap
284 ## egg:rhodecode-enterprise-ce#jasig_cas
284 ## egg:rhodecode-enterprise-ce#jasig_cas
285 ## egg:rhodecode-enterprise-ce#headers
285 ## egg:rhodecode-enterprise-ce#headers
286 ## egg:rhodecode-enterprise-ce#crowd
286 ## egg:rhodecode-enterprise-ce#crowd
287 #rhodecode.auth_plugin_fallback = egg:rhodecode-enterprise-ce#rhodecode
287 #rhodecode.auth_plugin_fallback = egg:rhodecode-enterprise-ce#rhodecode
288
288
289 ## alternative return HTTP header for failed authentication. Default HTTP
289 ## alternative return HTTP header for failed authentication. Default HTTP
290 ## response is 401 HTTPUnauthorized. Currently HG clients have troubles with
290 ## response is 401 HTTPUnauthorized. Currently HG clients have troubles with
291 ## handling that causing a series of failed authentication calls.
291 ## handling that causing a series of failed authentication calls.
292 ## Set this variable to 403 to return HTTPForbidden, or any other HTTP code
292 ## Set this variable to 403 to return HTTPForbidden, or any other HTTP code
293 ## This will be served instead of default 401 on bad authnetication
293 ## This will be served instead of default 401 on bad authnetication
294 auth_ret_code =
294 auth_ret_code =
295
295
296 ## use special detection method when serving auth_ret_code, instead of serving
296 ## use special detection method when serving auth_ret_code, instead of serving
297 ## ret_code directly, use 401 initially (Which triggers credentials prompt)
297 ## ret_code directly, use 401 initially (Which triggers credentials prompt)
298 ## and then serve auth_ret_code to clients
298 ## and then serve auth_ret_code to clients
299 auth_ret_code_detection = false
299 auth_ret_code_detection = false
300
300
301 ## locking return code. When repository is locked return this HTTP code. 2XX
301 ## locking return code. When repository is locked return this HTTP code. 2XX
302 ## codes don't break the transactions while 4XX codes do
302 ## codes don't break the transactions while 4XX codes do
303 lock_ret_code = 423
303 lock_ret_code = 423
304
304
305 ## allows to change the repository location in settings page
305 ## allows to change the repository location in settings page
306 allow_repo_location_change = true
306 allow_repo_location_change = true
307
307
308 ## allows to setup custom hooks in settings page
308 ## allows to setup custom hooks in settings page
309 allow_custom_hooks_settings = true
309 allow_custom_hooks_settings = true
310
310
311 ## generated license token, goto license page in RhodeCode settings to obtain
311 ## generated license token, goto license page in RhodeCode settings to obtain
312 ## new token
312 ## new token
313 license_token = abra-cada-bra1-rce3
313 license_token = abra-cada-bra1-rce3
314
314
315 ## supervisor connection uri, for managing supervisor and logs.
315 ## supervisor connection uri, for managing supervisor and logs.
316 supervisor.uri =
316 supervisor.uri =
317 ## supervisord group name/id we only want this RC instance to handle
317 ## supervisord group name/id we only want this RC instance to handle
318 supervisor.group_id = dev
318 supervisor.group_id = dev
319
319
320 ## Display extended labs settings
320 ## Display extended labs settings
321 labs_settings_active = true
321 labs_settings_active = true
322
322
323 ####################################
323 ####################################
324 ### CELERY CONFIG ####
324 ### CELERY CONFIG ####
325 ####################################
325 ####################################
326 use_celery = false
326 use_celery = false
327 broker.host = localhost
327 broker.host = localhost
328 broker.vhost = rabbitmqhost
328 broker.vhost = rabbitmqhost
329 broker.port = 5672
329 broker.port = 5672
330 broker.user = rabbitmq
330 broker.user = rabbitmq
331 broker.password = qweqwe
331 broker.password = qweqwe
332
332
333 celery.imports = rhodecode.lib.celerylib.tasks
333 celery.imports = rhodecode.lib.celerylib.tasks
334
334
335 celery.result.backend = amqp
335 celery.result.backend = amqp
336 celery.result.dburi = amqp://
336 celery.result.dburi = amqp://
337 celery.result.serialier = json
337 celery.result.serialier = json
338
338
339 #celery.send.task.error.emails = true
339 #celery.send.task.error.emails = true
340 #celery.amqp.task.result.expires = 18000
340 #celery.amqp.task.result.expires = 18000
341
341
342 celeryd.concurrency = 2
342 celeryd.concurrency = 2
343 #celeryd.log.file = celeryd.log
343 #celeryd.log.file = celeryd.log
344 celeryd.log.level = debug
344 celeryd.log.level = debug
345 celeryd.max.tasks.per.child = 1
345 celeryd.max.tasks.per.child = 1
346
346
347 ## tasks will never be sent to the queue, but executed locally instead.
347 ## tasks will never be sent to the queue, but executed locally instead.
348 celery.always.eager = false
348 celery.always.eager = false
349
349
350 ####################################
350 ####################################
351 ### BEAKER CACHE ####
351 ### BEAKER CACHE ####
352 ####################################
352 ####################################
353 # default cache dir for templates. Putting this into a ramdisk
353 # default cache dir for templates. Putting this into a ramdisk
354 ## can boost performance, eg. %(here)s/data_ramdisk
354 ## can boost performance, eg. %(here)s/data_ramdisk
355 cache_dir = %(here)s/data
355 cache_dir = %(here)s/data
356
356
357 ## locking and default file storage for Beaker. Putting this into a ramdisk
357 ## locking and default file storage for Beaker. Putting this into a ramdisk
358 ## can boost performance, eg. %(here)s/data_ramdisk/cache/beaker_data
358 ## can boost performance, eg. %(here)s/data_ramdisk/cache/beaker_data
359 beaker.cache.data_dir = %(here)s/rc/data/cache/beaker_data
359 beaker.cache.data_dir = %(here)s/rc/data/cache/beaker_data
360 beaker.cache.lock_dir = %(here)s/rc/data/cache/beaker_lock
360 beaker.cache.lock_dir = %(here)s/rc/data/cache/beaker_lock
361
361
362 beaker.cache.regions = super_short_term, short_term, long_term, sql_cache_short, auth_plugins, repo_cache_long
362 beaker.cache.regions = super_short_term, short_term, long_term, sql_cache_short, auth_plugins, repo_cache_long
363
363
364 beaker.cache.super_short_term.type = memory
364 beaker.cache.super_short_term.type = memory
365 beaker.cache.super_short_term.expire = 1
365 beaker.cache.super_short_term.expire = 1
366 beaker.cache.super_short_term.key_length = 256
366 beaker.cache.super_short_term.key_length = 256
367
367
368 beaker.cache.short_term.type = memory
368 beaker.cache.short_term.type = memory
369 beaker.cache.short_term.expire = 60
369 beaker.cache.short_term.expire = 60
370 beaker.cache.short_term.key_length = 256
370 beaker.cache.short_term.key_length = 256
371
371
372 beaker.cache.long_term.type = memory
372 beaker.cache.long_term.type = memory
373 beaker.cache.long_term.expire = 36000
373 beaker.cache.long_term.expire = 36000
374 beaker.cache.long_term.key_length = 256
374 beaker.cache.long_term.key_length = 256
375
375
376 beaker.cache.sql_cache_short.type = memory
376 beaker.cache.sql_cache_short.type = memory
377 beaker.cache.sql_cache_short.expire = 1
377 beaker.cache.sql_cache_short.expire = 1
378 beaker.cache.sql_cache_short.key_length = 256
378 beaker.cache.sql_cache_short.key_length = 256
379
379
380 ## default is memory cache, configure only if required
380 ## default is memory cache, configure only if required
381 ## using multi-node or multi-worker setup
381 ## using multi-node or multi-worker setup
382 #beaker.cache.auth_plugins.type = ext:database
382 #beaker.cache.auth_plugins.type = ext:database
383 #beaker.cache.auth_plugins.lock_dir = %(here)s/data/cache/auth_plugin_lock
383 #beaker.cache.auth_plugins.lock_dir = %(here)s/data/cache/auth_plugin_lock
384 #beaker.cache.auth_plugins.url = postgresql://postgres:secret@localhost/rhodecode
384 #beaker.cache.auth_plugins.url = postgresql://postgres:secret@localhost/rhodecode
385 #beaker.cache.auth_plugins.url = mysql://root:secret@127.0.0.1/rhodecode
385 #beaker.cache.auth_plugins.url = mysql://root:secret@127.0.0.1/rhodecode
386 #beaker.cache.auth_plugins.sa.pool_recycle = 3600
386 #beaker.cache.auth_plugins.sa.pool_recycle = 3600
387 #beaker.cache.auth_plugins.sa.pool_size = 10
387 #beaker.cache.auth_plugins.sa.pool_size = 10
388 #beaker.cache.auth_plugins.sa.max_overflow = 0
388 #beaker.cache.auth_plugins.sa.max_overflow = 0
389
389
390 beaker.cache.repo_cache_long.type = memorylru_base
390 beaker.cache.repo_cache_long.type = memorylru_base
391 beaker.cache.repo_cache_long.max_items = 4096
391 beaker.cache.repo_cache_long.max_items = 4096
392 beaker.cache.repo_cache_long.expire = 2592000
392 beaker.cache.repo_cache_long.expire = 2592000
393
393
394 ## default is memorylru_base cache, configure only if required
394 ## default is memorylru_base cache, configure only if required
395 ## using multi-node or multi-worker setup
395 ## using multi-node or multi-worker setup
396 #beaker.cache.repo_cache_long.type = ext:memcached
396 #beaker.cache.repo_cache_long.type = ext:memcached
397 #beaker.cache.repo_cache_long.url = localhost:11211
397 #beaker.cache.repo_cache_long.url = localhost:11211
398 #beaker.cache.repo_cache_long.expire = 1209600
398 #beaker.cache.repo_cache_long.expire = 1209600
399 #beaker.cache.repo_cache_long.key_length = 256
399 #beaker.cache.repo_cache_long.key_length = 256
400
400
401 ####################################
401 ####################################
402 ### BEAKER SESSION ####
402 ### BEAKER SESSION ####
403 ####################################
403 ####################################
404
404
405 ## .session.type is type of storage options for the session, current allowed
405 ## .session.type is type of storage options for the session, current allowed
406 ## types are file, ext:memcached, ext:database, and memory (default).
406 ## types are file, ext:memcached, ext:database, and memory (default).
407 beaker.session.type = file
407 beaker.session.type = file
408 beaker.session.data_dir = %(here)s/rc/data/sessions/data
408 beaker.session.data_dir = %(here)s/rc/data/sessions/data
409
409
410 ## db based session, fast, and allows easy management over logged in users
410 ## db based session, fast, and allows easy management over logged in users
411 #beaker.session.type = ext:database
411 #beaker.session.type = ext:database
412 #beaker.session.table_name = db_session
412 #beaker.session.table_name = db_session
413 #beaker.session.sa.url = postgresql://postgres:secret@localhost/rhodecode
413 #beaker.session.sa.url = postgresql://postgres:secret@localhost/rhodecode
414 #beaker.session.sa.url = mysql://root:secret@127.0.0.1/rhodecode
414 #beaker.session.sa.url = mysql://root:secret@127.0.0.1/rhodecode
415 #beaker.session.sa.pool_recycle = 3600
415 #beaker.session.sa.pool_recycle = 3600
416 #beaker.session.sa.echo = false
416 #beaker.session.sa.echo = false
417
417
418 beaker.session.key = rhodecode
418 beaker.session.key = rhodecode
419 beaker.session.secret = test-rc-uytcxaz
419 beaker.session.secret = test-rc-uytcxaz
420 beaker.session.lock_dir = %(here)s/rc/data/sessions/lock
420 beaker.session.lock_dir = %(here)s/rc/data/sessions/lock
421
421
422 ## Secure encrypted cookie. Requires AES and AES python libraries
422 ## Secure encrypted cookie. Requires AES and AES python libraries
423 ## you must disable beaker.session.secret to use this
423 ## you must disable beaker.session.secret to use this
424 #beaker.session.encrypt_key = <key_for_encryption>
424 #beaker.session.encrypt_key = <key_for_encryption>
425 #beaker.session.validate_key = <validation_key>
425 #beaker.session.validate_key = <validation_key>
426
426
427 ## sets session as invalid(also logging out user) if it haven not been
427 ## sets session as invalid(also logging out user) if it haven not been
428 ## accessed for given amount of time in seconds
428 ## accessed for given amount of time in seconds
429 beaker.session.timeout = 2592000
429 beaker.session.timeout = 2592000
430 beaker.session.httponly = true
430 beaker.session.httponly = true
431 ## Path to use for the cookie.
431 ## Path to use for the cookie.
432 #beaker.session.cookie_path = /<your-prefix>
432 #beaker.session.cookie_path = /<your-prefix>
433
433
434 ## uncomment for https secure cookie
434 ## uncomment for https secure cookie
435 beaker.session.secure = false
435 beaker.session.secure = false
436
436
437 ## auto save the session to not to use .save()
437 ## auto save the session to not to use .save()
438 beaker.session.auto = false
438 beaker.session.auto = false
439
439
440 ## default cookie expiration time in seconds, set to `true` to set expire
440 ## default cookie expiration time in seconds, set to `true` to set expire
441 ## at browser close
441 ## at browser close
442 #beaker.session.cookie_expires = 3600
442 #beaker.session.cookie_expires = 3600
443
443
444 ###################################
444 ###################################
445 ## SEARCH INDEXING CONFIGURATION ##
445 ## SEARCH INDEXING CONFIGURATION ##
446 ###################################
446 ###################################
447 ## Full text search indexer is available in rhodecode-tools under
447 ## Full text search indexer is available in rhodecode-tools under
448 ## `rhodecode-tools index` command
448 ## `rhodecode-tools index` command
449
449
450 # WHOOSH Backend, doesn't require additional services to run
450 # WHOOSH Backend, doesn't require additional services to run
451 # it works good with few dozen repos
451 # it works good with few dozen repos
452 search.module = rhodecode.lib.index.whoosh
452 search.module = rhodecode.lib.index.whoosh
453 search.location = %(here)s/data/index
453 search.location = %(here)s/data/index
454
454
455 ########################################
455 ########################################
456 ### CHANNELSTREAM CONFIG ####
456 ### CHANNELSTREAM CONFIG ####
457 ########################################
457 ########################################
458 ## channelstream enables persistent connections and live notification
458 ## channelstream enables persistent connections and live notification
459 ## in the system. It's also used by the chat system
459 ## in the system. It's also used by the chat system
460
460
461 channelstream.enabled = false
461 channelstream.enabled = false
462 # location of channelstream server on the backend
462 # location of channelstream server on the backend
463 channelstream.server = 127.0.0.1:9800
463 channelstream.server = 127.0.0.1:9800
464 ## location of the channelstream server from outside world
464 ## location of the channelstream server from outside world
465 ## most likely this would be an http server special backend URL, that handles
465 ## most likely this would be an http server special backend URL, that handles
466 ## websocket connections see nginx example for config
466 ## websocket connections see nginx example for config
467 channelstream.ws_url = ws://rhodecode.yourserver.com/_channelstream
467 channelstream.ws_url = ws://rhodecode.yourserver.com/_channelstream
468 channelstream.secret = secret
468 channelstream.secret = secret
469 channelstream.history.location = %(here)s/channelstream_history
469 channelstream.history.location = %(here)s/channelstream_history
470
470
471
471
472 ###################################
472 ###################################
473 ## APPENLIGHT CONFIG ##
473 ## APPENLIGHT CONFIG ##
474 ###################################
474 ###################################
475
475
476 ## Appenlight is tailored to work with RhodeCode, see
476 ## Appenlight is tailored to work with RhodeCode, see
477 ## http://appenlight.com for details how to obtain an account
477 ## http://appenlight.com for details how to obtain an account
478
478
479 ## appenlight integration enabled
479 ## appenlight integration enabled
480 appenlight = false
480 appenlight = false
481
481
482 appenlight.server_url = https://api.appenlight.com
482 appenlight.server_url = https://api.appenlight.com
483 appenlight.api_key = YOUR_API_KEY
483 appenlight.api_key = YOUR_API_KEY
484 #appenlight.transport_config = https://api.appenlight.com?threaded=1&timeout=5
484 #appenlight.transport_config = https://api.appenlight.com?threaded=1&timeout=5
485
485
486 # used for JS client
486 # used for JS client
487 appenlight.api_public_key = YOUR_API_PUBLIC_KEY
487 appenlight.api_public_key = YOUR_API_PUBLIC_KEY
488
488
489 ## TWEAK AMOUNT OF INFO SENT HERE
489 ## TWEAK AMOUNT OF INFO SENT HERE
490
490
491 ## enables 404 error logging (default False)
491 ## enables 404 error logging (default False)
492 appenlight.report_404 = false
492 appenlight.report_404 = false
493
493
494 ## time in seconds after request is considered being slow (default 1)
494 ## time in seconds after request is considered being slow (default 1)
495 appenlight.slow_request_time = 1
495 appenlight.slow_request_time = 1
496
496
497 ## record slow requests in application
497 ## record slow requests in application
498 ## (needs to be enabled for slow datastore recording and time tracking)
498 ## (needs to be enabled for slow datastore recording and time tracking)
499 appenlight.slow_requests = true
499 appenlight.slow_requests = true
500
500
501 ## enable hooking to application loggers
501 ## enable hooking to application loggers
502 appenlight.logging = true
502 appenlight.logging = true
503
503
504 ## minimum log level for log capture
504 ## minimum log level for log capture
505 appenlight.logging.level = WARNING
505 appenlight.logging.level = WARNING
506
506
507 ## send logs only from erroneous/slow requests
507 ## send logs only from erroneous/slow requests
508 ## (saves API quota for intensive logging)
508 ## (saves API quota for intensive logging)
509 appenlight.logging_on_error = false
509 appenlight.logging_on_error = false
510
510
511 ## list of additonal keywords that should be grabbed from environ object
511 ## list of additonal keywords that should be grabbed from environ object
512 ## can be string with comma separated list of words in lowercase
512 ## can be string with comma separated list of words in lowercase
513 ## (by default client will always send following info:
513 ## (by default client will always send following info:
514 ## 'REMOTE_USER', 'REMOTE_ADDR', 'SERVER_NAME', 'CONTENT_TYPE' + all keys that
514 ## 'REMOTE_USER', 'REMOTE_ADDR', 'SERVER_NAME', 'CONTENT_TYPE' + all keys that
515 ## start with HTTP* this list be extended with additional keywords here
515 ## start with HTTP* this list be extended with additional keywords here
516 appenlight.environ_keys_whitelist =
516 appenlight.environ_keys_whitelist =
517
517
518 ## list of keywords that should be blanked from request object
518 ## list of keywords that should be blanked from request object
519 ## can be string with comma separated list of words in lowercase
519 ## can be string with comma separated list of words in lowercase
520 ## (by default client will always blank keys that contain following words
520 ## (by default client will always blank keys that contain following words
521 ## 'password', 'passwd', 'pwd', 'auth_tkt', 'secret', 'csrf'
521 ## 'password', 'passwd', 'pwd', 'auth_tkt', 'secret', 'csrf'
522 ## this list be extended with additional keywords set here
522 ## this list be extended with additional keywords set here
523 appenlight.request_keys_blacklist =
523 appenlight.request_keys_blacklist =
524
524
525 ## list of namespaces that should be ignores when gathering log entries
525 ## list of namespaces that should be ignores when gathering log entries
526 ## can be string with comma separated list of namespaces
526 ## can be string with comma separated list of namespaces
527 ## (by default the client ignores own entries: appenlight_client.client)
527 ## (by default the client ignores own entries: appenlight_client.client)
528 appenlight.log_namespace_blacklist =
528 appenlight.log_namespace_blacklist =
529
529
530
530
531 ################################################################################
531 ################################################################################
532 ## WARNING: *THE LINE BELOW MUST BE UNCOMMENTED ON A PRODUCTION ENVIRONMENT* ##
532 ## WARNING: *THE LINE BELOW MUST BE UNCOMMENTED ON A PRODUCTION ENVIRONMENT* ##
533 ## Debug mode will enable the interactive debugging tool, allowing ANYONE to ##
533 ## Debug mode will enable the interactive debugging tool, allowing ANYONE to ##
534 ## execute malicious code after an exception is raised. ##
534 ## execute malicious code after an exception is raised. ##
535 ################################################################################
535 ################################################################################
536 set debug = false
536 set debug = false
537
537
538
538
539 ##############
539 ##############
540 ## STYLING ##
540 ## STYLING ##
541 ##############
541 ##############
542 debug_style = false
542 debug_style = false
543
543
544 #########################################################
544 #########################################################
545 ### DB CONFIGS - EACH DB WILL HAVE IT'S OWN CONFIG ###
545 ### DB CONFIGS - EACH DB WILL HAVE IT'S OWN CONFIG ###
546 #########################################################
546 #########################################################
547 #sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode_test.db
547 #sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode_test.db
548 #sqlalchemy.db1.url = postgresql://postgres:qweqwe@localhost/rhodecode_test
548 #sqlalchemy.db1.url = postgresql://postgres:qweqwe@localhost/rhodecode_test
549 #sqlalchemy.db1.url = mysql://root:qweqwe@localhost/rhodecode_test
549 #sqlalchemy.db1.url = mysql://root:qweqwe@localhost/rhodecode_test
550 sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode_test.db
550 sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode_test.db
551
551
552 # see sqlalchemy docs for other advanced settings
552 # see sqlalchemy docs for other advanced settings
553
553
554 ## print the sql statements to output
554 ## print the sql statements to output
555 sqlalchemy.db1.echo = false
555 sqlalchemy.db1.echo = false
556 ## recycle the connections after this ammount of seconds
556 ## recycle the connections after this ammount of seconds
557 sqlalchemy.db1.pool_recycle = 3600
557 sqlalchemy.db1.pool_recycle = 3600
558 sqlalchemy.db1.convert_unicode = true
558 sqlalchemy.db1.convert_unicode = true
559
559
560 ## the number of connections to keep open inside the connection pool.
560 ## the number of connections to keep open inside the connection pool.
561 ## 0 indicates no limit
561 ## 0 indicates no limit
562 #sqlalchemy.db1.pool_size = 5
562 #sqlalchemy.db1.pool_size = 5
563
563
564 ## the number of connections to allow in connection pool "overflow", that is
564 ## the number of connections to allow in connection pool "overflow", that is
565 ## connections that can be opened above and beyond the pool_size setting,
565 ## connections that can be opened above and beyond the pool_size setting,
566 ## which defaults to five.
566 ## which defaults to five.
567 #sqlalchemy.db1.max_overflow = 10
567 #sqlalchemy.db1.max_overflow = 10
568
568
569
569
570 ##################
570 ##################
571 ### VCS CONFIG ###
571 ### VCS CONFIG ###
572 ##################
572 ##################
573 vcs.server.enable = true
573 vcs.server.enable = true
574 vcs.server = localhost:9901
574 vcs.server = localhost:9901
575
575
576 ## Web server connectivity protocol, responsible for web based VCS operatations
576 ## Web server connectivity protocol, responsible for web based VCS operatations
577 ## Available protocols are:
577 ## Available protocols are:
578 ## `pyro4` - using pyro4 server
579 ## `http` - using http-rpc backend
578 ## `http` - using http-rpc backend
580 vcs.server.protocol = http
579 vcs.server.protocol = http
581
580
582 ## Push/Pull operations protocol, available options are:
581 ## Push/Pull operations protocol, available options are:
583 ## `pyro4` - using pyro4 server
584 ## `rhodecode.lib.middleware.utils.scm_app_http` - Http based, recommended
582 ## `rhodecode.lib.middleware.utils.scm_app_http` - Http based, recommended
585 ## `vcsserver.scm_app` - internal app (EE only)
583 ## `vcsserver.scm_app` - internal app (EE only)
586 vcs.scm_app_implementation = http
584 vcs.scm_app_implementation = http
587
585
588 ## Push/Pull operations hooks protocol, available options are:
586 ## Push/Pull operations hooks protocol, available options are:
589 ## `pyro4` - using pyro4 server
590 ## `http` - using http-rpc backend
587 ## `http` - using http-rpc backend
591 vcs.hooks.protocol = http
588 vcs.hooks.protocol = http
592
589
593 vcs.server.log_level = debug
590 vcs.server.log_level = debug
594 ## Start VCSServer with this instance as a subprocess, usefull for development
591 ## Start VCSServer with this instance as a subprocess, usefull for development
595 vcs.start_server = false
592 vcs.start_server = false
596
593
597 ## List of enabled VCS backends, available options are:
594 ## List of enabled VCS backends, available options are:
598 ## `hg` - mercurial
595 ## `hg` - mercurial
599 ## `git` - git
596 ## `git` - git
600 ## `svn` - subversion
597 ## `svn` - subversion
601 vcs.backends = hg, git, svn
598 vcs.backends = hg, git, svn
602
599
603 vcs.connection_timeout = 3600
600 vcs.connection_timeout = 3600
604 ## Compatibility version when creating SVN repositories. Defaults to newest version when commented out.
601 ## Compatibility version when creating SVN repositories. Defaults to newest version when commented out.
605 ## Available options are: pre-1.4-compatible, pre-1.5-compatible, pre-1.6-compatible, pre-1.8-compatible
602 ## Available options are: pre-1.4-compatible, pre-1.5-compatible, pre-1.6-compatible, pre-1.8-compatible
606 #vcs.svn.compatible_version = pre-1.8-compatible
603 #vcs.svn.compatible_version = pre-1.8-compatible
607
604
608
605
609 ############################################################
606 ############################################################
610 ### Subversion proxy support (mod_dav_svn) ###
607 ### Subversion proxy support (mod_dav_svn) ###
611 ### Maps RhodeCode repo groups into SVN paths for Apache ###
608 ### Maps RhodeCode repo groups into SVN paths for Apache ###
612 ############################################################
609 ############################################################
613 ## Enable or disable the config file generation.
610 ## Enable or disable the config file generation.
614 svn.proxy.generate_config = false
611 svn.proxy.generate_config = false
615 ## Generate config file with `SVNListParentPath` set to `On`.
612 ## Generate config file with `SVNListParentPath` set to `On`.
616 svn.proxy.list_parent_path = true
613 svn.proxy.list_parent_path = true
617 ## Set location and file name of generated config file.
614 ## Set location and file name of generated config file.
618 svn.proxy.config_file_path = %(here)s/mod_dav_svn.conf
615 svn.proxy.config_file_path = %(here)s/mod_dav_svn.conf
619 ## File system path to the directory containing the repositories served by
616 ## File system path to the directory containing the repositories served by
620 ## RhodeCode.
617 ## RhodeCode.
621 svn.proxy.parent_path_root = /path/to/repo_store
618 svn.proxy.parent_path_root = /path/to/repo_store
622 ## Used as a prefix to the <Location> block in the generated config file. In
619 ## Used as a prefix to the <Location> block in the generated config file. In
623 ## most cases it should be set to `/`.
620 ## most cases it should be set to `/`.
624 svn.proxy.location_root = /
621 svn.proxy.location_root = /
625
622
626
623
627 ################################
624 ################################
628 ### LOGGING CONFIGURATION ####
625 ### LOGGING CONFIGURATION ####
629 ################################
626 ################################
630 [loggers]
627 [loggers]
631 keys = root, routes, rhodecode, sqlalchemy, beaker, pyro4, templates
628 keys = root, routes, rhodecode, sqlalchemy, beaker, templates
632
629
633 [handlers]
630 [handlers]
634 keys = console, console_sql
631 keys = console, console_sql
635
632
636 [formatters]
633 [formatters]
637 keys = generic, color_formatter, color_formatter_sql
634 keys = generic, color_formatter, color_formatter_sql
638
635
639 #############
636 #############
640 ## LOGGERS ##
637 ## LOGGERS ##
641 #############
638 #############
642 [logger_root]
639 [logger_root]
643 level = NOTSET
640 level = NOTSET
644 handlers = console
641 handlers = console
645
642
646 [logger_routes]
643 [logger_routes]
647 level = DEBUG
644 level = DEBUG
648 handlers =
645 handlers =
649 qualname = routes.middleware
646 qualname = routes.middleware
650 ## "level = DEBUG" logs the route matched and routing variables.
647 ## "level = DEBUG" logs the route matched and routing variables.
651 propagate = 1
648 propagate = 1
652
649
653 [logger_beaker]
650 [logger_beaker]
654 level = DEBUG
651 level = DEBUG
655 handlers =
652 handlers =
656 qualname = beaker.container
653 qualname = beaker.container
657 propagate = 1
654 propagate = 1
658
655
659 [logger_pyro4]
660 level = DEBUG
661 handlers =
662 qualname = Pyro4
663 propagate = 1
664
665 [logger_templates]
656 [logger_templates]
666 level = INFO
657 level = INFO
667 handlers =
658 handlers =
668 qualname = pylons.templating
659 qualname = pylons.templating
669 propagate = 1
660 propagate = 1
670
661
671 [logger_rhodecode]
662 [logger_rhodecode]
672 level = DEBUG
663 level = DEBUG
673 handlers =
664 handlers =
674 qualname = rhodecode
665 qualname = rhodecode
675 propagate = 1
666 propagate = 1
676
667
677 [logger_sqlalchemy]
668 [logger_sqlalchemy]
678 level = ERROR
669 level = ERROR
679 handlers = console_sql
670 handlers = console_sql
680 qualname = sqlalchemy.engine
671 qualname = sqlalchemy.engine
681 propagate = 0
672 propagate = 0
682
673
683 ##############
674 ##############
684 ## HANDLERS ##
675 ## HANDLERS ##
685 ##############
676 ##############
686
677
687 [handler_console]
678 [handler_console]
688 class = StreamHandler
679 class = StreamHandler
689 args = (sys.stderr,)
680 args = (sys.stderr,)
690 level = DEBUG
681 level = DEBUG
691 formatter = generic
682 formatter = generic
692
683
693 [handler_console_sql]
684 [handler_console_sql]
694 class = StreamHandler
685 class = StreamHandler
695 args = (sys.stderr,)
686 args = (sys.stderr,)
696 level = WARN
687 level = WARN
697 formatter = generic
688 formatter = generic
698
689
699 ################
690 ################
700 ## FORMATTERS ##
691 ## FORMATTERS ##
701 ################
692 ################
702
693
703 [formatter_generic]
694 [formatter_generic]
704 class = rhodecode.lib.logging_formatter.Pyro4AwareFormatter
695 class = rhodecode.lib.logging_formatter.ExceptionAwareFormatter
705 format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
696 format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
706 datefmt = %Y-%m-%d %H:%M:%S
697 datefmt = %Y-%m-%d %H:%M:%S
707
698
708 [formatter_color_formatter]
699 [formatter_color_formatter]
709 class = rhodecode.lib.logging_formatter.ColorFormatter
700 class = rhodecode.lib.logging_formatter.ColorFormatter
710 format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
701 format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
711 datefmt = %Y-%m-%d %H:%M:%S
702 datefmt = %Y-%m-%d %H:%M:%S
712
703
713 [formatter_color_formatter_sql]
704 [formatter_color_formatter_sql]
714 class = rhodecode.lib.logging_formatter.ColorFormatterSql
705 class = rhodecode.lib.logging_formatter.ColorFormatterSql
715 format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
706 format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
716 datefmt = %Y-%m-%d %H:%M:%S
707 datefmt = %Y-%m-%d %H:%M:%S
@@ -1,100 +1,96 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2017 RhodeCode GmbH
3 # Copyright (C) 2010-2017 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import logging
21 import logging
22
22
23 import mock
23 import mock
24 import msgpack
24 import msgpack
25 import pytest
25 import pytest
26
26
27 from rhodecode.lib import vcs
27 from rhodecode.lib import vcs
28 from rhodecode.lib.vcs import client_http
28 from rhodecode.lib.vcs import client_http
29
29
30
30
31 def test_uses_persistent_http_connections(caplog, vcsbackend_hg):
31 def test_uses_persistent_http_connections(caplog, vcsbackend_hg):
32 repo = vcsbackend_hg.repo
32 repo = vcsbackend_hg.repo
33 remote_call = repo._remote.branches
33 remote_call = repo._remote.branches
34
34
35 with caplog.at_level(logging.INFO):
35 with caplog.at_level(logging.INFO):
36 for x in range(5):
36 for x in range(5):
37 remote_call(normal=True, closed=False)
37 remote_call(normal=True, closed=False)
38
38
39 new_connections = [
39 new_connections = [
40 r for r in caplog.record_tuples if is_new_connection(*r)]
40 r for r in caplog.record_tuples if is_new_connection(*r)]
41 assert len(new_connections) <= 1
41 assert len(new_connections) <= 1
42
42
43
43
44 def is_new_connection(logger, level, message):
44 def is_new_connection(logger, level, message):
45 return (
45 return (
46 logger == 'requests.packages.urllib3.connectionpool' and
46 logger == 'requests.packages.urllib3.connectionpool' and
47 message.startswith('Starting new HTTP'))
47 message.startswith('Starting new HTTP'))
48
48
49
49
50 @pytest.fixture
50 @pytest.fixture
51 def stub_session():
51 def stub_session():
52 """
52 """
53 Stub of `requests.Session()`.
53 Stub of `requests.Session()`.
54 """
54 """
55 session = mock.Mock()
55 session = mock.Mock()
56 session.post().content = msgpack.packb({})
56 session.post().content = msgpack.packb({})
57 session.reset_mock()
57 session.reset_mock()
58 return session
58 return session
59
59
60
60
61 @pytest.fixture
61 @pytest.fixture
62 def stub_session_factory(stub_session):
62 def stub_session_factory(stub_session):
63 """
63 """
64 Stub of `rhodecode.lib.vcs.client_http.ThreadlocalSessionFactory`.
64 Stub of `rhodecode.lib.vcs.client_http.ThreadlocalSessionFactory`.
65 """
65 """
66 session_factory = mock.Mock()
66 session_factory = mock.Mock()
67 session_factory.return_value = stub_session
67 session_factory.return_value = stub_session
68 return session_factory
68 return session_factory
69
69
70
70
71 def test_repo_maker_uses_session_for_classmethods(stub_session_factory):
71 def test_repo_maker_uses_session_for_classmethods(stub_session_factory):
72 repo_maker = client_http.RepoMaker(
72 repo_maker = client_http.RepoMaker(
73 'server_and_port', 'endpoint', 'test_dummy_scm', stub_session_factory)
73 'server_and_port', 'endpoint', 'test_dummy_scm', stub_session_factory)
74 repo_maker.example_call()
74 repo_maker.example_call()
75 stub_session_factory().post.assert_called_with(
75 stub_session_factory().post.assert_called_with(
76 'http://server_and_port/endpoint', data=mock.ANY)
76 'http://server_and_port/endpoint', data=mock.ANY)
77
77
78
78
79 def test_repo_maker_uses_session_for_instance_methods(
79 def test_repo_maker_uses_session_for_instance_methods(
80 stub_session_factory, config):
80 stub_session_factory, config):
81 repo_maker = client_http.RepoMaker(
81 repo_maker = client_http.RepoMaker(
82 'server_and_port', 'endpoint', 'test_dummy_scm', stub_session_factory)
82 'server_and_port', 'endpoint', 'test_dummy_scm', stub_session_factory)
83 repo = repo_maker('stub_path', config)
83 repo = repo_maker('stub_path', config)
84 repo.example_call()
84 repo.example_call()
85 stub_session_factory().post.assert_called_with(
85 stub_session_factory().post.assert_called_with(
86 'http://server_and_port/endpoint', data=mock.ANY)
86 'http://server_and_port/endpoint', data=mock.ANY)
87
87
88
88
89 @mock.patch('rhodecode.lib.vcs.client_http.ThreadlocalSessionFactory')
89 @mock.patch('rhodecode.lib.vcs.client_http.ThreadlocalSessionFactory')
90 @mock.patch('rhodecode.lib.vcs.connection')
90 @mock.patch('rhodecode.lib.vcs.connection')
91 def test_connect_passes_in_the_same_session(
91 def test_connect_passes_in_the_same_session(
92 connection, session_factory_class, stub_session):
92 connection, session_factory_class, stub_session):
93 session_factory = session_factory_class.return_value
93 session_factory = session_factory_class.return_value
94 session_factory.return_value = stub_session
94 session_factory.return_value = stub_session
95
95
96 vcs.connect_http('server_and_port')
96 vcs.connect_http('server_and_port')
97
98 assert connection.Hg._session_factory() == stub_session
99 assert connection.Svn._session_factory() == stub_session
100 assert connection.Git._session_factory() == stub_session
@@ -1,189 +1,184 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2016-2017 RhodeCode GmbH
3 # Copyright (C) 2016-2017 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import pytest
21 import pytest
22
22
23 from mock import call, patch
23 from mock import call, patch
24
24
25 from rhodecode.lib.vcs.backends.base import Reference
25 from rhodecode.lib.vcs.backends.base import Reference
26
26
27
27
28 class TestMercurialRemoteRepoInvalidation(object):
28 class TestMercurialRemoteRepoInvalidation(object):
29 """
29 """
30 If the VCSServer is running with multiple processes or/and instances.
30 If the VCSServer is running with multiple processes or/and instances.
31 Operations on repositories are potentially handled by different processes
31 Operations on repositories are potentially handled by different processes
32 in a random fashion. The mercurial repository objects used in the VCSServer
32 in a random fashion. The mercurial repository objects used in the VCSServer
33 are caching the commits of the repo. Therefore we have to invalidate the
33 are caching the commits of the repo. Therefore we have to invalidate the
34 VCSServer caching of these objects after a writing operation.
34 VCSServer caching of these objects after a writing operation.
35 """
35 """
36
36
37 # Default reference used as a dummy during tests.
37 # Default reference used as a dummy during tests.
38 default_ref = Reference('branch', 'default', None)
38 default_ref = Reference('branch', 'default', None)
39
39
40 # Methods of vcsserver.hg.HgRemote that are "writing" operations.
40 # Methods of vcsserver.hg.HgRemote that are "writing" operations.
41 writing_methods = [
41 writing_methods = [
42 'bookmark',
42 'bookmark',
43 'commit',
43 'commit',
44 'merge',
44 'merge',
45 'pull',
45 'pull',
46 'pull_cmd',
46 'pull_cmd',
47 'rebase',
47 'rebase',
48 'strip',
48 'strip',
49 'tag',
49 'tag',
50 ]
50 ]
51
51
52 @pytest.mark.parametrize('method_name, method_args', [
52 @pytest.mark.parametrize('method_name, method_args', [
53 ('_local_merge', [default_ref, None, None, None, default_ref]),
53 ('_local_merge', [default_ref, None, None, None, default_ref]),
54 ('_local_pull', ['', default_ref]),
54 ('_local_pull', ['', default_ref]),
55 ('bookmark', [None]),
55 ('bookmark', [None]),
56 ('pull', ['', default_ref]),
56 ('pull', ['', default_ref]),
57 ('remove_tag', ['mytag', None]),
57 ('remove_tag', ['mytag', None]),
58 ('strip', [None]),
58 ('strip', [None]),
59 ('tag', ['newtag', None]),
59 ('tag', ['newtag', None]),
60 ])
60 ])
61 def test_method_invokes_invalidate_on_remote_repo(
61 def test_method_invokes_invalidate_on_remote_repo(
62 self, method_name, method_args, backend_hg):
62 self, method_name, method_args, backend_hg):
63 """
63 """
64 Check that the listed methods are invalidating the VCSServer cache
64 Check that the listed methods are invalidating the VCSServer cache
65 after invoking a writing method of their remote repository object.
65 after invoking a writing method of their remote repository object.
66 """
66 """
67 tags = {'mytag': 'mytag-id'}
67 tags = {'mytag': 'mytag-id'}
68
68
69 def add_tag(name, raw_id, *args, **kwds):
69 def add_tag(name, raw_id, *args, **kwds):
70 tags[name] = raw_id
70 tags[name] = raw_id
71
71
72 repo = backend_hg.repo.scm_instance()
72 repo = backend_hg.repo.scm_instance()
73 with patch.object(repo, '_remote') as remote:
73 with patch.object(repo, '_remote') as remote:
74 remote.lookup.return_value = ('commit-id', 'commit-idx')
74 remote.lookup.return_value = ('commit-id', 'commit-idx')
75 remote.tags.return_value = tags
75 remote.tags.return_value = tags
76 remote._get_tags.return_value = tags
76 remote._get_tags.return_value = tags
77 remote.tag.side_effect = add_tag
77 remote.tag.side_effect = add_tag
78
78
79 # Invoke method.
79 # Invoke method.
80 method = getattr(repo, method_name)
80 method = getattr(repo, method_name)
81 method(*method_args)
81 method(*method_args)
82
82
83 # Assert that every "writing" method is followed by an invocation
83 # Assert that every "writing" method is followed by an invocation
84 # of the cache invalidation method.
84 # of the cache invalidation method.
85 for counter, method_call in enumerate(remote.method_calls):
85 for counter, method_call in enumerate(remote.method_calls):
86 call_name = method_call[0]
86 call_name = method_call[0]
87 if call_name in self.writing_methods:
87 if call_name in self.writing_methods:
88 next_call = remote.method_calls[counter + 1]
88 next_call = remote.method_calls[counter + 1]
89 assert next_call == call.invalidate_vcs_cache()
89 assert next_call == call.invalidate_vcs_cache()
90
90
91 def _prepare_shadow_repo(self, pull_request):
91 def _prepare_shadow_repo(self, pull_request):
92 """
92 """
93 Helper that creates a shadow repo that can be used to reproduce the
93 Helper that creates a shadow repo that can be used to reproduce the
94 CommitDoesNotExistError when pulling in from target and source
94 CommitDoesNotExistError when pulling in from target and source
95 references.
95 references.
96 """
96 """
97 from rhodecode.model.pull_request import PullRequestModel
97 from rhodecode.model.pull_request import PullRequestModel
98
98
99 target_vcs = pull_request.target_repo.scm_instance()
99 target_vcs = pull_request.target_repo.scm_instance()
100 target_ref = pull_request.target_ref_parts
100 target_ref = pull_request.target_ref_parts
101 source_ref = pull_request.source_ref_parts
101 source_ref = pull_request.source_ref_parts
102
102
103 # Create shadow repository.
103 # Create shadow repository.
104 pr = PullRequestModel()
104 pr = PullRequestModel()
105 workspace_id = pr._workspace_id(pull_request)
105 workspace_id = pr._workspace_id(pull_request)
106 shadow_repository_path = target_vcs._maybe_prepare_merge_workspace(
106 shadow_repository_path = target_vcs._maybe_prepare_merge_workspace(
107 workspace_id, target_ref)
107 workspace_id, target_ref)
108 shadow_repo = target_vcs._get_shadow_instance(shadow_repository_path)
108 shadow_repo = target_vcs._get_shadow_instance(shadow_repository_path)
109
109
110 # This will populate the cache of the mercurial repository object
110 # This will populate the cache of the mercurial repository object
111 # inside of the VCSServer.
111 # inside of the VCSServer.
112 shadow_repo.get_commit()
112 shadow_repo.get_commit()
113
113
114 return shadow_repo, source_ref, target_ref
114 return shadow_repo, source_ref, target_ref
115
115
116 @pytest.mark.backends('hg')
116 @pytest.mark.backends('hg')
117 def test_commit_does_not_exist_error_happens(self, pr_util, pylonsapp):
117 def test_commit_does_not_exist_error_happens(self, pr_util, pylonsapp):
118 """
118 """
119 This test is somewhat special. It does not really test the system
119 This test is somewhat special. It does not really test the system
120 instead it is more or less a precondition for the
120 instead it is more or less a precondition for the
121 "test_commit_does_not_exist_error_does_not_happen". It deactivates the
121 "test_commit_does_not_exist_error_does_not_happen". It deactivates the
122 cache invalidation and asserts that the error occurs.
122 cache invalidation and asserts that the error occurs.
123 """
123 """
124 from rhodecode.lib.vcs.exceptions import CommitDoesNotExistError
124 from rhodecode.lib.vcs.exceptions import CommitDoesNotExistError
125
125
126 if pylonsapp.config['vcs.server.protocol'] == 'pyro4':
127 pytest.skip('Test is intended for the HTTP protocol only.')
128
129 pull_request = pr_util.create_pull_request()
126 pull_request = pr_util.create_pull_request()
130 target_vcs = pull_request.target_repo.scm_instance()
127 target_vcs = pull_request.target_repo.scm_instance()
131 source_vcs = pull_request.source_repo.scm_instance()
128 source_vcs = pull_request.source_repo.scm_instance()
132 shadow_repo, source_ref, target_ref = self._prepare_shadow_repo(
129 shadow_repo, source_ref, target_ref = self._prepare_shadow_repo(
133 pull_request)
130 pull_request)
134
131
135 # Pull from target and source references but without invalidation of
132 # Pull from target and source references but without invalidation of
136 # RemoteRepo objects and without VCSServer caching of mercurial
133 # RemoteRepo objects and without VCSServer caching of mercurial
137 # repository objects.
134 # repository objects.
138 with patch.object(shadow_repo._remote, 'invalidate_vcs_cache'):
135 with patch.object(shadow_repo._remote, 'invalidate_vcs_cache'):
139 # NOTE: Do not use patch.dict() to disable the cache because it
136 # NOTE: Do not use patch.dict() to disable the cache because it
140 # restores the WHOLE dict and not only the patched keys.
137 # restores the WHOLE dict and not only the patched keys.
141 shadow_repo._remote._wire['cache'] = False
138 shadow_repo._remote._wire['cache'] = False
142 shadow_repo._local_pull(target_vcs.path, target_ref)
139 shadow_repo._local_pull(target_vcs.path, target_ref)
143 shadow_repo._local_pull(source_vcs.path, source_ref)
140 shadow_repo._local_pull(source_vcs.path, source_ref)
144 shadow_repo._remote._wire.pop('cache')
141 shadow_repo._remote._wire.pop('cache')
145
142
146 # Try to lookup the target_ref in shadow repo. This should work because
143 # Try to lookup the target_ref in shadow repo. This should work because
147 # the shadow repo is a clone of the target and always contains all off
144 # the shadow repo is a clone of the target and always contains all off
148 # it's commits in the initial cache.
145 # it's commits in the initial cache.
149 shadow_repo.get_commit(target_ref.commit_id)
146 shadow_repo.get_commit(target_ref.commit_id)
150
147
151 # If we try to lookup the source_ref it should fail because the shadow
148 # If we try to lookup the source_ref it should fail because the shadow
152 # repo commit cache doesn't get invalidated. (Due to patched
149 # repo commit cache doesn't get invalidated. (Due to patched
153 # invalidation and caching above).
150 # invalidation and caching above).
154 with pytest.raises(CommitDoesNotExistError):
151 with pytest.raises(CommitDoesNotExistError):
155 shadow_repo.get_commit(source_ref.commit_id)
152 shadow_repo.get_commit(source_ref.commit_id)
156
153
157 @pytest.mark.backends('hg')
154 @pytest.mark.backends('hg')
158 def test_commit_does_not_exist_error_does_not_happen(
155 def test_commit_does_not_exist_error_does_not_happen(
159 self, pr_util, pylonsapp):
156 self, pr_util, pylonsapp):
160 """
157 """
161 This test simulates a pull request merge in which the pull operations
158 This test simulates a pull request merge in which the pull operations
162 are handled by a different VCSServer process than all other operations.
159 are handled by a different VCSServer process than all other operations.
163 Without correct cache invalidation this leads to an error when
160 Without correct cache invalidation this leads to an error when
164 retrieving the pulled commits afterwards.
161 retrieving the pulled commits afterwards.
165 """
162 """
166 if pylonsapp.config['vcs.server.protocol'] == 'pyro4':
167 pytest.skip('Test is intended for the HTTP protocol only.')
168
163
169 pull_request = pr_util.create_pull_request()
164 pull_request = pr_util.create_pull_request()
170 target_vcs = pull_request.target_repo.scm_instance()
165 target_vcs = pull_request.target_repo.scm_instance()
171 source_vcs = pull_request.source_repo.scm_instance()
166 source_vcs = pull_request.source_repo.scm_instance()
172 shadow_repo, source_ref, target_ref = self._prepare_shadow_repo(
167 shadow_repo, source_ref, target_ref = self._prepare_shadow_repo(
173 pull_request)
168 pull_request)
174
169
175 # Pull from target and source references without without VCSServer
170 # Pull from target and source references without without VCSServer
176 # caching of mercurial repository objects but with active invalidation
171 # caching of mercurial repository objects but with active invalidation
177 # of RemoteRepo objects.
172 # of RemoteRepo objects.
178 # NOTE: Do not use patch.dict() to disable the cache because it
173 # NOTE: Do not use patch.dict() to disable the cache because it
179 # restores the WHOLE dict and not only the patched keys.
174 # restores the WHOLE dict and not only the patched keys.
180 shadow_repo._remote._wire['cache'] = False
175 shadow_repo._remote._wire['cache'] = False
181 shadow_repo._local_pull(target_vcs.path, target_ref)
176 shadow_repo._local_pull(target_vcs.path, target_ref)
182 shadow_repo._local_pull(source_vcs.path, source_ref)
177 shadow_repo._local_pull(source_vcs.path, source_ref)
183 shadow_repo._remote._wire.pop('cache')
178 shadow_repo._remote._wire.pop('cache')
184
179
185 # Try to lookup the target and source references in shadow repo. This
180 # Try to lookup the target and source references in shadow repo. This
186 # should work because the RemoteRepo object gets invalidated during the
181 # should work because the RemoteRepo object gets invalidated during the
187 # above pull operations.
182 # above pull operations.
188 shadow_repo.get_commit(target_ref.commit_id)
183 shadow_repo.get_commit(target_ref.commit_id)
189 shadow_repo.get_commit(source_ref.commit_id)
184 shadow_repo.get_commit(source_ref.commit_id)
@@ -1,83 +1,77 b''
1 ################################################################################
1 ################################################################################
2 # RhodeCode VCSServer with HTTP Backend - configuration #
2 # RhodeCode VCSServer with HTTP Backend - configuration #
3 # #
3 # #
4 ################################################################################
4 ################################################################################
5
5
6 [app:main]
6 [app:main]
7 use = egg:rhodecode-vcsserver
7 use = egg:rhodecode-vcsserver
8
8
9 pyramid.default_locale_name = en
9 pyramid.default_locale_name = en
10 pyramid.includes =
10 pyramid.includes =
11 pyramid.reload_templates = true
11 pyramid.reload_templates = true
12
12
13 # default locale used by VCS systems
13 # default locale used by VCS systems
14 locale = en_US.UTF-8
14 locale = en_US.UTF-8
15
15
16 # cache regions, please don't change
16 # cache regions, please don't change
17 beaker.cache.regions = repo_object
17 beaker.cache.regions = repo_object
18 beaker.cache.repo_object.type = memorylru
18 beaker.cache.repo_object.type = memorylru
19 beaker.cache.repo_object.max_items = 100
19 beaker.cache.repo_object.max_items = 100
20 # cache auto-expires after N seconds
20 # cache auto-expires after N seconds
21 beaker.cache.repo_object.expire = 300
21 beaker.cache.repo_object.expire = 300
22 beaker.cache.repo_object.enabled = true
22 beaker.cache.repo_object.enabled = true
23
23
24 [server:main]
24 [server:main]
25 use = egg:waitress#main
25 use = egg:waitress#main
26 host = 127.0.0.1
26 host = 127.0.0.1
27 port = 9900
27 port = 9900
28
28
29 ################################
29 ################################
30 ### LOGGING CONFIGURATION ####
30 ### LOGGING CONFIGURATION ####
31 ################################
31 ################################
32 [loggers]
32 [loggers]
33 keys = root, vcsserver, pyro4, beaker
33 keys = root, vcsserver, beaker
34
34
35 [handlers]
35 [handlers]
36 keys = console
36 keys = console
37
37
38 [formatters]
38 [formatters]
39 keys = generic
39 keys = generic
40
40
41 #############
41 #############
42 ## LOGGERS ##
42 ## LOGGERS ##
43 #############
43 #############
44 [logger_root]
44 [logger_root]
45 level = NOTSET
45 level = NOTSET
46 handlers = console
46 handlers = console
47
47
48 [logger_vcsserver]
48 [logger_vcsserver]
49 level = DEBUG
49 level = DEBUG
50 handlers =
50 handlers =
51 qualname = vcsserver
51 qualname = vcsserver
52 propagate = 1
52 propagate = 1
53
53
54 [logger_beaker]
54 [logger_beaker]
55 level = DEBUG
55 level = DEBUG
56 handlers =
56 handlers =
57 qualname = beaker
57 qualname = beaker
58 propagate = 1
58 propagate = 1
59
59
60 [logger_pyro4]
61 level = DEBUG
62 handlers =
63 qualname = Pyro4
64 propagate = 1
65
66
60
67 ##############
61 ##############
68 ## HANDLERS ##
62 ## HANDLERS ##
69 ##############
63 ##############
70
64
71 [handler_console]
65 [handler_console]
72 class = StreamHandler
66 class = StreamHandler
73 args = (sys.stderr,)
67 args = (sys.stderr,)
74 level = INFO
68 level = INFO
75 formatter = generic
69 formatter = generic
76
70
77 ################
71 ################
78 ## FORMATTERS ##
72 ## FORMATTERS ##
79 ################
73 ################
80
74
81 [formatter_generic]
75 [formatter_generic]
82 format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
76 format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
83 datefmt = %Y-%m-%d %H:%M:%S
77 datefmt = %Y-%m-%d %H:%M:%S
@@ -1,255 +1,254 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2017 RhodeCode GmbH
3 # Copyright (C) 2010-2017 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 # Import early to make sure things are patched up properly
21 # Import early to make sure things are patched up properly
22 from setuptools import setup, find_packages
22 from setuptools import setup, find_packages
23
23
24 import os
24 import os
25 import sys
25 import sys
26 import pkgutil
26 import pkgutil
27 import platform
27 import platform
28
28
29 from pip.download import PipSession
29 from pip.download import PipSession
30 from pip.req import parse_requirements
30 from pip.req import parse_requirements
31
31
32 from codecs import open
32 from codecs import open
33
33
34
34
35 if sys.version_info < (2, 7):
35 if sys.version_info < (2, 7):
36 raise Exception('RhodeCode requires Python 2.7 or later')
36 raise Exception('RhodeCode requires Python 2.7 or later')
37
37
38 here = os.path.abspath(os.path.dirname(__file__))
38 here = os.path.abspath(os.path.dirname(__file__))
39
39
40 # defines current platform
40 # defines current platform
41 __platform__ = platform.system()
41 __platform__ = platform.system()
42 __license__ = 'AGPLv3, and Commercial License'
42 __license__ = 'AGPLv3, and Commercial License'
43 __author__ = 'RhodeCode GmbH'
43 __author__ = 'RhodeCode GmbH'
44 __url__ = 'https://code.rhodecode.com'
44 __url__ = 'https://code.rhodecode.com'
45 is_windows = __platform__ in ('Windows',)
45 is_windows = __platform__ in ('Windows',)
46
46
47
47
48 def _get_requirements(req_filename, exclude=None, extras=None):
48 def _get_requirements(req_filename, exclude=None, extras=None):
49 extras = extras or []
49 extras = extras or []
50 exclude = exclude or []
50 exclude = exclude or []
51
51
52 try:
52 try:
53 parsed = parse_requirements(
53 parsed = parse_requirements(
54 os.path.join(here, req_filename), session=PipSession())
54 os.path.join(here, req_filename), session=PipSession())
55 except TypeError:
55 except TypeError:
56 # try pip < 6.0.0, that doesn't support session
56 # try pip < 6.0.0, that doesn't support session
57 parsed = parse_requirements(os.path.join(here, req_filename))
57 parsed = parse_requirements(os.path.join(here, req_filename))
58
58
59 requirements = []
59 requirements = []
60 for ir in parsed:
60 for ir in parsed:
61 if ir.req and ir.name not in exclude:
61 if ir.req and ir.name not in exclude:
62 requirements.append(str(ir.req))
62 requirements.append(str(ir.req))
63 return requirements + extras
63 return requirements + extras
64
64
65
65
66 # requirements extract
66 # requirements extract
67 setup_requirements = ['PasteScript', 'pytest-runner']
67 setup_requirements = ['PasteScript', 'pytest-runner']
68 install_requirements = _get_requirements(
68 install_requirements = _get_requirements(
69 'requirements.txt', exclude=['setuptools'])
69 'requirements.txt', exclude=['setuptools'])
70 test_requirements = _get_requirements(
70 test_requirements = _get_requirements(
71 'requirements_test.txt', extras=['configobj'])
71 'requirements_test.txt', extras=['configobj'])
72
72
73 install_requirements = [
73 install_requirements = [
74 'Babel',
74 'Babel',
75 'Beaker',
75 'Beaker',
76 'FormEncode',
76 'FormEncode',
77 'Mako',
77 'Mako',
78 'Markdown',
78 'Markdown',
79 'MarkupSafe',
79 'MarkupSafe',
80 'MySQL-python',
80 'MySQL-python',
81 'Paste',
81 'Paste',
82 'PasteDeploy',
82 'PasteDeploy',
83 'PasteScript',
83 'PasteScript',
84 'Pygments',
84 'Pygments',
85 'pygments-markdown-lexer',
85 'pygments-markdown-lexer',
86 'Pylons',
86 'Pylons',
87 'Pyro4',
88 'Routes',
87 'Routes',
89 'SQLAlchemy',
88 'SQLAlchemy',
90 'Tempita',
89 'Tempita',
91 'URLObject',
90 'URLObject',
92 'WebError',
91 'WebError',
93 'WebHelpers',
92 'WebHelpers',
94 'WebHelpers2',
93 'WebHelpers2',
95 'WebOb',
94 'WebOb',
96 'WebTest',
95 'WebTest',
97 'Whoosh',
96 'Whoosh',
98 'alembic',
97 'alembic',
99 'amqplib',
98 'amqplib',
100 'anyjson',
99 'anyjson',
101 'appenlight-client',
100 'appenlight-client',
102 'authomatic',
101 'authomatic',
103 'backport_ipaddress',
102 'backport_ipaddress',
104 'celery',
103 'celery',
105 'channelstream',
104 'channelstream',
106 'colander',
105 'colander',
107 'decorator',
106 'decorator',
108 'deform',
107 'deform',
109 'docutils',
108 'docutils',
110 'gevent',
109 'gevent',
111 'gunicorn',
110 'gunicorn',
112 'infrae.cache',
111 'infrae.cache',
113 'ipython',
112 'ipython',
114 'iso8601',
113 'iso8601',
115 'kombu',
114 'kombu',
116 'msgpack-python',
115 'msgpack-python',
117 'packaging',
116 'packaging',
118 'psycopg2',
117 'psycopg2',
119 'py-gfm',
118 'py-gfm',
120 'pycrypto',
119 'pycrypto',
121 'pycurl',
120 'pycurl',
122 'pyparsing',
121 'pyparsing',
123 'pyramid',
122 'pyramid',
124 'pyramid-debugtoolbar',
123 'pyramid-debugtoolbar',
125 'pyramid-mako',
124 'pyramid-mako',
126 'pyramid-beaker',
125 'pyramid-beaker',
127 'pysqlite',
126 'pysqlite',
128 'python-dateutil',
127 'python-dateutil',
129 'python-ldap',
128 'python-ldap',
130 'python-memcached',
129 'python-memcached',
131 'python-pam',
130 'python-pam',
132 'recaptcha-client',
131 'recaptcha-client',
133 'repoze.lru',
132 'repoze.lru',
134 'requests',
133 'requests',
135 'simplejson',
134 'simplejson',
136 'subprocess32',
135 'subprocess32',
137 'waitress',
136 'waitress',
138 'zope.cachedescriptors',
137 'zope.cachedescriptors',
139 'dogpile.cache',
138 'dogpile.cache',
140 'dogpile.core',
139 'dogpile.core',
141 'psutil',
140 'psutil',
142 'py-bcrypt',
141 'py-bcrypt',
143 ]
142 ]
144
143
145
144
146 def get_version():
145 def get_version():
147 version = pkgutil.get_data('rhodecode', 'VERSION')
146 version = pkgutil.get_data('rhodecode', 'VERSION')
148 return version.strip()
147 return version.strip()
149
148
150
149
151 # additional files that goes into package itself
150 # additional files that goes into package itself
152 package_data = {
151 package_data = {
153 '': ['*.txt', '*.rst'],
152 '': ['*.txt', '*.rst'],
154 'configs': ['*.ini'],
153 'configs': ['*.ini'],
155 'rhodecode': ['VERSION', 'i18n/*/LC_MESSAGES/*.mo', ],
154 'rhodecode': ['VERSION', 'i18n/*/LC_MESSAGES/*.mo', ],
156 }
155 }
157
156
158 description = 'Source Code Management Platform'
157 description = 'Source Code Management Platform'
159 keywords = ' '.join([
158 keywords = ' '.join([
160 'rhodecode', 'mercurial', 'git', 'svn',
159 'rhodecode', 'mercurial', 'git', 'svn',
161 'code review',
160 'code review',
162 'repo groups', 'ldap', 'repository management', 'hgweb',
161 'repo groups', 'ldap', 'repository management', 'hgweb',
163 'hgwebdir', 'gitweb', 'serving hgweb',
162 'hgwebdir', 'gitweb', 'serving hgweb',
164 ])
163 ])
165
164
166
165
167 # README/DESCRIPTION generation
166 # README/DESCRIPTION generation
168 readme_file = 'README.rst'
167 readme_file = 'README.rst'
169 changelog_file = 'CHANGES.rst'
168 changelog_file = 'CHANGES.rst'
170 try:
169 try:
171 long_description = open(readme_file).read() + '\n\n' + \
170 long_description = open(readme_file).read() + '\n\n' + \
172 open(changelog_file).read()
171 open(changelog_file).read()
173 except IOError as err:
172 except IOError as err:
174 sys.stderr.write(
173 sys.stderr.write(
175 "[WARNING] Cannot find file specified as long_description (%s)\n "
174 "[WARNING] Cannot find file specified as long_description (%s)\n "
176 "or changelog (%s) skipping that file" % (readme_file, changelog_file))
175 "or changelog (%s) skipping that file" % (readme_file, changelog_file))
177 long_description = description
176 long_description = description
178
177
179
178
180 setup(
179 setup(
181 name='rhodecode-enterprise-ce',
180 name='rhodecode-enterprise-ce',
182 version=get_version(),
181 version=get_version(),
183 description=description,
182 description=description,
184 long_description=long_description,
183 long_description=long_description,
185 keywords=keywords,
184 keywords=keywords,
186 license=__license__,
185 license=__license__,
187 author=__author__,
186 author=__author__,
188 author_email='marcin@rhodecode.com',
187 author_email='marcin@rhodecode.com',
189 url=__url__,
188 url=__url__,
190 setup_requires=setup_requirements,
189 setup_requires=setup_requirements,
191 install_requires=install_requirements,
190 install_requires=install_requirements,
192 tests_require=test_requirements,
191 tests_require=test_requirements,
193 zip_safe=False,
192 zip_safe=False,
194 packages=find_packages(exclude=["docs", "tests*"]),
193 packages=find_packages(exclude=["docs", "tests*"]),
195 package_data=package_data,
194 package_data=package_data,
196 include_package_data=True,
195 include_package_data=True,
197 classifiers=[
196 classifiers=[
198 'Development Status :: 6 - Mature',
197 'Development Status :: 6 - Mature',
199 'Environment :: Web Environment',
198 'Environment :: Web Environment',
200 'Intended Audience :: Developers',
199 'Intended Audience :: Developers',
201 'Operating System :: OS Independent',
200 'Operating System :: OS Independent',
202 'Topic :: Software Development :: Version Control',
201 'Topic :: Software Development :: Version Control',
203 'License :: OSI Approved :: Affero GNU General Public License v3 or later (AGPLv3+)',
202 'License :: OSI Approved :: Affero GNU General Public License v3 or later (AGPLv3+)',
204 'Programming Language :: Python :: 2.7',
203 'Programming Language :: Python :: 2.7',
205 ],
204 ],
206 message_extractors={
205 message_extractors={
207 'rhodecode': [
206 'rhodecode': [
208 ('**.py', 'python', None),
207 ('**.py', 'python', None),
209 ('**.js', 'javascript', None),
208 ('**.js', 'javascript', None),
210 ('templates/**.mako', 'mako', {'input_encoding': 'utf-8'}),
209 ('templates/**.mako', 'mako', {'input_encoding': 'utf-8'}),
211 ('templates/**.html', 'mako', {'input_encoding': 'utf-8'}),
210 ('templates/**.html', 'mako', {'input_encoding': 'utf-8'}),
212 ('public/**', 'ignore', None),
211 ('public/**', 'ignore', None),
213 ]
212 ]
214 },
213 },
215 paster_plugins=['PasteScript', 'Pylons'],
214 paster_plugins=['PasteScript', 'Pylons'],
216 entry_points={
215 entry_points={
217 'enterprise.plugins1': [
216 'enterprise.plugins1': [
218 'crowd=rhodecode.authentication.plugins.auth_crowd:plugin_factory',
217 'crowd=rhodecode.authentication.plugins.auth_crowd:plugin_factory',
219 'headers=rhodecode.authentication.plugins.auth_headers:plugin_factory',
218 'headers=rhodecode.authentication.plugins.auth_headers:plugin_factory',
220 'jasig_cas=rhodecode.authentication.plugins.auth_jasig_cas:plugin_factory',
219 'jasig_cas=rhodecode.authentication.plugins.auth_jasig_cas:plugin_factory',
221 'ldap=rhodecode.authentication.plugins.auth_ldap:plugin_factory',
220 'ldap=rhodecode.authentication.plugins.auth_ldap:plugin_factory',
222 'pam=rhodecode.authentication.plugins.auth_pam:plugin_factory',
221 'pam=rhodecode.authentication.plugins.auth_pam:plugin_factory',
223 'rhodecode=rhodecode.authentication.plugins.auth_rhodecode:plugin_factory',
222 'rhodecode=rhodecode.authentication.plugins.auth_rhodecode:plugin_factory',
224 'token=rhodecode.authentication.plugins.auth_token:plugin_factory',
223 'token=rhodecode.authentication.plugins.auth_token:plugin_factory',
225 ],
224 ],
226 'paste.app_factory': [
225 'paste.app_factory': [
227 'main=rhodecode.config.middleware:make_pyramid_app',
226 'main=rhodecode.config.middleware:make_pyramid_app',
228 'pylons=rhodecode.config.middleware:make_app',
227 'pylons=rhodecode.config.middleware:make_app',
229 ],
228 ],
230 'paste.app_install': [
229 'paste.app_install': [
231 'main=pylons.util:PylonsInstaller',
230 'main=pylons.util:PylonsInstaller',
232 'pylons=pylons.util:PylonsInstaller',
231 'pylons=pylons.util:PylonsInstaller',
233 ],
232 ],
234 'paste.global_paster_command': [
233 'paste.global_paster_command': [
235 'make-config=rhodecode.lib.paster_commands.make_config:Command',
234 'make-config=rhodecode.lib.paster_commands.make_config:Command',
236 'setup-rhodecode=rhodecode.lib.paster_commands.setup_rhodecode:Command',
235 'setup-rhodecode=rhodecode.lib.paster_commands.setup_rhodecode:Command',
237 'update-repoinfo=rhodecode.lib.paster_commands.update_repoinfo:Command',
236 'update-repoinfo=rhodecode.lib.paster_commands.update_repoinfo:Command',
238 'cache-keys=rhodecode.lib.paster_commands.cache_keys:Command',
237 'cache-keys=rhodecode.lib.paster_commands.cache_keys:Command',
239 'ishell=rhodecode.lib.paster_commands.ishell:Command',
238 'ishell=rhodecode.lib.paster_commands.ishell:Command',
240 'upgrade-db=rhodecode.lib.dbmigrate:UpgradeDb',
239 'upgrade-db=rhodecode.lib.dbmigrate:UpgradeDb',
241 'celeryd=rhodecode.lib.celerypylons.commands:CeleryDaemonCommand',
240 'celeryd=rhodecode.lib.celerypylons.commands:CeleryDaemonCommand',
242 ],
241 ],
243 'pytest11': [
242 'pytest11': [
244 'pylons=rhodecode.tests.pylons_plugin',
243 'pylons=rhodecode.tests.pylons_plugin',
245 'enterprise=rhodecode.tests.plugin',
244 'enterprise=rhodecode.tests.plugin',
246 ],
245 ],
247 'console_scripts': [
246 'console_scripts': [
248 'rcserver=rhodecode.rcserver:main',
247 'rcserver=rhodecode.rcserver:main',
249 ],
248 ],
250 'beaker.backends': [
249 'beaker.backends': [
251 'memorylru_base=rhodecode.lib.memory_lru_debug:MemoryLRUNamespaceManagerBase',
250 'memorylru_base=rhodecode.lib.memory_lru_debug:MemoryLRUNamespaceManagerBase',
252 'memorylru_debug=rhodecode.lib.memory_lru_debug:MemoryLRUNamespaceManagerDebug'
251 'memorylru_debug=rhodecode.lib.memory_lru_debug:MemoryLRUNamespaceManagerDebug'
253 ]
252 ]
254 },
253 },
255 )
254 )
1 NO CONTENT: file was removed
NO CONTENT: file was removed
1 NO CONTENT: file was removed
NO CONTENT: file was removed
1 NO CONTENT: file was removed
NO CONTENT: file was removed
1 NO CONTENT: file was removed
NO CONTENT: file was removed
General Comments 0
You need to be logged in to leave comments. Login now