@@ -1,681 +1,678 @@
|
1 | 1 | |
|
2 | 2 | |
|
3 | 3 | ################################################################################ |
|
4 | 4 | ## RHODECODE COMMUNITY EDITION CONFIGURATION ## |
|
5 | 5 | # The %(here)s variable will be replaced with the parent directory of this file# |
|
6 | 6 | ################################################################################ |
|
7 | 7 | |
|
8 | 8 | [DEFAULT] |
|
9 | 9 | debug = true |
|
10 | 10 | |
|
11 | 11 | ################################################################################ |
|
12 | 12 | ## EMAIL CONFIGURATION ## |
|
13 | 13 | ## Uncomment and replace with the email address which should receive ## |
|
14 | 14 | ## any error reports after an application crash ## |
|
15 | 15 | ## Additionally these settings will be used by the RhodeCode mailing system ## |
|
16 | 16 | ################################################################################ |
|
17 | 17 | |
|
18 | 18 | ## prefix all email subjects with the given prefix; helps filtering out emails |
|
19 | 19 | #email_prefix = [RhodeCode] |
|
20 | 20 | |
|
21 | 21 | ## email FROM address from which all mails will be sent |
|
22 | 22 | #app_email_from = rhodecode-noreply@localhost |
|
23 | 23 | |
|
24 | 24 | ## Uncomment and replace with the address which should receive any error report |
|
25 | 25 | ## note: using appenlight for error handling doesn't need this to be uncommented |
|
26 | 26 | #email_to = admin@localhost |
|
27 | 27 | |
|
28 | 28 | ## in case of Application errors, send an error email from this address |
|
29 | 29 | #error_email_from = rhodecode_error@localhost |
|
30 | 30 | |
|
31 | 31 | ## additional error message to be sent in case of server crash |
|
32 | 32 | #error_message = |
|
33 | 33 | |
|
34 | 34 | |
|
35 | 35 | #smtp_server = mail.server.com |
|
36 | 36 | #smtp_username = |
|
37 | 37 | #smtp_password = |
|
38 | 38 | #smtp_port = |
|
39 | 39 | #smtp_use_tls = false |
|
40 | 40 | #smtp_use_ssl = true |
|
41 | 41 | ## Specify available auth parameters here (e.g. LOGIN PLAIN CRAM-MD5, etc.) |
|
42 | 42 | #smtp_auth = |
|
43 | 43 | |
|
44 | 44 | [server:main] |
|
45 | 45 | ## COMMON ## |
|
46 | 46 | host = 127.0.0.1 |
|
47 | 47 | port = 5000 |
|
48 | 48 | |
|
49 | 49 | ################################## |
|
50 | 50 | ## WAITRESS WSGI SERVER ## |
|
51 | 51 | ## Recommended for Development ## |
|
52 | 52 | ################################## |
|
53 | 53 | |
|
54 | 54 | use = egg:waitress#main |
|
55 | 55 | ## number of worker threads |
|
56 | 56 | threads = 5 |
|
57 | 57 | ## MAX BODY SIZE 100GB |
|
58 | 58 | max_request_body_size = 107374182400 |
|
59 | 59 | ## Use poll instead of select; fixes file descriptor limit problems. |
|
60 | 60 | ## May not work on old windows systems. |
|
61 | 61 | asyncore_use_poll = true |
|
62 | 62 | |
|
63 | 63 | |
|
64 | 64 | ########################## |
|
65 | 65 | ## GUNICORN WSGI SERVER ## |
|
66 | 66 | ########################## |
|
67 | 67 | ## run with gunicorn --log-config rhodecode.ini --paste rhodecode.ini |
|
68 | 68 | |
|
69 | 69 | #use = egg:gunicorn#main |
|
70 | 70 | ## Sets the number of process workers. You must set `instance_id = *` |
|
71 | 71 | ## when this option is set to more than one worker, recommended |
|
72 | 72 | ## value is (2 * NUMBER_OF_CPUS + 1), eg 2CPU = 5 workers |
|
73 | 73 | ## The `instance_id = *` must be set in the [app:main] section below |
|
74 | 74 | #workers = 2 |
|
75 | 75 | ## number of threads for each worker, must be set to 1 for gevent |
|
76 | 76 | ## generally recommended to be 1 |
|
77 | 77 | #threads = 1 |
|
78 | 78 | ## process name |
|
79 | 79 | #proc_name = rhodecode |
|
80 | 80 | ## type of worker class, one of sync, gevent |
|
81 | 81 | ## for bigger setups, a worker class other than sync is recommended |
|
82 | 82 | #worker_class = sync |
|
83 | 83 | ## The maximum number of simultaneous clients. Valid only for Gevent |
|
84 | 84 | #worker_connections = 10 |
|
85 | 85 | ## max number of requests that worker will handle before being gracefully |
|
86 | 86 | ## restarted, could prevent memory leaks |
|
87 | 87 | #max_requests = 1000 |
|
88 | 88 | #max_requests_jitter = 30 |
|
89 | 89 | ## amount of time a worker can spend handling a request before it |
|
90 | 90 | ## gets killed and restarted. Set to 6hrs |
|
91 | 91 | #timeout = 21600 |
|
92 | 92 | |
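## A minimal sketch of an enabled gunicorn block, assuming a hypothetical
## 2-CPU host (2 * 2 + 1 = 5 workers per the formula above; remember that
## `instance_id = *` must then be set in [app:main]):
#use = egg:gunicorn#main
#workers = 5
#threads = 1
#proc_name = rhodecode
#worker_class = sync
#max_requests = 1000
#max_requests_jitter = 30
#timeout = 21600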
|
93 | 93 | |
|
94 | 94 | ## prefix middleware for RhodeCode. |
|
95 | 95 | ## recommended when using proxy setup. |
|
96 | 96 | ## allows serving RhodeCode under a URL prefix on the server. |
|
97 | 97 | ## eg https://server.com/custom_prefix. Enable `filter-with =` option below as well. |
|
98 | 98 | ## And set your prefix like: `prefix = /custom_prefix` |
|
99 | 99 | ## be sure to also set beaker.session.cookie_path = /custom_prefix if you need |
|
100 | 100 | ## to make your cookies only work on prefix url |
|
101 | 101 | [filter:proxy-prefix] |
|
102 | 102 | use = egg:PasteDeploy#prefix |
|
103 | 103 | prefix = / |
|
104 | 104 | |
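## A hedged example: to serve RhodeCode at https://server.com/custom_prefix
## (hypothetical prefix), this section and the related settings would become:
#prefix = /custom_prefix
## ... plus, in [app:main] below:
#filter-with = proxy-prefix
#beaker.session.cookie_path = /custom_prefix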
|
105 | 105 | [app:main] |
|
106 | 106 | use = egg:rhodecode-enterprise-ce |
|
107 | 107 | |
|
108 | 108 | ## enable proxy prefix middleware, defined above |
|
109 | 109 | #filter-with = proxy-prefix |
|
110 | 110 | |
|
111 | 111 | # During development we want to have the debug toolbar enabled |
|
112 | 112 | pyramid.includes = |
|
113 | 113 | pyramid_debugtoolbar |
|
114 | 114 | rhodecode.utils.debugtoolbar |
|
115 | 115 | rhodecode.lib.middleware.request_wrapper |
|
116 | 116 | |
|
117 | 117 | pyramid.reload_templates = true |
|
118 | 118 | |
|
119 | 119 | debugtoolbar.hosts = 0.0.0.0/0 |
|
120 | 120 | debugtoolbar.exclude_prefixes = |
|
121 | 121 | /css |
|
122 | 122 | /fonts |
|
123 | 123 | /images |
|
124 | 124 | /js |
|
125 | 125 | |
|
126 | 126 | ## RHODECODE PLUGINS ## |
|
127 | 127 | rhodecode.includes = |
|
128 | 128 | rhodecode.api |
|
129 | 129 | |
|
130 | 130 | |
|
131 | 131 | # api prefix url |
|
132 | 132 | rhodecode.api.url = /_admin/api |
|
133 | 133 | |
|
134 | 134 | |
|
135 | 135 | ## END RHODECODE PLUGINS ## |
|
136 | 136 | |
|
137 | 137 | ## encryption key used to encrypt social plugin tokens, |
|
138 | 138 | ## remote_urls with credentials etc, if not set it defaults to |
|
139 | 139 | ## `beaker.session.secret` |
|
140 | 140 | #rhodecode.encrypted_values.secret = |
|
141 | 141 | |
|
142 | 142 | ## decryption strict mode (enabled by default). It controls if decryption raises |
|
143 | 143 | ## `SignatureVerificationError` in case of wrong key, or damaged encryption data. |
|
144 | 144 | #rhodecode.encrypted_values.strict = false |
|
145 | 145 | |
|
146 | 146 | ## return gzipped responses from Rhodecode (static files/application) |
|
147 | 147 | gzip_responses = false |
|
148 | 148 | |
|
149 | 149 | ## autogenerate javascript routes file on startup |
|
150 | 150 | generate_js_files = false |
|
151 | 151 | |
|
152 | 152 | ## Optional Languages |
|
153 | 153 | ## en(default), be, de, es, fr, it, ja, pl, pt, ru, zh |
|
154 | 154 | lang = en |
|
155 | 155 | |
|
156 | 156 | ## perform a full repository scan on each server start, this should be |
|
157 | 157 | ## set to false after first startup, to allow faster server restarts. |
|
158 | 158 | startup.import_repos = false |
|
159 | 159 | |
|
160 | 160 | ## Uncomment and set this path to use archive download cache. |
|
161 | 161 | ## Once enabled, generated archives will be cached at this location |
|
162 | 162 | ## and served from the cache during subsequent requests for the same archive of |
|
163 | 163 | ## the repository. |
|
164 | 164 | #archive_cache_dir = /tmp/tarballcache |
|
165 | 165 | |
|
166 | 166 | ## change this to unique ID for security |
|
167 | 167 | app_instance_uuid = rc-production |
|
168 | 168 | |
|
169 | 169 | ## cut off limit for large diffs (size in bytes) |
|
170 | 170 | cut_off_limit_diff = 1024000 |
|
171 | 171 | cut_off_limit_file = 256000 |
|
172 | 172 | |
|
173 | 173 | ## use cache version of scm repo everywhere |
|
174 | 174 | vcs_full_cache = true |
|
175 | 175 | |
|
176 | 176 | ## force https in RhodeCode, fixes https redirects, assumes it's always https |
|
177 | 177 | ## Normally this is controlled by proper http flags sent from http server |
|
178 | 178 | force_https = false |
|
179 | 179 | |
|
180 | 180 | ## use Strict-Transport-Security headers |
|
181 | 181 | use_htsts = false |
|
182 | 182 | |
|
183 | 183 | ## number of commits stats will parse on each iteration |
|
184 | 184 | commit_parse_limit = 25 |
|
185 | 185 | |
|
186 | 186 | ## git rev filter option, --all is the default filter, if you need to |
|
187 | 187 | ## hide all refs in changelog switch this to --branches --tags |
|
188 | 188 | git_rev_filter = --branches --tags |
|
189 | 189 | |
|
190 | 190 | # Set to true if your repos are exposed using the dumb protocol |
|
191 | 191 | git_update_server_info = false |
|
192 | 192 | |
|
193 | 193 | ## RSS/ATOM feed options |
|
194 | 194 | rss_cut_off_limit = 256000 |
|
195 | 195 | rss_items_per_page = 10 |
|
196 | 196 | rss_include_diff = false |
|
197 | 197 | |
|
198 | 198 | ## gist URL alias, used to create nicer urls for gist. This should be an |
|
199 | 199 | ## url that does rewrites to _admin/gists/{gistid}. |
|
200 | 200 | ## example: http://gist.rhodecode.org/{gistid}. Empty means use the internal |
|
201 | 201 | ## RhodeCode url, ie. http[s]://rhodecode.server/_admin/gists/{gistid} |
|
202 | 202 | gist_alias_url = |
|
203 | 203 | |
|
204 | 204 | ## List of controllers (using glob pattern syntax) that AUTH TOKENS could be |
|
205 | 205 | ## used for access. |
|
206 | 206 | ## Adding ?auth_token=TOKEN_HASH to the url authenticates this request as if it |
|
207 | 207 | ## came from the logged-in user who owns this authentication token. |
|
208 | 208 | ## |
|
209 | 209 | ## Syntax is ControllerClass:function_pattern. |
|
210 | 210 | ## To enable access to raw_files put `FilesController:raw`. |
|
211 | 211 | ## To enable access to patches add `ChangesetController:changeset_patch`. |
|
212 | 212 | ## The list should be "," separated and on a single line. |
|
213 | 213 | ## |
|
214 | 214 | ## Recommended controllers to enable: |
|
215 | 215 | # ChangesetController:changeset_patch, |
|
216 | 216 | # ChangesetController:changeset_raw, |
|
217 | 217 | # FilesController:raw, |
|
218 | 218 | # FilesController:archivefile, |
|
219 | 219 | # GistsController:*, |
|
220 | 220 | api_access_controllers_whitelist = |
|
221 | 221 | |
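## A sketch of the whitelist filled in with the recommended controllers
## listed above (single line, comma separated):
#api_access_controllers_whitelist = ChangesetController:changeset_patch, ChangesetController:changeset_raw, FilesController:raw, FilesController:archivefile, GistsController:*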
|
222 | 222 | ## default encoding used to convert from and to unicode |
|
223 | 223 | ## can also be a comma separated list of encodings in case of mixed encodings |
|
224 | 224 | default_encoding = UTF-8 |
|
225 | 225 | |
|
226 | 226 | ## instance-id prefix |
|
227 | 227 | ## a prefix key for this instance used for cache invalidation when running |
|
228 | 228 | ## multiple instances of rhodecode, make sure it's globally unique for |
|
229 | 229 | ## all running rhodecode instances. Leave empty if you don't use it |
|
230 | 230 | instance_id = |
|
231 | 231 | |
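## Hypothetical example: two load-balanced instances sharing one database
## could set `rc-prod-1` and `rc-prod-2`; with multiple gunicorn workers
## use `*` as noted in the gunicorn section above:
#instance_id = *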
|
232 | 232 | ## Fallback authentication plugin. Set this to a plugin ID to force the usage |
|
233 | 233 | ## of an authentication plugin even if it is disabled by its settings. |
|
234 | 234 | ## This could be useful if you are unable to log in to the system due to broken |
|
235 | 235 | ## authentication settings. Then you can enable e.g. the internal rhodecode auth |
|
236 | 236 | ## module to log in again and fix the settings. |
|
237 | 237 | ## |
|
238 | 238 | ## Available builtin plugin IDs (hash is part of the ID): |
|
239 | 239 | ## egg:rhodecode-enterprise-ce#rhodecode |
|
240 | 240 | ## egg:rhodecode-enterprise-ce#pam |
|
241 | 241 | ## egg:rhodecode-enterprise-ce#ldap |
|
242 | 242 | ## egg:rhodecode-enterprise-ce#jasig_cas |
|
243 | 243 | ## egg:rhodecode-enterprise-ce#headers |
|
244 | 244 | ## egg:rhodecode-enterprise-ce#crowd |
|
245 | 245 | #rhodecode.auth_plugin_fallback = egg:rhodecode-enterprise-ce#rhodecode |
|
246 | 246 | |
|
247 | 247 | ## alternative return HTTP header for failed authentication. Default HTTP |
|
248 | 248 | ## response is 401 HTTPUnauthorized. Currently HG clients have trouble |
|
249 | 249 | ## handling that, causing a series of failed authentication calls. |
|
250 | 250 | ## Set this variable to 403 to return HTTPForbidden, or any other HTTP code |
|
251 | 251 | ## This will be served instead of the default 401 on bad authentication |
|
252 | 252 | auth_ret_code = |
|
253 | 253 | |
|
254 | 254 | ## use special detection method when serving auth_ret_code, instead of serving |
|
255 | 255 | ## ret_code directly, use 401 initially (Which triggers credentials prompt) |
|
256 | 256 | ## and then serve auth_ret_code to clients |
|
257 | 257 | auth_ret_code_detection = false |
|
258 | 258 | |
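## Example per the notes above: return 403 HTTPForbidden instead of 401,
## while still prompting for credentials first:
#auth_ret_code = 403
#auth_ret_code_detection = true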
|
259 | 259 | ## locking return code. When repository is locked return this HTTP code. 2XX |
|
260 | 260 | ## codes don't break the transactions while 4XX codes do |
|
261 | 261 | lock_ret_code = 423 |
|
262 | 262 | |
|
263 | 263 | ## allows to change the repository location in settings page |
|
264 | 264 | allow_repo_location_change = true |
|
265 | 265 | |
|
266 | 266 | ## allows to setup custom hooks in settings page |
|
267 | 267 | allow_custom_hooks_settings = true |
|
268 | 268 | |
|
269 | 269 | ## generated license token, goto license page in RhodeCode settings to obtain |
|
270 | 270 | ## new token |
|
271 | 271 | license_token = |
|
272 | 272 | |
|
273 | 273 | ## supervisor connection uri, for managing supervisor and logs. |
|
274 | 274 | supervisor.uri = |
|
275 | 275 | ## supervisord group name/id we only want this RC instance to handle |
|
276 | 276 | supervisor.group_id = dev |
|
277 | 277 | |
|
278 | 278 | ## Display extended labs settings |
|
279 | 279 | labs_settings_active = true |
|
280 | 280 | |
|
281 | 281 | #################################### |
|
282 | 282 | ### CELERY CONFIG #### |
|
283 | 283 | #################################### |
|
284 | 284 | use_celery = false |
|
285 | 285 | broker.host = localhost |
|
286 | 286 | broker.vhost = rabbitmqhost |
|
287 | 287 | broker.port = 5672 |
|
288 | 288 | broker.user = rabbitmq |
|
289 | 289 | broker.password = qweqwe |
|
290 | 290 | |
|
291 | 291 | celery.imports = rhodecode.lib.celerylib.tasks |
|
292 | 292 | |
|
293 | 293 | celery.result.backend = amqp |
|
294 | 294 | celery.result.dburi = amqp:// |
|
295 | 295 | celery.result.serialier = json |
|
296 | 296 | |
|
297 | 297 | #celery.send.task.error.emails = true |
|
298 | 298 | #celery.amqp.task.result.expires = 18000 |
|
299 | 299 | |
|
300 | 300 | celeryd.concurrency = 2 |
|
301 | 301 | #celeryd.log.file = celeryd.log |
|
302 | 302 | celeryd.log.level = debug |
|
303 | 303 | celeryd.max.tasks.per.child = 1 |
|
304 | 304 | |
|
305 | 305 | ## tasks will never be sent to the queue, but executed locally instead. |
|
306 | 306 | celery.always.eager = false |
|
307 | 307 | |
|
308 | 308 | #################################### |
|
309 | 309 | ### BEAKER CACHE #### |
|
310 | 310 | #################################### |
|
311 | 311 | # default cache dir for templates. Putting this into a ramdisk |
|
312 | 312 | ## can boost performance, eg. %(here)s/data_ramdisk |
|
313 | 313 | cache_dir = %(here)s/data |
|
314 | 314 | |
|
315 | 315 | ## locking and default file storage for Beaker. Putting this into a ramdisk |
|
316 | 316 | ## can boost performance, eg. %(here)s/data_ramdisk/cache/beaker_data |
|
317 | 317 | beaker.cache.data_dir = %(here)s/data/cache/beaker_data |
|
318 | 318 | beaker.cache.lock_dir = %(here)s/data/cache/beaker_lock |
|
319 | 319 | |
|
320 | 320 | beaker.cache.regions = super_short_term, short_term, long_term, sql_cache_short, auth_plugins, repo_cache_long |
|
321 | 321 | |
|
322 | 322 | beaker.cache.super_short_term.type = memory |
|
323 | 323 | beaker.cache.super_short_term.expire = 10 |
|
324 | 324 | beaker.cache.super_short_term.key_length = 256 |
|
325 | 325 | |
|
326 | 326 | beaker.cache.short_term.type = memory |
|
327 | 327 | beaker.cache.short_term.expire = 60 |
|
328 | 328 | beaker.cache.short_term.key_length = 256 |
|
329 | 329 | |
|
330 | 330 | beaker.cache.long_term.type = memory |
|
331 | 331 | beaker.cache.long_term.expire = 36000 |
|
332 | 332 | beaker.cache.long_term.key_length = 256 |
|
333 | 333 | |
|
334 | 334 | beaker.cache.sql_cache_short.type = memory |
|
335 | 335 | beaker.cache.sql_cache_short.expire = 10 |
|
336 | 336 | beaker.cache.sql_cache_short.key_length = 256 |
|
337 | 337 | |
|
338 | 338 | ## default is memory cache, configure only if required |
|
339 | 339 | ## using multi-node or multi-worker setup |
|
340 | 340 | #beaker.cache.auth_plugins.type = ext:database |
|
341 | 341 | #beaker.cache.auth_plugins.lock_dir = %(here)s/data/cache/auth_plugin_lock |
|
342 | 342 | #beaker.cache.auth_plugins.url = postgresql://postgres:secret@localhost/rhodecode |
|
343 | 343 | #beaker.cache.auth_plugins.url = mysql://root:secret@127.0.0.1/rhodecode |
|
344 | 344 | #beaker.cache.auth_plugins.sa.pool_recycle = 3600 |
|
345 | 345 | #beaker.cache.auth_plugins.sa.pool_size = 10 |
|
346 | 346 | #beaker.cache.auth_plugins.sa.max_overflow = 0 |
|
347 | 347 | |
|
348 | 348 | beaker.cache.repo_cache_long.type = memorylru_base |
|
349 | 349 | beaker.cache.repo_cache_long.max_items = 4096 |
|
350 | 350 | beaker.cache.repo_cache_long.expire = 2592000 |
|
351 | 351 | |
|
352 | 352 | ## default is memorylru_base cache, configure only if required |
|
353 | 353 | ## using multi-node or multi-worker setup |
|
354 | 354 | #beaker.cache.repo_cache_long.type = ext:memcached |
|
355 | 355 | #beaker.cache.repo_cache_long.url = localhost:11211 |
|
356 | 356 | #beaker.cache.repo_cache_long.expire = 1209600 |
|
357 | 357 | #beaker.cache.repo_cache_long.key_length = 256 |
|
358 | 358 | |
|
359 | 359 | #################################### |
|
360 | 360 | ### BEAKER SESSION #### |
|
361 | 361 | #################################### |
|
362 | 362 | |
|
363 | 363 | ## .session.type is the type of storage used for the session; currently allowed |
|
364 | 364 | ## types are file, ext:memcached, ext:database, and memory (default). |
|
365 | 365 | beaker.session.type = file |
|
366 | 366 | beaker.session.data_dir = %(here)s/data/sessions/data |
|
367 | 367 | |
|
368 | 368 | ## db based session, fast, and allows easy management over logged in users |
|
369 | 369 | #beaker.session.type = ext:database |
|
370 | 370 | #beaker.session.table_name = db_session |
|
371 | 371 | #beaker.session.sa.url = postgresql://postgres:secret@localhost/rhodecode |
|
372 | 372 | #beaker.session.sa.url = mysql://root:secret@127.0.0.1/rhodecode |
|
373 | 373 | #beaker.session.sa.pool_recycle = 3600 |
|
374 | 374 | #beaker.session.sa.echo = false |
|
375 | 375 | |
|
376 | 376 | beaker.session.key = rhodecode |
|
377 | 377 | beaker.session.secret = develop-rc-uytcxaz |
|
378 | 378 | beaker.session.lock_dir = %(here)s/data/sessions/lock |
|
379 | 379 | |
|
380 | 380 | ## Secure encrypted cookie. Requires AES and AES python libraries |
|
381 | 381 | ## you must disable beaker.session.secret to use this |
|
382 | 382 | #beaker.session.encrypt_key = key_for_encryption |
|
383 | 383 | #beaker.session.validate_key = validation_key |
|
384 | 384 | |
|
385 | 385 | ## sets session as invalid (also logging out the user) if it has not been |
|
386 | 386 | ## accessed for the given amount of time in seconds |
|
387 | 387 | beaker.session.timeout = 2592000 |
|
388 | 388 | beaker.session.httponly = true |
|
389 | 389 | ## Path to use for the cookie. Set to prefix if you use prefix middleware |
|
390 | 390 | #beaker.session.cookie_path = /custom_prefix |
|
391 | 391 | |
|
392 | 392 | ## uncomment for https secure cookie |
|
393 | 393 | beaker.session.secure = false |
|
394 | 394 | |
|
395 | 395 | ## auto save the session so you don't have to call .save() |
|
396 | 396 | beaker.session.auto = false |
|
397 | 397 | |
|
398 | 398 | ## default cookie expiration time in seconds, set to `true` to set expire |
|
399 | 399 | ## at browser close |
|
400 | 400 | #beaker.session.cookie_expires = 3600 |
|
401 | 401 | |
|
402 | 402 | ################################### |
|
403 | 403 | ## SEARCH INDEXING CONFIGURATION ## |
|
404 | 404 | ################################### |
|
405 | 405 | ## Full text search indexer is available in rhodecode-tools under |
|
406 | 406 | ## `rhodecode-tools index` command |
|
407 | 407 | |
|
408 | 408 | ## WHOOSH Backend, doesn't require additional services to run |
|
409 | 409 | ## it works well with a few dozen repos |
|
410 | 410 | search.module = rhodecode.lib.index.whoosh |
|
411 | 411 | search.location = %(here)s/data/index |
|
412 | 412 | |
|
413 | 413 | ######################################## |
|
414 | 414 | ### CHANNELSTREAM CONFIG #### |
|
415 | 415 | ######################################## |
|
416 | 416 | ## channelstream enables persistent connections and live notification |
|
417 | 417 | ## in the system. It's also used by the chat system |
|
418 | 418 | channelstream.enabled = false |
|
419 | 419 | |
|
420 | 420 | ## server address for channelstream server on the backend |
|
421 | 421 | channelstream.server = 127.0.0.1:9800 |
|
422 | 422 | |
|
423 | 423 | ## location of the channelstream server from outside world |
|
424 | 424 | ## use ws:// for http or wss:// for https. This address needs to be handled |
|
425 | 425 | ## by external HTTP server such as Nginx or Apache |
|
426 | 426 | ## see nginx/apache configuration examples in our docs |
|
427 | 427 | channelstream.ws_url = ws://rhodecode.yourserver.com/_channelstream |
|
428 | 428 | channelstream.secret = secret |
|
429 | 429 | channelstream.history.location = %(here)s/channelstream_history |
|
430 | 430 | |
|
431 | 431 | ## Internal application path that JavaScript uses to connect to. |
|
432 | 432 | ## If you use proxy-prefix the prefix should be added before /_channelstream |
|
433 | 433 | channelstream.proxy_path = /_channelstream |
|
434 | 434 | |
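## A hedged example for an https deployment behind the proxy-prefix above
## (hostname and prefix are hypothetical):
#channelstream.ws_url = wss://rhodecode.yourserver.com/custom_prefix/_channelstream
#channelstream.proxy_path = /custom_prefix/_channelstream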
|
435 | 435 | |
|
436 | 436 | ################################### |
|
437 | 437 | ## APPENLIGHT CONFIG ## |
|
438 | 438 | ################################### |
|
439 | 439 | |
|
440 | 440 | ## Appenlight is tailored to work with RhodeCode, see |
|
441 | 441 | ## http://appenlight.com for details on how to obtain an account |
|
442 | 442 | |
|
443 | 443 | ## appenlight integration enabled |
|
444 | 444 | appenlight = false |
|
445 | 445 | |
|
446 | 446 | appenlight.server_url = https://api.appenlight.com |
|
447 | 447 | appenlight.api_key = YOUR_API_KEY |
|
448 | 448 | #appenlight.transport_config = https://api.appenlight.com?threaded=1&timeout=5 |
|
449 | 449 | |
|
450 | 450 | # used for JS client |
|
451 | 451 | appenlight.api_public_key = YOUR_API_PUBLIC_KEY |
|
452 | 452 | |
|
453 | 453 | ## TWEAK AMOUNT OF INFO SENT HERE |
|
454 | 454 | |
|
455 | 455 | ## enables 404 error logging (default False) |
|
456 | 456 | appenlight.report_404 = false |
|
457 | 457 | |
|
458 | 458 | ## time in seconds after request is considered being slow (default 1) |
|
459 | 459 | appenlight.slow_request_time = 1 |
|
460 | 460 | |
|
461 | 461 | ## record slow requests in application |
|
462 | 462 | ## (needs to be enabled for slow datastore recording and time tracking) |
|
463 | 463 | appenlight.slow_requests = true |
|
464 | 464 | |
|
465 | 465 | ## enable hooking to application loggers |
|
466 | 466 | appenlight.logging = true |
|
467 | 467 | |
|
468 | 468 | ## minimum log level for log capture |
|
469 | 469 | appenlight.logging.level = WARNING |
|
470 | 470 | |
|
471 | 471 | ## send logs only from erroneous/slow requests |
|
472 | 472 | ## (saves API quota for intensive logging) |
|
473 | 473 | appenlight.logging_on_error = false |
|
474 | 474 | |
|
475 | 475 | ## list of additional keywords that should be grabbed from environ object |
|
476 | 476 | ## can be string with comma separated list of words in lowercase |
|
477 | 477 | ## (by default the client will always send the following info: |
|
478 | 478 | ## 'REMOTE_USER', 'REMOTE_ADDR', 'SERVER_NAME', 'CONTENT_TYPE' + all keys that |
|
479 | 479 | ## start with HTTP*); this list can be extended with additional keywords here |
|
480 | 480 | appenlight.environ_keys_whitelist = |
|
481 | 481 | |
|
482 | 482 | ## list of keywords that should be blanked from request object |
|
483 | 483 | ## can be string with comma separated list of words in lowercase |
|
484 | 484 | ## (by default the client will always blank keys that contain the following words |
|
485 | 485 | ## 'password', 'passwd', 'pwd', 'auth_tkt', 'secret', 'csrf' |
|
486 | 486 | ## this list can be extended with additional keywords set here |
|
487 | 487 | appenlight.request_keys_blacklist = |
|
488 | 488 | |
|
489 | 489 | ## list of namespaces that should be ignored when gathering log entries |
|
490 | 490 | ## can be string with comma separated list of namespaces |
|
491 | 491 | ## (by default the client ignores its own entries: appenlight_client.client) |
|
492 | 492 | appenlight.log_namespace_blacklist = |
|
493 | 493 | |
|
494 | 494 | |
|
495 | 495 | ################################################################################ |
|
496 | 496 | ## WARNING: *THE LINE BELOW MUST BE UNCOMMENTED ON A PRODUCTION ENVIRONMENT* ## |
|
497 | 497 | ## Debug mode will enable the interactive debugging tool, allowing ANYONE to ## |
|
498 | 498 | ## execute malicious code after an exception is raised. ## |
|
499 | 499 | ################################################################################ |
|
500 | 500 | #set debug = false |
|
501 | 501 | |
|
502 | 502 | |
|
503 | 503 | ############## |
|
504 | 504 | ## STYLING ## |
|
505 | 505 | ############## |
|
506 | 506 | debug_style = true |
|
507 | 507 | |
|
508 | 508 | ########################################### |
|
509 | 509 | ### MAIN RHODECODE DATABASE CONFIG ### |
|
510 | 510 | ########################################### |
|
511 | 511 | #sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode.db?timeout=30 |
|
512 | 512 | #sqlalchemy.db1.url = postgresql://postgres:qweqwe@localhost/rhodecode |
|
513 | 513 | #sqlalchemy.db1.url = mysql://root:qweqwe@localhost/rhodecode |
|
514 | 514 | sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode.db?timeout=30 |
|
515 | 515 | |
|
516 | 516 | # see sqlalchemy docs for other advanced settings |
|
517 | 517 | |
|
518 | 518 | ## print the sql statements to output |
|
519 | 519 | sqlalchemy.db1.echo = false |
|
520 | 520 | ## recycle the connections after this amount of seconds |
|
521 | 521 | sqlalchemy.db1.pool_recycle = 3600 |
|
522 | 522 | sqlalchemy.db1.convert_unicode = true |
|
523 | 523 | |
|
524 | 524 | ## the number of connections to keep open inside the connection pool. |
|
525 | 525 | ## 0 indicates no limit |
|
526 | 526 | #sqlalchemy.db1.pool_size = 5 |
|
527 | 527 | |
|
528 | 528 | ## the number of connections to allow in connection pool "overflow", that is |
|
529 | 529 | ## connections that can be opened above and beyond the pool_size setting, |
|
530 | 530 | ## which defaults to five. |
|
531 | 531 | #sqlalchemy.db1.max_overflow = 10 |
|
532 | 532 | |
|
533 | 533 | |
|
534 | 534 | ################## |
|
535 | 535 | ### VCS CONFIG ### |
|
536 | 536 | ################## |
|
537 | 537 | vcs.server.enable = true |
|
538 | 538 | vcs.server = localhost:9900 |
|
539 | 539 | |
|
540 | 540 | ## Web server connectivity protocol, responsible for web based VCS operations |
|
541 | 541 | ## Available protocols are: |
|
542 | ## `pyro4` - use pyro4 server | |
|
543 | 542 | ## `http` - use http-rpc backend (default) |
|
544 | 543 | vcs.server.protocol = http |
|
545 | 544 | |
|
546 | 545 | ## Push/Pull operations protocol, available options are: |
|
547 | ## `pyro4` - use pyro4 server | |
|
548 | 546 | ## `http` - use http-rpc backend (default) |
|
549 | 547 | ## |
|
550 | 548 | vcs.scm_app_implementation = http |
|
551 | 549 | |
|
552 | 550 | ## Push/Pull operations hooks protocol, available options are: |
|
553 | ## `pyro4` - use pyro4 server | |
|
554 | 551 | ## `http` - use http-rpc backend (default) |
|
555 | 552 | vcs.hooks.protocol = http |
|
556 | 553 | |
|
557 | 554 | vcs.server.log_level = debug |
|
558 | 555 | ## Start VCSServer with this instance as a subprocess, useful for development |
|
559 | 556 | vcs.start_server = true |
|
560 | 557 | |
|
561 | 558 | ## List of enabled VCS backends, available options are: |
|
562 | 559 | ## `hg` - mercurial |
|
563 | 560 | ## `git` - git |
|
564 | 561 | ## `svn` - subversion |
|
565 | 562 | vcs.backends = hg, git, svn |
|
566 | 563 | |
|
567 | 564 | vcs.connection_timeout = 3600 |
|
568 | 565 | ## Compatibility version when creating SVN repositories. Defaults to newest version when commented out. |
|
569 | 566 | ## Available options are: pre-1.4-compatible, pre-1.5-compatible, pre-1.6-compatible, pre-1.8-compatible |
|
570 | 567 | #vcs.svn.compatible_version = pre-1.8-compatible |
|
571 | 568 | |
|
572 | 569 | |
|
573 | 570 | ############################################################ |
|
574 | 571 | ### Subversion proxy support (mod_dav_svn) ### |
|
575 | 572 | ### Maps RhodeCode repo groups into SVN paths for Apache ### |
|
576 | 573 | ############################################################ |
|
577 | 574 | ## Enable or disable the config file generation. |
|
578 | 575 | svn.proxy.generate_config = false |
|
579 | 576 | ## Generate config file with `SVNListParentPath` set to `On`. |
|
580 | 577 | svn.proxy.list_parent_path = true |
|
581 | 578 | ## Set location and file name of generated config file. |
|
582 | 579 | svn.proxy.config_file_path = %(here)s/mod_dav_svn.conf |
|
583 | 580 | ## Used as a prefix to the `Location` block in the generated config file. |
|
584 | 581 | ## In most cases it should be set to `/`. |
|
585 | 582 | svn.proxy.location_root = / |
|
586 | 583 | ## Command to reload the mod dav svn configuration on change. |
|
587 | 584 | ## Example: `/etc/init.d/apache2 reload` |
|
588 | 585 | #svn.proxy.reload_cmd = /etc/init.d/apache2 reload |
|
589 | 586 | ## If the timeout expires before the reload command finishes, the command will |
|
590 | 587 | ## be killed. Setting it to zero means no timeout. Defaults to 10 seconds. |
|
591 | 588 | #svn.proxy.reload_timeout = 10 |
|
592 | 589 | |
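## A minimal enabled sketch, assuming Apache with mod_dav_svn and the
## example reload command from above:
#svn.proxy.generate_config = true
#svn.proxy.list_parent_path = true
#svn.proxy.config_file_path = %(here)s/mod_dav_svn.conf
#svn.proxy.location_root = /
#svn.proxy.reload_cmd = /etc/init.d/apache2 reload
#svn.proxy.reload_timeout = 10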
|
593 | 590 | ## Dummy marker to add new entries after. |
|
594 | 591 | ## Add any custom entries below. Please don't remove. |
|
595 | 592 | custom.conf = 1 |
|
596 | 593 | |
|
597 | 594 | |
|
598 | 595 | ################################ |
|
599 | 596 | ### LOGGING CONFIGURATION #### |
|
600 | 597 | ################################ |
|
601 | 598 | [loggers] |
|
602 | 599 | keys = root, routes, rhodecode, sqlalchemy, beaker, templates |
|
603 | 600 | |
|
604 | 601 | [handlers] |
|
605 | 602 | keys = console, console_sql |
|
606 | 603 | |
|
607 | 604 | [formatters] |
|
608 | 605 | keys = generic, color_formatter, color_formatter_sql |
|
609 | 606 | |
|
610 | 607 | ############# |
|
611 | 608 | ## LOGGERS ## |
|
612 | 609 | ############# |
|
613 | 610 | [logger_root] |
|
614 | 611 | level = NOTSET |
|
615 | 612 | handlers = console |
|
616 | 613 | |
|
617 | 614 | [logger_routes] |
|
618 | 615 | level = DEBUG |
|
619 | 616 | handlers = |
|
620 | 617 | qualname = routes.middleware |
|
621 | 618 | ## "level = DEBUG" logs the route matched and routing variables. |
|
622 | 619 | propagate = 1 |
|
623 | 620 | |
|
624 | 621 | [logger_beaker] |
|
625 | 622 | level = DEBUG |
|
626 | 623 | handlers = |
|
627 | 624 | qualname = beaker.container |
|
628 | 625 | propagate = 1 |
|
629 | 626 | |
|
630 | 627 | [logger_templates] |
|
631 | 628 | level = INFO |
|
632 | 629 | handlers = |
|
633 | 630 | qualname = pylons.templating |
|
634 | 631 | propagate = 1 |
|
635 | 632 | |
|
636 | 633 | [logger_rhodecode] |
|
637 | 634 | level = DEBUG |
|
638 | 635 | handlers = |
|
639 | 636 | qualname = rhodecode |
|
640 | 637 | propagate = 1 |
|
641 | 638 | |
|
642 | 639 | [logger_sqlalchemy] |
|
643 | 640 | level = INFO |
|
644 | 641 | handlers = console_sql |
|
645 | 642 | qualname = sqlalchemy.engine |
|
646 | 643 | propagate = 0 |
|
647 | 644 | |
|
648 | 645 | ############## |
|
649 | 646 | ## HANDLERS ## |
|
650 | 647 | ############## |
|
651 | 648 | |
|
652 | 649 | [handler_console] |
|
653 | 650 | class = StreamHandler |
|
654 | 651 | args = (sys.stderr, ) |
|
655 | 652 | level = DEBUG |
|
656 | 653 | formatter = color_formatter |
|
657 | 654 | |
|
658 | 655 | [handler_console_sql] |
|
659 | 656 | class = StreamHandler |
|
660 | 657 | args = (sys.stderr, ) |
|
661 | 658 | level = DEBUG |
|
662 | 659 | formatter = color_formatter_sql |
|
663 | 660 | |
|
664 | 661 | ################ |
|
665 | 662 | ## FORMATTERS ## |
|
666 | 663 | ################ |
|
667 | 664 | |
|
668 | 665 | [formatter_generic] |
|
669 | class = rhodecode.lib.logging_formatter. |
|
666 | class = rhodecode.lib.logging_formatter.ExceptionAwareFormatter |
|
670 | 667 | format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s |
|
671 | 668 | datefmt = %Y-%m-%d %H:%M:%S |
|
672 | 669 | |
|
673 | 670 | [formatter_color_formatter] |
|
674 | 671 | class = rhodecode.lib.logging_formatter.ColorFormatter |
|
675 | 672 | format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s |
|
676 | 673 | datefmt = %Y-%m-%d %H:%M:%S |
|
677 | 674 | |
|
678 | 675 | [formatter_color_formatter_sql] |
|
679 | 676 | class = rhodecode.lib.logging_formatter.ColorFormatterSql |
|
680 | 677 | format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s |
|
681 | 678 | datefmt = %Y-%m-%d %H:%M:%S |
@@ -1,650 +1,647 @@
|
1 | 1 | |
|
2 | 2 | |
|
3 | 3 | ################################################################################ |
|
4 | 4 | ## RHODECODE COMMUNITY EDITION CONFIGURATION ## |
|
5 | 5 | # The %(here)s variable will be replaced with the parent directory of this file# |
|
6 | 6 | ################################################################################ |
|
7 | 7 | |
|
8 | 8 | [DEFAULT] |
|
9 | 9 | debug = true |
|
10 | 10 | |
|
11 | 11 | ################################################################################ |
|
12 | 12 | ## EMAIL CONFIGURATION ## |
|
13 | 13 | ## Uncomment and replace with the email address which should receive ## |
|
14 | 14 | ## any error reports after an application crash ## |
|
15 | 15 | ## Additionally these settings will be used by the RhodeCode mailing system ## |
|
16 | 16 | ################################################################################ |
|
17 | 17 | |
|
18 | 18 | ## prefix all email subjects with the given prefix; helps filtering out emails |
|
19 | 19 | #email_prefix = [RhodeCode] |
|
20 | 20 | |
|
21 | 21 | ## email FROM address from which all mails will be sent |
|
22 | 22 | #app_email_from = rhodecode-noreply@localhost |
|
23 | 23 | |
|
24 | 24 | ## Uncomment and replace with the address which should receive any error report |
|
25 | 25 | ## note: using appenlight for error handling doesn't need this to be uncommented |
|
26 | 26 | #email_to = admin@localhost |
|
27 | 27 | |
|
28 | 28 | ## in case of Application errors, send an error email from this address |
|
29 | 29 | #error_email_from = rhodecode_error@localhost |
|
30 | 30 | |
|
31 | 31 | ## additional error message to be sent in case of server crash |
|
32 | 32 | #error_message = |
|
33 | 33 | |
|
34 | 34 | |
|
35 | 35 | #smtp_server = mail.server.com |
|
36 | 36 | #smtp_username = |
|
37 | 37 | #smtp_password = |
|
38 | 38 | #smtp_port = |
|
39 | 39 | #smtp_use_tls = false |
|
40 | 40 | #smtp_use_ssl = true |
|
41 | 41 | ## Specify available auth parameters here (e.g. LOGIN PLAIN CRAM-MD5, etc.) |
|
42 | 42 | #smtp_auth = |
|
43 | 43 | |
|
44 | 44 | [server:main] |
|
45 | 45 | ## COMMON ## |
|
46 | 46 | host = 127.0.0.1 |
|
47 | 47 | port = 5000 |
|
48 | 48 | |
|
49 | 49 | ################################## |
|
50 | 50 | ## WAITRESS WSGI SERVER ## |
|
51 | 51 | ## Recommended for Development ## |
|
52 | 52 | ################################## |
|
53 | 53 | |
|
54 | 54 | #use = egg:waitress#main |
|
55 | 55 | ## number of worker threads |
|
56 | 56 | #threads = 5 |
|
57 | 57 | ## MAX BODY SIZE 100GB |
|
58 | 58 | #max_request_body_size = 107374182400 |
|
59 | 59 | ## Use poll instead of select; fixes file descriptor limit problems. |
|
60 | 60 | ## May not work on old windows systems. |
|
61 | 61 | #asyncore_use_poll = true |
|
62 | 62 | |
|
63 | 63 | |
|
64 | 64 | ########################## |
|
65 | 65 | ## GUNICORN WSGI SERVER ## |
|
66 | 66 | ########################## |
|
67 | 67 | ## run with gunicorn --log-config rhodecode.ini --paste rhodecode.ini |
|
68 | 68 | |
|
69 | 69 | use = egg:gunicorn#main |
|
70 | 70 | ## Sets the number of process workers. You must set `instance_id = *` |
|
71 | 71 | ## when this option is set to more than one worker, recommended |
|
72 | 72 | ## value is (2 * NUMBER_OF_CPUS + 1), eg 2CPU = 5 workers |
|
73 | 73 | ## The `instance_id = *` must be set in the [app:main] section below |
|
74 | 74 | workers = 2 |
|
75 | 75 | ## number of threads for each worker, must be set to 1 for gevent |
|
76 | 76 | ## generally recommended to be 1 |
|
77 | 77 | #threads = 1 |
|
78 | 78 | ## process name |
|
79 | 79 | proc_name = rhodecode |
|
80 | 80 | ## type of worker class, one of sync, gevent |
|
81 | 81 | ## for bigger setups, a worker class other than sync is recommended |
|
82 | 82 | worker_class = sync |
|
83 | 83 | ## The maximum number of simultaneous clients. Valid only for Gevent |
|
84 | 84 | #worker_connections = 10 |
|
85 | 85 | ## max number of requests that worker will handle before being gracefully |
|
86 | 86 | ## restarted, could prevent memory leaks |
|
87 | 87 | max_requests = 1000 |
|
88 | 88 | max_requests_jitter = 30 |
|
89 | 89 | ## amount of time a worker can spend handling a request before it |
|
90 | 90 | ## gets killed and restarted. Set to 6hrs |
|
91 | 91 | timeout = 21600 |
|
92 | 92 | |
|
93 | 93 | |
|
94 | 94 | ## prefix middleware for RhodeCode. |
|
95 | 95 | ## recommended when using proxy setup. |
|
96 | 96 | ## allows serving RhodeCode under a URL prefix on the server. |
|
97 | 97 | ## eg https://server.com/custom_prefix. Enable `filter-with =` option below as well. |
|
98 | 98 | ## And set your prefix like: `prefix = /custom_prefix` |
|
99 | 99 | ## be sure to also set beaker.session.cookie_path = /custom_prefix if you need |
|
100 | 100 | ## to make your cookies only work on prefix url |
|
101 | 101 | [filter:proxy-prefix] |
|
102 | 102 | use = egg:PasteDeploy#prefix |
|
103 | 103 | prefix = / |
|
104 | 104 | |
|
105 | 105 | [app:main] |
|
106 | 106 | use = egg:rhodecode-enterprise-ce |
|
107 | 107 | |
|
108 | 108 | ## enable proxy prefix middleware, defined above |
|
109 | 109 | #filter-with = proxy-prefix |
|
110 | 110 | |
|
111 | 111 | ## encryption key used to encrypt social plugin tokens, |
|
112 | 112 | ## remote_urls with credentials etc, if not set it defaults to |
|
113 | 113 | ## `beaker.session.secret` |
|
114 | 114 | #rhodecode.encrypted_values.secret = |
|
115 | 115 | |
|
116 | 116 | ## decryption strict mode (enabled by default). It controls if decryption raises |
|
117 | 117 | ## `SignatureVerificationError` in case of wrong key, or damaged encryption data. |
|
118 | 118 | #rhodecode.encrypted_values.strict = false |
|
119 | 119 | |
|
120 | 120 | ## return gzipped responses from Rhodecode (static files/application) |
|
121 | 121 | gzip_responses = false |
|
122 | 122 | |
|
123 | 123 | ## autogenerate javascript routes file on startup |
|
124 | 124 | generate_js_files = false |
|
125 | 125 | |
|
126 | 126 | ## Optional Languages |
|
127 | 127 | ## en(default), be, de, es, fr, it, ja, pl, pt, ru, zh |
|
128 | 128 | lang = en |
|
129 | 129 | |
|
130 | 130 | ## perform a full repository scan on each server start, this should be |
|
131 | 131 | ## set to false after first startup, to allow faster server restarts. |
|
132 | 132 | startup.import_repos = false |
|
133 | 133 | |
|
134 | 134 | ## Uncomment and set this path to use archive download cache. |
|
135 | 135 | ## Once enabled, generated archives will be cached at this location |
|
136 | 136 | ## and served from the cache during subsequent requests for the same archive of |
|
137 | 137 | ## the repository. |
|
138 | 138 | #archive_cache_dir = /tmp/tarballcache |
|
139 | 139 | |
|
140 | 140 | ## change this to unique ID for security |
|
141 | 141 | app_instance_uuid = rc-production |
|
142 | 142 | |
|
143 | 143 | ## cut off limit for large diffs (size in bytes) |
|
144 | 144 | cut_off_limit_diff = 1024000 |
|
145 | 145 | cut_off_limit_file = 256000 |
|
146 | 146 | |
|
147 | 147 | ## use cache version of scm repo everywhere |
|
148 | 148 | vcs_full_cache = true |
|
149 | 149 | |
|
150 | 150 | ## force https in RhodeCode, fixes https redirects, assumes it's always https |
|
151 | 151 | ## Normally this is controlled by proper http flags sent from http server |
|
152 | 152 | force_https = false |
|
153 | 153 | |
|
154 | 154 | ## use Strict-Transport-Security headers |
|
155 | 155 | use_htsts = false |
|
156 | 156 | |
|
157 | 157 | ## number of commits stats will parse on each iteration |
|
158 | 158 | commit_parse_limit = 25 |
|
159 | 159 | |
|
160 | 160 | ## git rev filter option, --all is the default filter, if you need to |
|
161 | 161 | ## hide all refs in changelog switch this to --branches --tags |
|
162 | 162 | git_rev_filter = --branches --tags |
|
163 | 163 | |
|
164 | 164 | # Set to true if your repos are exposed using the dumb protocol |
|
165 | 165 | git_update_server_info = false |
|
166 | 166 | |
|
167 | 167 | ## RSS/ATOM feed options |
|
168 | 168 | rss_cut_off_limit = 256000 |
|
169 | 169 | rss_items_per_page = 10 |
|
170 | 170 | rss_include_diff = false |
|
171 | 171 | |
|
172 | 172 | ## gist URL alias, used to create nicer urls for gist. This should be an |
|
173 | 173 | ## url that does rewrites to _admin/gists/{gistid}. |
|
174 | 174 | ## example: http://gist.rhodecode.org/{gistid}. Empty means use the internal |
|
175 | 175 | ## RhodeCode url, ie. http[s]://rhodecode.server/_admin/gists/{gistid} |
|
176 | 176 | gist_alias_url = |
|
177 | 177 | |
|
178 | 178 | ## List of controllers (using glob pattern syntax) that AUTH TOKENS could be |
|
179 | 179 | ## used for access. |
|
180 | 180 | ## Adding ?auth_token=TOKEN_HASH to the url authenticates this request as if it |
|
181 | 181 | ## came from the logged-in user who owns this authentication token. |
|
182 | 182 | ## |
|
183 | 183 | ## Syntax is ControllerClass:function_pattern. |
|
184 | 184 | ## To enable access to raw_files put `FilesController:raw`. |
|
185 | 185 | ## To enable access to patches add `ChangesetController:changeset_patch`. |
|
186 | 186 | ## The list should be "," separated and on a single line. |
|
187 | 187 | ## |
|
188 | 188 | ## Recommended controllers to enable: |
|
189 | 189 | # ChangesetController:changeset_patch, |
|
190 | 190 | # ChangesetController:changeset_raw, |
|
191 | 191 | # FilesController:raw, |
|
192 | 192 | # FilesController:archivefile, |
|
193 | 193 | # GistsController:*, |
|
194 | 194 | api_access_controllers_whitelist = |
|
195 | 195 | |
|
196 | 196 | ## default encoding used to convert from and to unicode |
|
197 | 197 | ## can also be a comma separated list of encodings in case of mixed encodings |
|
198 | 198 | default_encoding = UTF-8 |
|
199 | 199 | |
|
200 | 200 | ## instance-id prefix |
|
201 | 201 | ## a prefix key for this instance used for cache invalidation when running |
|
202 | 202 | ## multiple instances of rhodecode, make sure it's globally unique for |
|
203 | 203 | ## all running rhodecode instances. Leave empty if you don't use it |
|
204 | 204 | instance_id = |
|
205 | 205 | |
|
206 | 206 | ## Fallback authentication plugin. Set this to a plugin ID to force the usage |
|
207 | 207 | ## of an authentication plugin even if it is disabled by its settings. |
|
208 | 208 | ## This could be useful if you are unable to log in to the system due to broken |
|
209 | 209 | ## authentication settings. Then you can enable e.g. the internal rhodecode auth |
|
210 | 210 | ## module to log in again and fix the settings. |
|
211 | 211 | ## |
|
212 | 212 | ## Available builtin plugin IDs (hash is part of the ID): |
|
213 | 213 | ## egg:rhodecode-enterprise-ce#rhodecode |
|
214 | 214 | ## egg:rhodecode-enterprise-ce#pam |
|
215 | 215 | ## egg:rhodecode-enterprise-ce#ldap |
|
216 | 216 | ## egg:rhodecode-enterprise-ce#jasig_cas |
|
217 | 217 | ## egg:rhodecode-enterprise-ce#headers |
|
218 | 218 | ## egg:rhodecode-enterprise-ce#crowd |
|
219 | 219 | #rhodecode.auth_plugin_fallback = egg:rhodecode-enterprise-ce#rhodecode |
|
220 | 220 | |
|
221 | 221 | ## alternative return HTTP header for failed authentication. Default HTTP |
|
222 | 222 | ## response is 401 HTTPUnauthorized. Currently HG clients have trouble |
|
223 | 223 | ## handling that, causing a series of failed authentication calls. |
|
224 | 224 | ## Set this variable to 403 to return HTTPForbidden, or any other HTTP code |
|
225 | 225 | ## This will be served instead of the default 401 on bad authentication |
|
226 | 226 | auth_ret_code = |
|
227 | 227 | |
|
228 | 228 | ## use special detection method when serving auth_ret_code, instead of serving |
|
229 | 229 | ## ret_code directly, use 401 initially (Which triggers credentials prompt) |
|
230 | 230 | ## and then serve auth_ret_code to clients |
|
231 | 231 | auth_ret_code_detection = false |
|
232 | 232 | |
|
233 | 233 | ## locking return code. When repository is locked return this HTTP code. 2XX |
|
234 | 234 | ## codes don't break the transactions while 4XX codes do |
|
235 | 235 | lock_ret_code = 423 |
|
236 | 236 | |
|
237 | 237 | ## allows changing the repository location in the settings page |
|
238 | 238 | allow_repo_location_change = true |
|
239 | 239 | |
|
240 | 240 | ## allows setting up custom hooks in the settings page |
|
241 | 241 | allow_custom_hooks_settings = true |
|
242 | 242 | |
|
243 | 243 | ## generated license token, goto license page in RhodeCode settings to obtain |
|
244 | 244 | ## new token |
|
245 | 245 | license_token = |
|
246 | 246 | |
|
247 | 247 | ## supervisor connection uri, for managing supervisor and logs. |
|
248 | 248 | supervisor.uri = |
|
249 | 249 | ## supervisord group name/id we only want this RC instance to handle |
|
250 | 250 | supervisor.group_id = prod |
|
251 | 251 | |
|
252 | 252 | ## Display extended labs settings |
|
253 | 253 | labs_settings_active = true |
|
254 | 254 | |
|
255 | 255 | #################################### |
|
256 | 256 | ### CELERY CONFIG #### |
|
257 | 257 | #################################### |
|
258 | 258 | use_celery = false |
|
259 | 259 | broker.host = localhost |
|
260 | 260 | broker.vhost = rabbitmqhost |
|
261 | 261 | broker.port = 5672 |
|
262 | 262 | broker.user = rabbitmq |
|
263 | 263 | broker.password = qweqwe |
|
264 | 264 | |
|
265 | 265 | celery.imports = rhodecode.lib.celerylib.tasks |
|
266 | 266 | |
|
267 | 267 | celery.result.backend = amqp |
|
268 | 268 | celery.result.dburi = amqp:// |
|
269 | 269 | celery.result.serialier = json |
|
270 | 270 | |
|
271 | 271 | #celery.send.task.error.emails = true |
|
272 | 272 | #celery.amqp.task.result.expires = 18000 |
|
273 | 273 | |
|
274 | 274 | celeryd.concurrency = 2 |
|
275 | 275 | #celeryd.log.file = celeryd.log |
|
276 | 276 | celeryd.log.level = debug |
|
277 | 277 | celeryd.max.tasks.per.child = 1 |
|
278 | 278 | |
|
279 | 279 | ## tasks will never be sent to the queue, but executed locally instead. |
|
280 | 280 | celery.always.eager = false |
|
281 | 281 | |
|
282 | 282 | #################################### |
|
283 | 283 | ### BEAKER CACHE #### |
|
284 | 284 | #################################### |
|
285 | 285 | # default cache dir for templates. Putting this into a ramdisk |
|
286 | 286 | ## can boost performance, eg. %(here)s/data_ramdisk |
|
287 | 287 | cache_dir = %(here)s/data |
|
288 | 288 | |
|
289 | 289 | ## locking and default file storage for Beaker. Putting this into a ramdisk |
|
290 | 290 | ## can boost performance, eg. %(here)s/data_ramdisk/cache/beaker_data |
|
291 | 291 | beaker.cache.data_dir = %(here)s/data/cache/beaker_data |
|
292 | 292 | beaker.cache.lock_dir = %(here)s/data/cache/beaker_lock |
|
293 | 293 | |
|
294 | 294 | beaker.cache.regions = super_short_term, short_term, long_term, sql_cache_short, auth_plugins, repo_cache_long |
|
295 | 295 | |
|
296 | 296 | beaker.cache.super_short_term.type = memory |
|
297 | 297 | beaker.cache.super_short_term.expire = 10 |
|
298 | 298 | beaker.cache.super_short_term.key_length = 256 |
|
299 | 299 | |
|
300 | 300 | beaker.cache.short_term.type = memory |
|
301 | 301 | beaker.cache.short_term.expire = 60 |
|
302 | 302 | beaker.cache.short_term.key_length = 256 |
|
303 | 303 | |
|
304 | 304 | beaker.cache.long_term.type = memory |
|
305 | 305 | beaker.cache.long_term.expire = 36000 |
|
306 | 306 | beaker.cache.long_term.key_length = 256 |
|
307 | 307 | |
|
308 | 308 | beaker.cache.sql_cache_short.type = memory |
|
309 | 309 | beaker.cache.sql_cache_short.expire = 10 |
|
310 | 310 | beaker.cache.sql_cache_short.key_length = 256 |
|
311 | 311 | |
|
312 | 312 | ## default is memory cache, configure only if required |
|
313 | 313 | ## using multi-node or multi-worker setup |
|
314 | 314 | #beaker.cache.auth_plugins.type = ext:database |
|
315 | 315 | #beaker.cache.auth_plugins.lock_dir = %(here)s/data/cache/auth_plugin_lock |
|
316 | 316 | #beaker.cache.auth_plugins.url = postgresql://postgres:secret@localhost/rhodecode |
|
317 | 317 | #beaker.cache.auth_plugins.url = mysql://root:secret@127.0.0.1/rhodecode |
|
318 | 318 | #beaker.cache.auth_plugins.sa.pool_recycle = 3600 |
|
319 | 319 | #beaker.cache.auth_plugins.sa.pool_size = 10 |
|
320 | 320 | #beaker.cache.auth_plugins.sa.max_overflow = 0 |
|
321 | 321 | |
|
322 | 322 | beaker.cache.repo_cache_long.type = memorylru_base |
|
323 | 323 | beaker.cache.repo_cache_long.max_items = 4096 |
|
324 | 324 | beaker.cache.repo_cache_long.expire = 2592000 |
|
325 | 325 | |
|
326 | 326 | ## default is memorylru_base cache, configure only if required |
|
327 | 327 | ## using multi-node or multi-worker setup |
|
328 | 328 | #beaker.cache.repo_cache_long.type = ext:memcached |
|
329 | 329 | #beaker.cache.repo_cache_long.url = localhost:11211 |
|
330 | 330 | #beaker.cache.repo_cache_long.expire = 1209600 |
|
331 | 331 | #beaker.cache.repo_cache_long.key_length = 256 |
|
332 | 332 | |
|
333 | 333 | #################################### |
|
334 | 334 | ### BEAKER SESSION #### |
|
335 | 335 | #################################### |
|
336 | 336 | |
|
337 | 337 | ## .session.type is the type of storage used for the session; currently allowed |
|
338 | 338 | ## types are file, ext:memcached, ext:database, and memory (default). |
|
339 | 339 | beaker.session.type = file |
|
340 | 340 | beaker.session.data_dir = %(here)s/data/sessions/data |
|
341 | 341 | |
|
342 | 342 | ## db based session, fast, and allows easy management over logged in users |
|
343 | 343 | #beaker.session.type = ext:database |
|
344 | 344 | #beaker.session.table_name = db_session |
|
345 | 345 | #beaker.session.sa.url = postgresql://postgres:secret@localhost/rhodecode |
|
346 | 346 | #beaker.session.sa.url = mysql://root:secret@127.0.0.1/rhodecode |
|
347 | 347 | #beaker.session.sa.pool_recycle = 3600 |
|
348 | 348 | #beaker.session.sa.echo = false |
|
349 | 349 | |
|
350 | 350 | beaker.session.key = rhodecode |
|
351 | 351 | beaker.session.secret = production-rc-uytcxaz |
|
352 | 352 | beaker.session.lock_dir = %(here)s/data/sessions/lock |
|
353 | 353 | |
|
354 | 354 | ## Secure encrypted cookie. Requires AES and AES python libraries |
|
355 | 355 | ## you must disable beaker.session.secret to use this |
|
356 | 356 | #beaker.session.encrypt_key = key_for_encryption |
|
357 | 357 | #beaker.session.validate_key = validation_key |
|
358 | 358 | |
|
359 | 359 | ## sets session as invalid (also logging out the user) if it has not been |
|
360 | 360 | ## accessed for the given amount of time in seconds |
|
361 | 361 | beaker.session.timeout = 2592000 |
|
362 | 362 | beaker.session.httponly = true |
|
363 | 363 | ## Path to use for the cookie. Set to prefix if you use prefix middleware |
|
364 | 364 | #beaker.session.cookie_path = /custom_prefix |
|
365 | 365 | |
|
366 | 366 | ## uncomment for https secure cookie |
|
367 | 367 | beaker.session.secure = false |
|
368 | 368 | |
|
369 | 369 | ## auto save the session so you don't have to call .save() |
|
370 | 370 | beaker.session.auto = false |
|
371 | 371 | |
|
372 | 372 | ## default cookie expiration time in seconds, set to `true` to set expire |
|
373 | 373 | ## at browser close |
|
374 | 374 | #beaker.session.cookie_expires = 3600 |
|
375 | 375 | |
|
376 | 376 | ################################### |
|
377 | 377 | ## SEARCH INDEXING CONFIGURATION ## |
|
378 | 378 | ################################### |
|
379 | 379 | ## Full text search indexer is available in rhodecode-tools under |
|
380 | 380 | ## `rhodecode-tools index` command |
|
381 | 381 | |
|
382 | 382 | ## WHOOSH Backend, doesn't require additional services to run |
|
383 | 383 | ## it works well with a few dozen repos |
|
384 | 384 | search.module = rhodecode.lib.index.whoosh |
|
385 | 385 | search.location = %(here)s/data/index |
|
386 | 386 | |
|
387 | 387 | ######################################## |
|
388 | 388 | ### CHANNELSTREAM CONFIG #### |
|
389 | 389 | ######################################## |
|
390 | 390 | ## channelstream enables persistent connections and live notification |
|
391 | 391 | ## in the system. It's also used by the chat system |
|
392 | 392 | channelstream.enabled = false |
|
393 | 393 | |
|
394 | 394 | ## server address for channelstream server on the backend |
|
395 | 395 | channelstream.server = 127.0.0.1:9800 |
|
396 | 396 | |
|
397 | 397 | ## location of the channelstream server from outside world |
|
398 | 398 | ## use ws:// for http or wss:// for https. This address needs to be handled |
|
399 | 399 | ## by external HTTP server such as Nginx or Apache |
|
400 | 400 | ## see nginx/apache configuration examples in our docs |
|
401 | 401 | channelstream.ws_url = ws://rhodecode.yourserver.com/_channelstream |
|
402 | 402 | channelstream.secret = secret |
|
403 | 403 | channelstream.history.location = %(here)s/channelstream_history |
|
404 | 404 | |
|
405 | 405 | ## Internal application path that JavaScript uses to connect to. |
|
406 | 406 | ## If you use proxy-prefix the prefix should be added before /_channelstream |
|
407 | 407 | channelstream.proxy_path = /_channelstream |
|
408 | 408 | |
|
409 | 409 | |
|
410 | 410 | ################################### |
|
411 | 411 | ## APPENLIGHT CONFIG ## |
|
412 | 412 | ################################### |
|
413 | 413 | |
|
414 | 414 | ## Appenlight is tailored to work with RhodeCode, see |
|
415 | 415 | ## http://appenlight.com for details on how to obtain an account |
|
416 | 416 | |
|
417 | 417 | ## appenlight integration enabled |
|
418 | 418 | appenlight = false |
|
419 | 419 | |
|
420 | 420 | appenlight.server_url = https://api.appenlight.com |
|
421 | 421 | appenlight.api_key = YOUR_API_KEY |
|
422 | 422 | #appenlight.transport_config = https://api.appenlight.com?threaded=1&timeout=5 |
|
423 | 423 | |
|
424 | 424 | # used for JS client |
|
425 | 425 | appenlight.api_public_key = YOUR_API_PUBLIC_KEY |
|
426 | 426 | |
|
427 | 427 | ## TWEAK AMOUNT OF INFO SENT HERE |
|
428 | 428 | |
|
429 | 429 | ## enables 404 error logging (default False) |
|
430 | 430 | appenlight.report_404 = false |
|
431 | 431 | |
|
432 | 432 | ## time in seconds after request is considered being slow (default 1) |
|
433 | 433 | appenlight.slow_request_time = 1 |
|
434 | 434 | |
|
435 | 435 | ## record slow requests in application |
|
436 | 436 | ## (needs to be enabled for slow datastore recording and time tracking) |
|
437 | 437 | appenlight.slow_requests = true |
|
438 | 438 | |
|
439 | 439 | ## enable hooking to application loggers |
|
440 | 440 | appenlight.logging = true |
|
441 | 441 | |
|
442 | 442 | ## minimum log level for log capture |
|
443 | 443 | appenlight.logging.level = WARNING |
|
444 | 444 | |
|
445 | 445 | ## send logs only from erroneous/slow requests |
|
446 | 446 | ## (saves API quota for intensive logging) |
|
447 | 447 | appenlight.logging_on_error = false |
|
448 | 448 | |
|
449 | 449 | ## list of additional keywords that should be grabbed from environ object |
|
450 | 450 | ## can be string with comma separated list of words in lowercase |
|
451 | 451 | ## (by default the client will always send the following info: |
|
452 | 452 | ## 'REMOTE_USER', 'REMOTE_ADDR', 'SERVER_NAME', 'CONTENT_TYPE' + all keys that |
|
453 | 453 | ## start with HTTP*); this list can be extended with additional keywords here |
|
454 | 454 | appenlight.environ_keys_whitelist = |
|
455 | 455 | |
|
456 | 456 | ## list of keywords that should be blanked from request object |
|
457 | 457 | ## can be a string with a comma separated list of words in lowercase

458 | 458 | ## (by default the client will always blank keys that contain the following words:

459 | 459 | ## 'password', 'passwd', 'pwd', 'auth_tkt', 'secret', 'csrf')

460 | 460 | ## this list can be extended with additional keywords set here
|
461 | 461 | appenlight.request_keys_blacklist = |
|
462 | 462 | |
|
463 | 463 | ## list of namespaces that should be ignored when gathering log entries

464 | 464 | ## can be a string with a comma separated list of namespaces

465 | 465 | ## (by default the client ignores its own entries: appenlight_client.client)
|
466 | 466 | appenlight.log_namespace_blacklist = |
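## As a filled-in illustration of the three lists above (hypothetical values,
## not defaults shipped with RhodeCode):
#appenlight.environ_keys_whitelist = http_user_agent, server_port
#appenlight.request_keys_blacklist = my_auth_token
#appenlight.log_namespace_blacklist = sqlalchemy.engine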
|
467 | 467 | |
|
468 | 468 | |
|
469 | 469 | ################################################################################ |
|
470 | 470 | ## WARNING: *THE LINE BELOW MUST BE UNCOMMENTED ON A PRODUCTION ENVIRONMENT* ## |
|
471 | 471 | ## Debug mode will enable the interactive debugging tool, allowing ANYONE to ## |
|
472 | 472 | ## execute malicious code after an exception is raised. ## |
|
473 | 473 | ################################################################################ |
|
474 | 474 | set debug = false |
|
475 | 475 | |
|
476 | 476 | |
|
477 | 477 | ########################################### |
|
478 | 478 | ### MAIN RHODECODE DATABASE CONFIG ### |
|
479 | 479 | ########################################### |
|
480 | 480 | #sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode.db?timeout=30 |
|
481 | 481 | #sqlalchemy.db1.url = postgresql://postgres:qweqwe@localhost/rhodecode |
|
482 | 482 | #sqlalchemy.db1.url = mysql://root:qweqwe@localhost/rhodecode |
|
483 | 483 | sqlalchemy.db1.url = postgresql://postgres:qweqwe@localhost/rhodecode |
|
484 | 484 | |
|
485 | 485 | # see sqlalchemy docs for other advanced settings |
|
486 | 486 | |
|
487 | 487 | ## print the sql statements to output |
|
488 | 488 | sqlalchemy.db1.echo = false |
|
489 | 489 | ## recycle the connections after this amount of seconds |
|
490 | 490 | sqlalchemy.db1.pool_recycle = 3600 |
|
491 | 491 | sqlalchemy.db1.convert_unicode = true |
|
492 | 492 | |
|
493 | 493 | ## the number of connections to keep open inside the connection pool. |
|
494 | 494 | ## 0 indicates no limit |
|
495 | 495 | #sqlalchemy.db1.pool_size = 5 |
|
496 | 496 | |
|
497 | 497 | ## the number of connections to allow in connection pool "overflow", that is |
|
498 | 498 | ## connections that can be opened above and beyond the pool_size setting, |
|
499 | 499 | ## which defaults to five. |
|
500 | 500 | #sqlalchemy.db1.max_overflow = 10 |
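## Worked example: with pool_size = 5 and max_overflow = 10 the pool keeps up
## to 5 connections open between requests and can open up to 5 + 10 = 15
## connections in total under load; the overflow connections are closed once
## returned to the pool.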
|
501 | 501 | |
|
502 | 502 | |
|
503 | 503 | ################## |
|
504 | 504 | ### VCS CONFIG ### |
|
505 | 505 | ################## |
|
506 | 506 | vcs.server.enable = true |
|
507 | 507 | vcs.server = localhost:9900 |
|
508 | 508 | |
|
509 | 509 | ## Web server connectivity protocol, responsible for web based VCS operations
|
510 | 510 | ## Available protocols are: |
|
511 | ## `pyro4` - use pyro4 server | |
|
512 | 511 | ## `http` - use http-rpc backend (default) |
|
513 | 512 | vcs.server.protocol = http |
|
514 | 513 | |
|
515 | 514 | ## Push/Pull operations protocol, available options are: |
|
516 | ## `pyro4` - use pyro4 server | |
|
517 | 515 | ## `http` - use http-rpc backend (default) |
|
518 | 516 | ## |
|
519 | 517 | vcs.scm_app_implementation = http |
|
520 | 518 | |
|
521 | 519 | ## Push/Pull operations hooks protocol, available options are: |
|
522 | ## `pyro4` - use pyro4 server | |
|
523 | 520 | ## `http` - use http-rpc backend (default) |
|
524 | 521 | vcs.hooks.protocol = http |
|
525 | 522 | |
|
526 | 523 | vcs.server.log_level = info |
|
527 | 524 | ## Start VCSServer with this instance as a subprocess, useful for development
|
528 | 525 | vcs.start_server = false |
|
529 | 526 | |
|
530 | 527 | ## List of enabled VCS backends, available options are: |
|
531 | 528 | ## `hg` - mercurial |
|
532 | 529 | ## `git` - git |
|
533 | 530 | ## `svn` - subversion |
|
534 | 531 | vcs.backends = hg, git, svn |
|
535 | 532 | |
|
536 | 533 | vcs.connection_timeout = 3600 |
|
537 | 534 | ## Compatibility version when creating SVN repositories. Defaults to newest version when commented out. |
|
538 | 535 | ## Available options are: pre-1.4-compatible, pre-1.5-compatible, pre-1.6-compatible, pre-1.8-compatible |
|
539 | 536 | #vcs.svn.compatible_version = pre-1.8-compatible |
|
540 | 537 | |
|
541 | 538 | |
|
542 | 539 | ############################################################ |
|
543 | 540 | ### Subversion proxy support (mod_dav_svn) ### |
|
544 | 541 | ### Maps RhodeCode repo groups into SVN paths for Apache ### |
|
545 | 542 | ############################################################ |
|
546 | 543 | ## Enable or disable the config file generation. |
|
547 | 544 | svn.proxy.generate_config = false |
|
548 | 545 | ## Generate config file with `SVNListParentPath` set to `On`. |
|
549 | 546 | svn.proxy.list_parent_path = true |
|
550 | 547 | ## Set location and file name of generated config file. |
|
551 | 548 | svn.proxy.config_file_path = %(here)s/mod_dav_svn.conf |
|
552 | 549 | ## Used as a prefix to the `Location` block in the generated config file. |
|
553 | 550 | ## In most cases it should be set to `/`. |
|
554 | 551 | svn.proxy.location_root = / |
|
555 | 552 | ## Command to reload the mod dav svn configuration on change. |
|
556 | 553 | ## Example: `/etc/init.d/apache2 reload` |
|
557 | 554 | #svn.proxy.reload_cmd = /etc/init.d/apache2 reload |
|
558 | 555 | ## If the timeout expires before the reload command finishes, the command will |
|
559 | 556 | ## be killed. Setting it to zero means no timeout. Defaults to 10 seconds. |
|
560 | 557 | #svn.proxy.reload_timeout = 10 |
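## For orientation, the generated file contains one block per repo group,
## roughly of the following shape (an illustrative sketch with hypothetical
## paths, not verbatim generator output):
##
##   <Location /my-repo-group>
##     DAV svn
##     SVNParentPath /path/to/repositories/my-repo-group
##     SVNListParentPath On
##   </Location>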
|
561 | 558 | |
|
562 | 559 | ## Dummy marker to add new entries after. |
|
563 | 560 | ## Add any custom entries below. Please don't remove. |
|
564 | 561 | custom.conf = 1 |
|
565 | 562 | |
|
566 | 563 | |
|
567 | 564 | ################################ |
|
568 | 565 | ### LOGGING CONFIGURATION #### |
|
569 | 566 | ################################ |
|
570 | 567 | [loggers] |
|
571 | 568 | keys = root, routes, rhodecode, sqlalchemy, beaker, templates |
|
572 | 569 | |
|
573 | 570 | [handlers] |
|
574 | 571 | keys = console, console_sql |
|
575 | 572 | |
|
576 | 573 | [formatters] |
|
577 | 574 | keys = generic, color_formatter, color_formatter_sql |
|
578 | 575 | |
|
579 | 576 | ############# |
|
580 | 577 | ## LOGGERS ## |
|
581 | 578 | ############# |
|
582 | 579 | [logger_root] |
|
583 | 580 | level = NOTSET |
|
584 | 581 | handlers = console |
|
585 | 582 | |
|
586 | 583 | [logger_routes] |
|
587 | 584 | level = DEBUG |
|
588 | 585 | handlers = |
|
589 | 586 | qualname = routes.middleware |
|
590 | 587 | ## "level = DEBUG" logs the route matched and routing variables. |
|
591 | 588 | propagate = 1 |
|
592 | 589 | |
|
593 | 590 | [logger_beaker] |
|
594 | 591 | level = DEBUG |
|
595 | 592 | handlers = |
|
596 | 593 | qualname = beaker.container |
|
597 | 594 | propagate = 1 |
|
598 | 595 | |
|
599 | 596 | [logger_templates] |
|
600 | 597 | level = INFO |
|
601 | 598 | handlers = |
|
602 | 599 | qualname = pylons.templating |
|
603 | 600 | propagate = 1 |
|
604 | 601 | |
|
605 | 602 | [logger_rhodecode] |
|
606 | 603 | level = DEBUG |
|
607 | 604 | handlers = |
|
608 | 605 | qualname = rhodecode |
|
609 | 606 | propagate = 1 |
|
610 | 607 | |
|
611 | 608 | [logger_sqlalchemy] |
|
612 | 609 | level = INFO |
|
613 | 610 | handlers = console_sql |
|
614 | 611 | qualname = sqlalchemy.engine |
|
615 | 612 | propagate = 0 |
|
616 | 613 | |
|
617 | 614 | ############## |
|
618 | 615 | ## HANDLERS ## |
|
619 | 616 | ############## |
|
620 | 617 | |
|
621 | 618 | [handler_console] |
|
622 | 619 | class = StreamHandler |
|
623 | 620 | args = (sys.stderr, ) |
|
624 | 621 | level = INFO |
|
625 | 622 | formatter = generic |
|
626 | 623 | |
|
627 | 624 | [handler_console_sql] |
|
628 | 625 | class = StreamHandler |
|
629 | 626 | args = (sys.stderr, ) |
|
630 | 627 | level = WARN |
|
631 | 628 | formatter = generic |
|
632 | 629 | |
|
633 | 630 | ################ |
|
634 | 631 | ## FORMATTERS ## |
|
635 | 632 | ################ |
|
636 | 633 | |
|
637 | 634 | [formatter_generic] |
|
638 | class = rhodecode.lib.logging_formatter. |

635 | class = rhodecode.lib.logging_formatter.ExceptionAwareFormatter |
|
639 | 636 | format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s |
|
640 | 637 | datefmt = %Y-%m-%d %H:%M:%S |
|
641 | 638 | |
|
642 | 639 | [formatter_color_formatter] |
|
643 | 640 | class = rhodecode.lib.logging_formatter.ColorFormatter |
|
644 | 641 | format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s |
|
645 | 642 | datefmt = %Y-%m-%d %H:%M:%S |
|
646 | 643 | |
|
647 | 644 | [formatter_color_formatter_sql] |
|
648 | 645 | class = rhodecode.lib.logging_formatter.ColorFormatterSql |
|
649 | 646 | format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s |
|
650 | 647 | datefmt = %Y-%m-%d %H:%M:%S |
@@ -1,23 +1,22 b'' | |||
|
1 | 1 | |
|
2 | 2 | ====================================== |
|
3 | 3 | VCS client and VCSServer integration |
|
4 | 4 | ====================================== |
|
5 | 5 | |
|
6 | 6 | Enterprise uses the VCSServer as a backend to provide version control |
|
7 | 7 | functionalities. This section describes the components in Enterprise which talk |
|
8 | 8 | to the VCSServer. |
|
9 | 9 | |
|
10 | 10 | The client library is implemented in :mod:`rhodecode.lib.vcs`. For HTTP based |
|
11 | 11 | access of the command line clients special middlewares and utilities are |
|
12 | 12 | implemented in :mod:`rhodecode.lib.middleware`. |
|
13 | 13 | |
|
14 | 14 | |
|
15 | 15 | |
|
16 | 16 | |
|
17 | 17 | .. toctree:: |
|
18 | 18 | :maxdepth: 2 |
|
19 | 19 | |
|
20 | http-transition | |
|
21 | 20 | middleware |
|
22 | 21 | vcsserver |
|
23 | 22 | subversion |
@@ -1,130 +1,124 b'' | |||
|
1 | 1 | .. _debug-mode: |
|
2 | 2 | |
|
3 | 3 | Enabling Debug Mode |
|
4 | 4 | ------------------- |
|
5 | 5 | |
|
6 | 6 | To enable debug mode on a |RCE| instance, you need to set the debug property

7 | 7 | in the :file:`/home/{user}/.rccontrol/{instance-id}/rhodecode.ini` file. To

8 | 8 | do this, use the following steps:
|
9 | 9 | |
|
10 | 10 | 1. Open the file and set the ``debug`` line to ``true`` |
|
11 | 11 | 2. Restart your instance using the ``rccontrol restart`` command,
|
12 | 12 | see the following example: |
|
13 | 13 | |
|
14 | 14 | You can also set the log level; the following are the valid options:
|
15 | 15 | ``debug``, ``info``, ``warning``, or ``fatal``. |
|
16 | 16 | |
|
17 | 17 | .. code-block:: ini |
|
18 | 18 | |
|
19 | 19 | [DEFAULT] |
|
20 | 20 | debug = true |
|
21 | 21 | pdebug = false |
|
22 | 22 | |
|
23 | 23 | .. code-block:: bash |
|
24 | 24 | |
|
25 | 25 | # Restart your instance |
|
26 | 26 | $ rccontrol restart enterprise-1 |
|
27 | 27 | Instance "enterprise-1" successfully stopped. |
|
28 | 28 | Instance "enterprise-1" successfully started. |
|
29 | 29 | |
|
30 | 30 | Debug and Logging Configuration |
|
31 | 31 | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ |
|
32 | 32 | |
|
33 | 33 | Further debugging and logging settings can also be set in the |
|
34 | 34 | :file:`/home/{user}/.rccontrol/{instance-id}/rhodecode.ini` file. |
|
35 | 35 | |
|
36 | 36 | In the logging section, the various packages that run with |RCE| can have |
|
37 | 37 | different debug levels set. If you want to change the logging level, change the

38 | 38 | ``level = DEBUG`` line to one of the valid options.
|
39 | 39 | |
|
40 | 40 | You also need to change the log level for handlers. See the example |
|
41 | 41 | ``##handler`` section below. The ``handler`` level takes the same options as |
|
42 | 42 | the ``debug`` level. |
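For example (a minimal sketch reusing the console handler from the section
below), raising the console output to debug level would read:

.. code-block:: ini

    [handler_console]
    class = StreamHandler
    args = (sys.stderr,)
    level = DEBUG
    formatter = generic

The full logging section looks as follows: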
|
43 | 43 | |
|
44 | 44 | .. code-block:: ini |
|
45 | 45 | |
|
46 | 46 | ################################ |
|
47 | 47 | ### LOGGING CONFIGURATION #### |
|
48 | 48 | ################################ |
|
49 | 49 | [loggers] |
|
50 | keys = root, routes, rhodecode, sqlalchemy, beaker, templates, pyro4 |

50 | keys = root, routes, rhodecode, sqlalchemy, beaker, templates |
|
51 | 51 | |
|
52 | 52 | [handlers] |
|
53 | 53 | keys = console, console_sql, file, file_rotating |
|
54 | 54 | |
|
55 | 55 | [formatters] |
|
56 | 56 | keys = generic, color_formatter, color_formatter_sql |
|
57 | 57 | |
|
58 | 58 | ############# |
|
59 | 59 | ## LOGGERS ## |
|
60 | 60 | ############# |
|
61 | 61 | [logger_root] |
|
62 | 62 | level = NOTSET |
|
63 | 63 | handlers = console |
|
64 | 64 | |
|
65 | 65 | [logger_routes] |
|
66 | 66 | level = DEBUG |
|
67 | 67 | handlers = |
|
68 | 68 | qualname = routes.middleware |
|
69 | 69 | ## "level = DEBUG" logs the route matched and routing variables. |
|
70 | 70 | propagate = 1 |
|
71 | 71 | |
|
72 | 72 | [logger_beaker] |
|
73 | 73 | level = DEBUG |
|
74 | 74 | handlers = |
|
75 | 75 | qualname = beaker.container |
|
76 | 76 | propagate = 1 |
|
77 | 77 | |
|
78 | [logger_pyro4] | |
|
79 | level = DEBUG | |
|
80 | handlers = | |
|
81 | qualname = Pyro4 | |
|
82 | propagate = 1 | |
|
83 | ||
|
84 | 78 | [logger_templates] |
|
85 | 79 | level = INFO |
|
86 | 80 | handlers = |
|
87 | 81 | qualname = pylons.templating |
|
88 | 82 | propagate = 1 |
|
89 | 83 | |
|
90 | 84 | [logger_rhodecode] |
|
91 | 85 | level = DEBUG |
|
92 | 86 | handlers = |
|
93 | 87 | qualname = rhodecode |
|
94 | 88 | propagate = 1 |
|
95 | 89 | |
|
96 | 90 | [logger_sqlalchemy] |
|
97 | 91 | level = INFO |
|
98 | 92 | handlers = console_sql |
|
99 | 93 | qualname = sqlalchemy.engine |
|
100 | 94 | propagate = 0 |
|
101 | 95 | |
|
102 | 96 | ############## |
|
103 | 97 | ## HANDLERS ## |
|
104 | 98 | ############## |
|
105 | 99 | |
|
106 | 100 | [handler_console] |
|
107 | 101 | class = StreamHandler |
|
108 | 102 | args = (sys.stderr,) |
|
109 | 103 | level = INFO |
|
110 | 104 | formatter = generic |
|
111 | 105 | |
|
112 | 106 | [handler_console_sql] |
|
113 | 107 | class = StreamHandler |
|
114 | 108 | args = (sys.stderr,) |
|
115 | 109 | level = WARN |
|
116 | 110 | formatter = generic |
|
117 | 111 | |
|
118 | 112 | [handler_file] |
|
119 | 113 | class = FileHandler |
|
120 | 114 | args = ('rhodecode.log', 'a',) |
|
121 | 115 | level = INFO |
|
122 | 116 | formatter = generic |
|
123 | 117 | |
|
124 | 118 | [handler_file_rotating] |
|
125 | 119 | class = logging.handlers.TimedRotatingFileHandler |
|
126 | 120 | # 'D', 5 - rotate every 5 days
|
127 | 121 | # you can set 'h', 'midnight' |
|
128 | 122 | args = ('rhodecode.log', 'D', 5, 10,) |
|
129 | 123 | level = INFO |
|
130 | 124 | formatter = generic |
@@ -1,304 +1,298 b'' | |||
|
1 | 1 | .. _vcs-server: |
|
2 | 2 | |
|
3 | 3 | VCS Server Management |
|
4 | 4 | --------------------- |
|
5 | 5 | |
|
6 | 6 | The VCS Server handles |RCM| backend functionality. You need to configure |
|
7 | 7 | a VCS Server to run with a |RCM| instance. If you do not, you will be missing |
|
8 | 8 | the connection between |RCM| and its |repos|. This will cause error messages |
|
9 | 9 | on the web interface. You can run your setup in the following configurations, |
|
10 | 10 | currently the best performance is one VCS Server per |RCM| instance: |
|
11 | 11 | |
|
12 | 12 | * One VCS Server per |RCM| instance. |
|
13 | 13 | * One VCS Server handling multiple instances. |
|
14 | 14 | |
|
15 | 15 | .. important:: |
|
16 | 16 | |
|
17 | 17 | If your server locale settings are not correctly configured, |
|
18 | 18 | |RCE| and the VCS Server can run into issues. See this `Ask Ubuntu`_ post |
|
19 | 19 | which explains the problem and gives a solution. |
|
20 | 20 | |
|
21 | 21 | For more information, see the following sections: |
|
22 | 22 | |
|
23 | 23 | * :ref:`install-vcs` |
|
24 | 24 | * :ref:`config-vcs` |
|
25 | 25 | * :ref:`vcs-server-options` |
|
26 | 26 | * :ref:`vcs-server-versions` |
|
27 | 27 | * :ref:`vcs-server-maintain` |
|
28 | 28 | * :ref:`vcs-server-config-file` |
|
29 | 29 | * :ref:`svn-http` |
|
30 | 30 | |
|
31 | 31 | .. _install-vcs: |
|
32 | 32 | |
|
33 | 33 | VCS Server Installation |
|
34 | 34 | ^^^^^^^^^^^^^^^^^^^^^^^ |
|
35 | 35 | |
|
36 | 36 | To install a VCS Server, see |
|
37 | 37 | :ref:`Installing a VCS server <control:install-vcsserver>`. |
|
38 | 38 | |
|
39 | 39 | .. _config-vcs: |
|
40 | 40 | |
|
41 | 41 | Hooking |RCE| to its VCS Server |
|
42 | 42 | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ |
|
43 | 43 | |
|
44 | 44 | To configure a |RCE| instance to use a VCS server, see |
|
45 | 45 | :ref:`Configuring the VCS Server connection <control:manually-vcsserver-ini>`. |
|
46 | 46 | |
|
47 | 47 | .. _vcs-server-options: |
|
48 | 48 | |
|
49 | 49 | |RCE| VCS Server Options |
|
50 | 50 | ^^^^^^^^^^^^^^^^^^^^^^^^ |
|
51 | 51 | |
|
52 | 52 | The following list shows the available options on the |RCM| side of the |
|
53 | 53 | connection to the VCS Server. The settings are configured per |
|
54 | 54 | instance in the |
|
55 | 55 | :file:`/home/{user}/.rccontrol/{instance-id}/rhodecode.ini` file. |
|
56 | 56 | |
|
57 | 57 | .. rst-class:: dl-horizontal |
|
58 | 58 | |
|
59 | 59 | \vcs.backends <available-vcs-systems> |
|
60 | 60 | Set a comma-separated list of the |repo| options available from the |
|
61 | 61 | web interface. The default is ``hg, git, svn``, |
|
62 | 62 | which is all |repo| types available. |
|
63 | 63 | |
|
64 | 64 | \vcs.connection_timeout <seconds> |
|
65 | 65 | Set the length of time in seconds that the VCS Server waits for |
|
66 | 66 | requests to process. After the timeout expires, |
|
67 | 67 | the request is closed. The default is ``3600``. Set to a higher |
|
68 | 68 | number if you experience network latency, or timeout issues with very |
|
69 | 69 | large push/pull requests. |
|
70 | 70 | |
|
71 | 71 | \vcs.server.enable <boolean> |
|
72 | 72 | Enable or disable the VCS Server. The available options are ``true`` or |
|
73 | 73 | ``false``. The default is ``true``. |
|
74 | 74 | |
|
75 | 75 | \vcs.server <host:port> |
|
76 | 76 | Set the host, either hostname or IP Address, and port of the VCS server |
|
77 | 77 | you wish to run with your |RCM| instance. |
|
78 | 78 | |
|
79 | 79 | .. code-block:: ini |
|
80 | 80 | |
|
81 | 81 | ################## |
|
82 | 82 | ### VCS CONFIG ### |
|
83 | 83 | ################## |
|
84 | 84 | # set this line to match your VCS Server |
|
85 | 85 | vcs.server = 127.0.0.1:10004 |
|
86 | 86 | # Set to False to disable the VCS Server |
|
87 | 87 | vcs.server.enable = True |
|
88 | 88 | vcs.backends = hg, git, svn |
|
89 | 89 | vcs.connection_timeout = 3600 |
|
90 | 90 | |
|
91 | 91 | |
|
92 | 92 | .. _vcs-server-versions: |
|
93 | 93 | |
|
94 | 94 | VCS Server Versions |
|
95 | 95 | ^^^^^^^^^^^^^^^^^^^ |
|
96 | 96 | |
|
97 | 97 | An updated version of the VCS Server is released with each |RCE| version. Use |
|
98 | 98 | the VCS Server number that matches with the |RCE| version to pair the |
|
99 | 99 | appropriate ones together. For |RCE| versions pre 3.3.0, |
|
100 | 100 | VCS Server 1.X.Y works with |RCE| 3.X.Y, for example: |
|
101 | 101 | |
|
102 | 102 | * VCS Server 1.0.0 works with |RCE| 3.0.0 |
|
103 | 103 | * VCS Server 1.2.2 works with |RCE| 3.2.2 |
|
104 | 104 | |
|
105 | 105 | For |RCE| versions post 3.3.0, the VCS Server and |RCE| version numbers |
|
106 | 106 | match, for example: |
|
107 | 107 | |
|
108 | 108 | * VCS Server |release| works with |RCE| |release| |
|
109 | 109 | |
|
110 | 110 | .. _vcs-server-maintain: |
|
111 | 111 | |
|
112 | 112 | VCS Server Memory Optimization |
|
113 | 113 | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ |
|
114 | 114 | |
|
115 | 115 | To configure the VCS server to manage the cache efficiently, you need to |
|
116 | 116 | configure the following options in the |
|
117 | 117 | :file:`/home/{user}/.rccontrol/{vcsserver-id}/vcsserver.ini` file. Once |
|
118 | 118 | configured, restart the VCS Server. |
|
119 | 119 | |
|
120 | 120 | .. rst-class:: dl-horizontal |
|
121 | 121 | |
|
122 | 122 | \beaker.cache.repo_object.type = memorylru |
|
123 | 123 | Configures the cache to discard the least recently used items. |
|
124 | 124 | This setting takes the following valid options: |
|
125 | 125 | |
|
126 | 126 | * ``memorylru``: The default setting, which removes the least recently |
|
127 | 127 | used items from the cache. |
|
128 | 128 | * ``memory``: Runs the VCS Server without clearing the cache. |
|
129 | 129 | * ``nocache``: Runs the VCS Server without a cache. This will |
|
130 | 130 | dramatically reduce the VCS Server performance. |
|
131 | 131 | |
|
132 | 132 | \beaker.cache.repo_object.max_items = 100 |
|
133 | 133 | Sets the maximum number of items stored in the cache, before the cache |
|
134 | 134 | starts to be cleared. |
|
135 | 135 | |
|
136 | 136 | As a general rule of thumb, running this value at 120 resulted in a |
|
137 | 137 | 5GB cache. Running it at 240 resulted in a 9GB cache. Your results |
|
138 | 138 | will differ based on usage patterns and |repo| sizes. |
|
139 | 139 | |
|
140 | 140 | Tweaking this value to run at a fairly constant memory load on your |
|
141 | 141 | server will help performance. |
|
142 | 142 | |
|
143 | 143 | To clear the cache completely, you can restart the VCS Server. |
|
144 | 144 | |
|
145 | 145 | .. important:: |
|
146 | 146 | |
|
147 | 147 | While the VCS Server handles a restart gracefully on the web interface, |
|
148 | 148 | it will drop connections during push/pull requests. So it is recommended |
|
149 | 149 | you only perform this when there is very little traffic on the instance. |
|
150 | 150 | |
|
151 | 151 | Use the following example to restart your VCS Server, |
|
152 | 152 | for full details see the :ref:`RhodeCode Control CLI <control:rcc-cli>`. |
|
153 | 153 | |
|
154 | 154 | .. code-block:: bash |
|
155 | 155 | |
|
156 | 156 | $ rccontrol status |
|
157 | 157 | |
|
158 | 158 | .. code-block:: vim |
|
159 | 159 | |
|
160 | 160 | - NAME: vcsserver-1 |
|
161 | 161 | - STATUS: RUNNING |
|
162 | 162 | - TYPE: VCSServer |
|
163 | 163 | - VERSION: 1.0.0 |
|
164 | 164 | - URL: http://127.0.0.1:10001 |
|
165 | 165 | |
|
166 | 166 | $ rccontrol restart vcsserver-1 |
|
167 | 167 | Instance "vcsserver-1" successfully stopped. |
|
168 | 168 | Instance "vcsserver-1" successfully started. |
|
169 | 169 | |
|
170 | 170 | .. _vcs-server-config-file: |
|
171 | 171 | |
|
172 | 172 | VCS Server Configuration |
|
173 | 173 | ^^^^^^^^^^^^^^^^^^^^^^^^ |
|
174 | 174 | |
|
175 | 175 | You can configure settings for multiple VCS Servers on your |
|
176 | 176 | system using their individual configuration files. Use the following |
|
177 | 177 | properties inside the configuration file to set up your system. The default |
|
178 | 178 | location is :file:`home/{user}/.rccontrol/{vcsserver-id}/vcsserver.ini`. |
|
179 | 179 | For a more detailed explanation of the logger levers, see :ref:`debug-mode`. |
|
180 | 180 | |
|
181 | 181 | .. rst-class:: dl-horizontal |
|
182 | 182 | |
|
183 | 183 | \host <ip-address> |
|
184 | 184 | Set the host on which the VCS Server will run. |
|
185 | 185 | |
|
186 | 186 | \port <int> |
|
187 | 187 | Set the port number on which the VCS Server will be available. |
|
188 | 188 | |
|
189 | 189 | \locale <locale_utf> |
|
190 | 190 | Set the locale the VCS Server expects. |
|
191 | 191 | |
|
192 | 192 | \threadpool_size <int> |
|
193 | 193 | Set the size of the threadpool used to communicate |
|
194 | 194 | with the WSGI workers. This should be at least 6 times the number of |
|
195 | 195 | WSGI worker processes. |
|
196 | 196 | |
|
197 | 197 | \timeout <seconds> |
|
198 | 198 | Set the timeout for RPC communication in seconds. |
|
199 | 199 | |
|
200 | 200 | .. note:: |
|
201 | 201 | |
|
202 | 202 | After making changes, you need to restart your VCS Server to pick them up. |
|
203 | 203 | |
|
204 | 204 | .. code-block:: ini |
|
205 | 205 | |
|
206 | 206 | ################################################################################ |
|
207 | 207 | # RhodeCode VCSServer - configuration # |
|
208 | 208 | # # |
|
209 | 209 | ################################################################################ |
|
210 | 210 | |
|
211 | 211 | [DEFAULT] |
|
212 | 212 | host = 127.0.0.1 |
|
213 | 213 | port = 9900 |
|
214 | 214 | locale = en_US.UTF-8 |
|
215 | 215 | # number of worker threads, this should be set based on a formula threadpool=N*6 |
|
216 | 216 | # where N is number of RhodeCode Enterprise workers, eg. running 2 instances |
|
217 | 217 | # 8 gunicorn workers each would be 2 * 8 * 6 = 96, threadpool_size = 96 |
|
218 | 218 | threadpool_size = 16 |
|
219 | 219 | timeout = 0 |
|
220 | 220 | |
|
221 | 221 | # cache regions, please don't change |
|
222 | 222 | beaker.cache.regions = repo_object |
|
223 | 223 | beaker.cache.repo_object.type = memorylru |
|
224 | 224 | beaker.cache.repo_object.max_items = 1000 |
|
225 | 225 | |
|
226 | 226 | # cache auto-expires after N seconds |
|
227 | 227 | beaker.cache.repo_object.expire = 10 |
|
228 | 228 | beaker.cache.repo_object.enabled = true |
|
229 | 229 | |
|
230 | 230 | |
|
231 | 231 | ################################ |
|
232 | 232 | ### LOGGING CONFIGURATION #### |
|
233 | 233 | ################################ |
|
234 | 234 | [loggers] |
|
235 | keys = root, vcsserver, beaker, pyro4 |

235 | keys = root, vcsserver, beaker |
|
236 | 236 | |
|
237 | 237 | [handlers] |
|
238 | 238 | keys = console |
|
239 | 239 | |
|
240 | 240 | [formatters] |
|
241 | 241 | keys = generic |
|
242 | 242 | |
|
243 | 243 | ############# |
|
244 | 244 | ## LOGGERS ## |
|
245 | 245 | ############# |
|
246 | 246 | [logger_root] |
|
247 | 247 | level = NOTSET |
|
248 | 248 | handlers = console |
|
249 | 249 | |
|
250 | 250 | [logger_vcsserver] |
|
251 | 251 | level = DEBUG |
|
252 | 252 | handlers = |
|
253 | 253 | qualname = vcsserver |
|
254 | 254 | propagate = 1 |
|
255 | 255 | |
|
256 | 256 | [logger_beaker] |
|
257 | 257 | level = DEBUG |
|
258 | 258 | handlers = |
|
259 | 259 | qualname = beaker |
|
260 | 260 | propagate = 1 |
|
261 | 261 | |
|
262 | [logger_pyro4] | |
|
263 | level = DEBUG | |
|
264 | handlers = | |
|
265 | qualname = Pyro4 | |
|
266 | propagate = 1 | |
|
267 | ||
|
268 | 262 | |
|
269 | 263 | ############## |
|
270 | 264 | ## HANDLERS ## |
|
271 | 265 | ############## |
|
272 | 266 | |
|
273 | 267 | [handler_console] |
|
274 | 268 | class = StreamHandler |
|
275 | 269 | args = (sys.stderr,) |
|
276 | 270 | level = DEBUG |
|
277 | 271 | formatter = generic |
|
278 | 272 | |
|
279 | 273 | [handler_file] |
|
280 | 274 | class = FileHandler |
|
281 | 275 | args = ('vcsserver.log', 'a',) |
|
282 | 276 | level = DEBUG |
|
283 | 277 | formatter = generic |
|
284 | 278 | |
|
285 | 279 | [handler_file_rotating] |
|
286 | 280 | class = logging.handlers.TimedRotatingFileHandler |
|
287 | 281 | # 'D', 5 - rotate every 5days |
|
288 | 282 | # you can set 'h', 'midnight' |
|
289 | 283 | args = ('vcsserver.log', 'D', 5, 10,) |
|
290 | 284 | level = DEBUG |
|
291 | 285 | formatter = generic |
|
292 | 286 | |
|
293 | 287 | ################ |
|
294 | 288 | ## FORMATTERS ## |
|
295 | 289 | ################ |
|
296 | 290 | |
|
297 | 291 | [formatter_generic] |
|
298 | 292 | format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s |
|
299 | 293 | datefmt = %Y-%m-%d %H:%M:%S |
|
300 | 294 | |
|
301 | 295 | |
|
302 | 296 | .. _Subversion Red Book: http://svnbook.red-bean.com/en/1.7/svn-book.html#svn.ref.svn |
|
303 | 297 | |
|
304 | 298 | .. _Ask Ubuntu: http://askubuntu.com/questions/162391/how-do-i-fix-my-locale-issue |
@@ -1,1852 +1,1826 b'' | |||
|
1 | 1 | # Generated by pip2nix 0.4.0 |
|
2 | 2 | # See https://github.com/johbo/pip2nix |
|
3 | 3 | |
|
4 | 4 | { |
|
5 | 5 | Babel = super.buildPythonPackage { |
|
6 | 6 | name = "Babel-1.3"; |
|
7 | 7 | buildInputs = with self; []; |
|
8 | 8 | doCheck = false; |
|
9 | 9 | propagatedBuildInputs = with self; [pytz]; |
|
10 | 10 | src = fetchurl { |
|
11 | 11 | url = "https://pypi.python.org/packages/33/27/e3978243a03a76398c384c83f7ca879bc6e8f1511233a621fcada135606e/Babel-1.3.tar.gz"; |
|
12 | 12 | md5 = "5264ceb02717843cbc9ffce8e6e06bdb"; |
|
13 | 13 | }; |
|
14 | 14 | meta = { |
|
15 | 15 | license = [ pkgs.lib.licenses.bsdOriginal ]; |
|
16 | 16 | }; |
|
17 | 17 | }; |
|
18 | 18 | Beaker = super.buildPythonPackage { |
|
19 | 19 | name = "Beaker-1.7.0"; |
|
20 | 20 | buildInputs = with self; []; |
|
21 | 21 | doCheck = false; |
|
22 | 22 | propagatedBuildInputs = with self; []; |
|
23 | 23 | src = fetchurl { |
|
24 | 24 | url = "https://pypi.python.org/packages/97/8e/409d2e7c009b8aa803dc9e6f239f1db7c3cdf578249087a404e7c27a505d/Beaker-1.7.0.tar.gz"; |
|
25 | 25 | md5 = "386be3f7fe427358881eee4622b428b3"; |
|
26 | 26 | }; |
|
27 | 27 | meta = { |
|
28 | 28 | license = [ pkgs.lib.licenses.bsdOriginal ]; |
|
29 | 29 | }; |
|
30 | 30 | }; |
|
31 | 31 | CProfileV = super.buildPythonPackage { |
|
32 | 32 | name = "CProfileV-1.0.6"; |
|
33 | 33 | buildInputs = with self; []; |
|
34 | 34 | doCheck = false; |
|
35 | 35 | propagatedBuildInputs = with self; [bottle]; |
|
36 | 36 | src = fetchurl { |
|
37 | 37 | url = "https://pypi.python.org/packages/eb/df/983a0b6cfd3ac94abf023f5011cb04f33613ace196e33f53c86cf91850d5/CProfileV-1.0.6.tar.gz"; |
|
38 | 38 | md5 = "08c7c242b6e64237bc53c5d13537e03d"; |
|
39 | 39 | }; |
|
40 | 40 | meta = { |
|
41 | 41 | license = [ pkgs.lib.licenses.mit ]; |
|
42 | 42 | }; |
|
43 | 43 | }; |
|
44 | 44 | Chameleon = super.buildPythonPackage { |
|
45 | 45 | name = "Chameleon-2.24"; |
|
46 | 46 | buildInputs = with self; []; |
|
47 | 47 | doCheck = false; |
|
48 | 48 | propagatedBuildInputs = with self; []; |
|
49 | 49 | src = fetchurl { |
|
50 | 50 | url = "https://pypi.python.org/packages/5a/9e/637379ffa13c5172b5c0e704833ffea6bf51cec7567f93fd6e903d53ed74/Chameleon-2.24.tar.gz"; |
|
51 | 51 | md5 = "1b01f1f6533a8a11d0d2f2366dec5342"; |
|
52 | 52 | }; |
|
53 | 53 | meta = { |
|
54 | 54 | license = [ { fullName = "BSD-like (http://repoze.org/license.html)"; } ]; |
|
55 | 55 | }; |
|
56 | 56 | }; |
|
57 | 57 | FormEncode = super.buildPythonPackage { |
|
58 | 58 | name = "FormEncode-1.2.4"; |
|
59 | 59 | buildInputs = with self; []; |
|
60 | 60 | doCheck = false; |
|
61 | 61 | propagatedBuildInputs = with self; []; |
|
62 | 62 | src = fetchurl { |
|
63 | 63 | url = "https://pypi.python.org/packages/8e/59/0174271a6f004512e0201188593e6d319db139d14cb7490e488bbb078015/FormEncode-1.2.4.tar.gz"; |
|
64 | 64 | md5 = "6bc17fb9aed8aea198975e888e2077f4"; |
|
65 | 65 | }; |
|
66 | 66 | meta = { |
|
67 | 67 | license = [ pkgs.lib.licenses.psfl ]; |
|
68 | 68 | }; |
|
69 | 69 | }; |
|
70 | 70 | Jinja2 = super.buildPythonPackage { |
|
71 | 71 | name = "Jinja2-2.7.3"; |
|
72 | 72 | buildInputs = with self; []; |
|
73 | 73 | doCheck = false; |
|
74 | 74 | propagatedBuildInputs = with self; [MarkupSafe]; |
|
75 | 75 | src = fetchurl { |
|
76 | 76 | url = "https://pypi.python.org/packages/b0/73/eab0bca302d6d6a0b5c402f47ad1760dc9cb2dd14bbc1873ad48db258e4d/Jinja2-2.7.3.tar.gz"; |
|
77 | 77 | md5 = "b9dffd2f3b43d673802fe857c8445b1a"; |
|
78 | 78 | }; |
|
79 | 79 | meta = { |
|
80 | 80 | license = [ pkgs.lib.licenses.bsdOriginal ]; |
|
81 | 81 | }; |
|
82 | 82 | }; |
|
83 | 83 | Mako = super.buildPythonPackage { |
|
84 | 84 | name = "Mako-1.0.6"; |
|
85 | 85 | buildInputs = with self; []; |
|
86 | 86 | doCheck = false; |
|
87 | 87 | propagatedBuildInputs = with self; [MarkupSafe]; |
|
88 | 88 | src = fetchurl { |
|
89 | 89 | url = "https://pypi.python.org/packages/56/4b/cb75836863a6382199aefb3d3809937e21fa4cb0db15a4f4ba0ecc2e7e8e/Mako-1.0.6.tar.gz"; |
|
90 | 90 | md5 = "a28e22a339080316b2acc352b9ee631c"; |
|
91 | 91 | }; |
|
92 | 92 | meta = { |
|
93 | 93 | license = [ pkgs.lib.licenses.mit ]; |
|
94 | 94 | }; |
|
95 | 95 | }; |
|
96 | 96 | Markdown = super.buildPythonPackage { |
|
97 | 97 | name = "Markdown-2.6.7"; |
|
98 | 98 | buildInputs = with self; []; |
|
99 | 99 | doCheck = false; |
|
100 | 100 | propagatedBuildInputs = with self; []; |
|
101 | 101 | src = fetchurl { |
|
102 | 102 | url = "https://pypi.python.org/packages/48/a4/fc6b002789c2239ac620ca963694c95b8f74e4747769cdf6021276939e74/Markdown-2.6.7.zip"; |
|
103 | 103 | md5 = "632710a7474bbb74a82084392251061f"; |
|
104 | 104 | }; |
|
105 | 105 | meta = { |
|
106 | 106 | license = [ pkgs.lib.licenses.bsdOriginal ]; |
|
107 | 107 | }; |
|
108 | 108 | }; |
|
109 | 109 | MarkupSafe = super.buildPythonPackage { |
|
110 | 110 | name = "MarkupSafe-0.23"; |
|
111 | 111 | buildInputs = with self; []; |
|
112 | 112 | doCheck = false; |
|
113 | 113 | propagatedBuildInputs = with self; []; |
|
114 | 114 | src = fetchurl { |
|
115 | 115 | url = "https://pypi.python.org/packages/c0/41/bae1254e0396c0cc8cf1751cb7d9afc90a602353695af5952530482c963f/MarkupSafe-0.23.tar.gz"; |
|
116 | 116 | md5 = "f5ab3deee4c37cd6a922fb81e730da6e"; |
|
117 | 117 | }; |
|
118 | 118 | meta = { |
|
119 | 119 | license = [ pkgs.lib.licenses.bsdOriginal ]; |
|
120 | 120 | }; |
|
121 | 121 | }; |
|
122 | 122 | MySQL-python = super.buildPythonPackage { |
|
123 | 123 | name = "MySQL-python-1.2.5"; |
|
124 | 124 | buildInputs = with self; []; |
|
125 | 125 | doCheck = false; |
|
126 | 126 | propagatedBuildInputs = with self; []; |
|
127 | 127 | src = fetchurl { |
|
128 | 128 | url = "https://pypi.python.org/packages/a5/e9/51b544da85a36a68debe7a7091f068d802fc515a3a202652828c73453cad/MySQL-python-1.2.5.zip"; |
|
129 | 129 | md5 = "654f75b302db6ed8dc5a898c625e030c"; |
|
130 | 130 | }; |
|
131 | 131 | meta = { |
|
132 | 132 | license = [ pkgs.lib.licenses.gpl1 ]; |
|
133 | 133 | }; |
|
134 | 134 | }; |
|
135 | 135 | Paste = super.buildPythonPackage { |
|
136 | 136 | name = "Paste-2.0.3"; |
|
137 | 137 | buildInputs = with self; []; |
|
138 | 138 | doCheck = false; |
|
139 | 139 | propagatedBuildInputs = with self; [six]; |
|
140 | 140 | src = fetchurl { |
|
141 | 141 | url = "https://pypi.python.org/packages/30/c3/5c2f7c7a02e4f58d4454353fa1c32c94f79fa4e36d07a67c0ac295ea369e/Paste-2.0.3.tar.gz"; |
|
142 | 142 | md5 = "1231e14eae62fa7ed76e9130b04bc61e"; |
|
143 | 143 | }; |
|
144 | 144 | meta = { |
|
145 | 145 | license = [ pkgs.lib.licenses.mit ]; |
|
146 | 146 | }; |
|
147 | 147 | }; |
|
148 | 148 | PasteDeploy = super.buildPythonPackage { |
|
149 | 149 | name = "PasteDeploy-1.5.2"; |
|
150 | 150 | buildInputs = with self; []; |
|
151 | 151 | doCheck = false; |
|
152 | 152 | propagatedBuildInputs = with self; []; |
|
153 | 153 | src = fetchurl { |
|
154 | 154 | url = "https://pypi.python.org/packages/0f/90/8e20cdae206c543ea10793cbf4136eb9a8b3f417e04e40a29d72d9922cbd/PasteDeploy-1.5.2.tar.gz"; |
|
155 | 155 | md5 = "352b7205c78c8de4987578d19431af3b"; |
|
156 | 156 | }; |
|
157 | 157 | meta = { |
|
158 | 158 | license = [ pkgs.lib.licenses.mit ]; |
|
159 | 159 | }; |
|
160 | 160 | }; |
|
161 | 161 | PasteScript = super.buildPythonPackage { |
|
162 | 162 | name = "PasteScript-1.7.5"; |
|
163 | 163 | buildInputs = with self; []; |
|
164 | 164 | doCheck = false; |
|
165 | 165 | propagatedBuildInputs = with self; [Paste PasteDeploy]; |
|
166 | 166 | src = fetchurl { |
|
167 | 167 | url = "https://pypi.python.org/packages/a5/05/fc60efa7c2f17a1dbaeccb2a903a1e90902d92b9d00eebabe3095829d806/PasteScript-1.7.5.tar.gz"; |
|
168 | 168 | md5 = "4c72d78dcb6bb993f30536842c16af4d"; |
|
169 | 169 | }; |
|
170 | 170 | meta = { |
|
171 | 171 | license = [ pkgs.lib.licenses.mit ]; |
|
172 | 172 | }; |
|
173 | 173 | }; |
|
174 | 174 | Pygments = super.buildPythonPackage { |
|
175 | 175 | name = "Pygments-2.2.0"; |
|
176 | 176 | buildInputs = with self; []; |
|
177 | 177 | doCheck = false; |
|
178 | 178 | propagatedBuildInputs = with self; []; |
|
179 | 179 | src = fetchurl { |
|
180 | 180 | url = "https://pypi.python.org/packages/71/2a/2e4e77803a8bd6408a2903340ac498cb0a2181811af7c9ec92cb70b0308a/Pygments-2.2.0.tar.gz"; |
|
181 | 181 | md5 = "13037baca42f16917cbd5ad2fab50844"; |
|
182 | 182 | }; |
|
183 | 183 | meta = { |
|
184 | 184 | license = [ pkgs.lib.licenses.bsdOriginal ]; |
|
185 | 185 | }; |
|
186 | 186 | }; |
|
187 | 187 | Pylons = super.buildPythonPackage { |
|
188 | 188 | name = "Pylons-1.0.2.dev20170205"; |
|
189 | 189 | buildInputs = with self; []; |
|
190 | 190 | doCheck = false; |
|
191 | 191 | propagatedBuildInputs = with self; [Routes WebHelpers Beaker Paste PasteDeploy PasteScript FormEncode simplejson decorator nose Mako WebError WebTest Tempita MarkupSafe WebOb]; |
|
192 | 192 | src = fetchurl { |
|
193 | 193 | url = "https://code.rhodecode.com/upstream/pylons/archive/707354ee4261b9c10450404fc9852ccea4fd667d.tar.gz?md5=f26633726fa2cd3a340316ee6a5d218f"; |
|
194 | 194 | md5 = "f26633726fa2cd3a340316ee6a5d218f"; |
|
195 | 195 | }; |
|
196 | 196 | meta = { |
|
197 | 197 | license = [ pkgs.lib.licenses.bsdOriginal ]; |
|
198 | 198 | }; |
|
199 | 199 | }; |
|
200 | Pyro4 = super.buildPythonPackage { | |
|
201 | name = "Pyro4-4.41"; | |
|
202 | buildInputs = with self; []; | |
|
203 | doCheck = false; | |
|
204 | propagatedBuildInputs = with self; [serpent]; | |
|
205 | src = fetchurl { | |
|
206 | url = "https://pypi.python.org/packages/56/2b/89b566b4bf3e7f8ba790db2d1223852f8cb454c52cab7693dd41f608ca2a/Pyro4-4.41.tar.gz"; | |
|
207 | md5 = "ed69e9bfafa9c06c049a87cb0c4c2b6c"; | |
|
208 | }; | |
|
209 | meta = { | |
|
210 | license = [ pkgs.lib.licenses.mit ]; | |
|
211 | }; | |
|
212 | }; | |
|
213 | 200 | Routes = super.buildPythonPackage { |
|
214 | 201 | name = "Routes-1.13"; |
|
215 | 202 | buildInputs = with self; []; |
|
216 | 203 | doCheck = false; |
|
217 | 204 | propagatedBuildInputs = with self; [repoze.lru]; |
|
218 | 205 | src = fetchurl { |
|
219 | 206 | url = "https://pypi.python.org/packages/88/d3/259c3b3cde8837eb9441ab5f574a660e8a4acea8f54a078441d4d2acac1c/Routes-1.13.tar.gz"; |
|
220 | 207 | md5 = "d527b0ab7dd9172b1275a41f97448783"; |
|
221 | 208 | }; |
|
222 | 209 | meta = { |
|
223 | 210 | license = [ pkgs.lib.licenses.bsdOriginal ]; |
|
224 | 211 | }; |
|
225 | 212 | }; |
|
226 | 213 | SQLAlchemy = super.buildPythonPackage { |
|
227 | 214 | name = "SQLAlchemy-0.9.9"; |
|
228 | 215 | buildInputs = with self; []; |
|
229 | 216 | doCheck = false; |
|
230 | 217 | propagatedBuildInputs = with self; []; |
|
231 | 218 | src = fetchurl { |
|
232 | 219 | url = "https://pypi.python.org/packages/28/f7/1bbfd0d8597e8c358d5e15a166a486ad82fc5579b4e67b6ef7c05b1d182b/SQLAlchemy-0.9.9.tar.gz"; |
|
233 | 220 | md5 = "8a10a9bd13ed3336ef7333ac2cc679ff"; |
|
234 | 221 | }; |
|
235 | 222 | meta = { |
|
236 | 223 | license = [ pkgs.lib.licenses.mit ]; |
|
237 | 224 | }; |
|
238 | 225 | }; |
|
239 | 226 | Sphinx = super.buildPythonPackage { |
|
240 | 227 | name = "Sphinx-1.2.2"; |
|
241 | 228 | buildInputs = with self; []; |
|
242 | 229 | doCheck = false; |
|
243 | 230 | propagatedBuildInputs = with self; [Pygments docutils Jinja2]; |
|
244 | 231 | src = fetchurl { |
|
245 | 232 | url = "https://pypi.python.org/packages/0a/50/34017e6efcd372893a416aba14b84a1a149fc7074537b0e9cb6ca7b7abe9/Sphinx-1.2.2.tar.gz"; |
|
246 | 233 | md5 = "3dc73ccaa8d0bfb2d62fb671b1f7e8a4"; |
|
247 | 234 | }; |
|
248 | 235 | meta = { |
|
249 | 236 | license = [ pkgs.lib.licenses.bsdOriginal ]; |
|
250 | 237 | }; |
|
251 | 238 | }; |
|
252 | 239 | Tempita = super.buildPythonPackage { |
|
253 | 240 | name = "Tempita-0.5.2"; |
|
254 | 241 | buildInputs = with self; []; |
|
255 | 242 | doCheck = false; |
|
256 | 243 | propagatedBuildInputs = with self; []; |
|
257 | 244 | src = fetchurl { |
|
258 | 245 | url = "https://pypi.python.org/packages/56/c8/8ed6eee83dbddf7b0fc64dd5d4454bc05e6ccaafff47991f73f2894d9ff4/Tempita-0.5.2.tar.gz"; |
|
259 | 246 | md5 = "4c2f17bb9d481821c41b6fbee904cea1"; |
|
260 | 247 | }; |
|
261 | 248 | meta = { |
|
262 | 249 | license = [ pkgs.lib.licenses.mit ]; |
|
263 | 250 | }; |
|
264 | 251 | }; |
|
265 | 252 | URLObject = super.buildPythonPackage { |
|
266 | 253 | name = "URLObject-2.4.0"; |
|
267 | 254 | buildInputs = with self; []; |
|
268 | 255 | doCheck = false; |
|
269 | 256 | propagatedBuildInputs = with self; []; |
|
270 | 257 | src = fetchurl { |
|
271 | 258 | url = "https://pypi.python.org/packages/cb/b6/e25e58500f9caef85d664bec71ec67c116897bfebf8622c32cb75d1ca199/URLObject-2.4.0.tar.gz"; |
|
272 | 259 | md5 = "2ed819738a9f0a3051f31dc9924e3065"; |
|
273 | 260 | }; |
|
274 | 261 | meta = { |
|
275 | 262 | license = [ ]; |
|
276 | 263 | }; |
|
277 | 264 | }; |
|
278 | 265 | WebError = super.buildPythonPackage { |
|
279 | 266 | name = "WebError-0.10.3"; |
|
280 | 267 | buildInputs = with self; []; |
|
281 | 268 | doCheck = false; |
|
282 | 269 | propagatedBuildInputs = with self; [WebOb Tempita Pygments Paste]; |
|
283 | 270 | src = fetchurl { |
|
284 | 271 | url = "https://pypi.python.org/packages/35/76/e7e5c2ce7e9c7f31b54c1ff295a495886d1279a002557d74dd8957346a79/WebError-0.10.3.tar.gz"; |
|
285 | 272 | md5 = "84b9990b0baae6fd440b1e60cdd06f9a"; |
|
286 | 273 | }; |
|
287 | 274 | meta = { |
|
288 | 275 | license = [ pkgs.lib.licenses.mit ]; |
|
289 | 276 | }; |
|
290 | 277 | }; |
|
291 | 278 | WebHelpers = super.buildPythonPackage { |
|
292 | 279 | name = "WebHelpers-1.3"; |
|
293 | 280 | buildInputs = with self; []; |
|
294 | 281 | doCheck = false; |
|
295 | 282 | propagatedBuildInputs = with self; [MarkupSafe]; |
|
296 | 283 | src = fetchurl { |
|
297 | 284 | url = "https://pypi.python.org/packages/ee/68/4d07672821d514184357f1552f2dad923324f597e722de3b016ca4f7844f/WebHelpers-1.3.tar.gz"; |
|
298 | 285 | md5 = "32749ffadfc40fea51075a7def32588b"; |
|
299 | 286 | }; |
|
300 | 287 | meta = { |
|
301 | 288 | license = [ pkgs.lib.licenses.bsdOriginal ]; |
|
302 | 289 | }; |
|
303 | 290 | }; |
|
304 | 291 | WebHelpers2 = super.buildPythonPackage { |
|
305 | 292 | name = "WebHelpers2-2.0"; |
|
306 | 293 | buildInputs = with self; []; |
|
307 | 294 | doCheck = false; |
|
308 | 295 | propagatedBuildInputs = with self; [MarkupSafe six]; |
|
309 | 296 | src = fetchurl { |
|
310 | 297 | url = "https://pypi.python.org/packages/ff/30/56342c6ea522439e3662427c8d7b5e5b390dff4ff2dc92d8afcb8ab68b75/WebHelpers2-2.0.tar.gz"; |
|
311 | 298 | md5 = "0f6b68d70c12ee0aed48c00b24da13d3"; |
|
312 | 299 | }; |
|
313 | 300 | meta = { |
|
314 | 301 | license = [ pkgs.lib.licenses.mit ]; |
|
315 | 302 | }; |
|
316 | 303 | }; |
|
317 | 304 | WebOb = super.buildPythonPackage { |
|
318 | 305 | name = "WebOb-1.3.1"; |
|
319 | 306 | buildInputs = with self; []; |
|
320 | 307 | doCheck = false; |
|
321 | 308 | propagatedBuildInputs = with self; []; |
|
322 | 309 | src = fetchurl { |
|
323 | 310 | url = "https://pypi.python.org/packages/16/78/adfc0380b8a0d75b2d543fa7085ba98a573b1ae486d9def88d172b81b9fa/WebOb-1.3.1.tar.gz"; |
|
324 | 311 | md5 = "20918251c5726956ba8fef22d1556177"; |
|
325 | 312 | }; |
|
326 | 313 | meta = { |
|
327 | 314 | license = [ pkgs.lib.licenses.mit ]; |
|
328 | 315 | }; |
|
329 | 316 | }; |
|
330 | 317 | WebTest = super.buildPythonPackage { |
|
331 | 318 | name = "WebTest-1.4.3"; |
|
332 | 319 | buildInputs = with self; []; |
|
333 | 320 | doCheck = false; |
|
334 | 321 | propagatedBuildInputs = with self; [WebOb]; |
|
335 | 322 | src = fetchurl { |
|
336 | 323 | url = "https://pypi.python.org/packages/51/3d/84fd0f628df10b30c7db87895f56d0158e5411206b721ca903cb51bfd948/WebTest-1.4.3.zip"; |
|
337 | 324 | md5 = "631ce728bed92c681a4020a36adbc353"; |
|
338 | 325 | }; |
|
339 | 326 | meta = { |
|
340 | 327 | license = [ pkgs.lib.licenses.mit ]; |
|
341 | 328 | }; |
|
342 | 329 | }; |
|
343 | 330 | Whoosh = super.buildPythonPackage { |
|
344 | 331 | name = "Whoosh-2.7.4"; |
|
345 | 332 | buildInputs = with self; []; |
|
346 | 333 | doCheck = false; |
|
347 | 334 | propagatedBuildInputs = with self; []; |
|
348 | 335 | src = fetchurl { |
|
349 | 336 | url = "https://pypi.python.org/packages/25/2b/6beed2107b148edc1321da0d489afc4617b9ed317ef7b72d4993cad9b684/Whoosh-2.7.4.tar.gz"; |
|
350 | 337 | md5 = "c2710105f20b3e29936bd2357383c325"; |
|
351 | 338 | }; |
|
352 | 339 | meta = { |
|
353 | 340 | license = [ pkgs.lib.licenses.bsdOriginal pkgs.lib.licenses.bsd2 ]; |
|
354 | 341 | }; |
|
355 | 342 | }; |
|
356 | 343 | alembic = super.buildPythonPackage { |
|
357 | 344 | name = "alembic-0.8.4"; |
|
358 | 345 | buildInputs = with self; []; |
|
359 | 346 | doCheck = false; |
|
360 | 347 | propagatedBuildInputs = with self; [SQLAlchemy Mako python-editor]; |
|
361 | 348 | src = fetchurl { |
|
362 | 349 | url = "https://pypi.python.org/packages/ca/7e/299b4499b5c75e5a38c5845145ad24755bebfb8eec07a2e1c366b7181eeb/alembic-0.8.4.tar.gz"; |
|
363 | 350 | md5 = "5f95d8ee62b443f9b37eb5bee76c582d"; |
|
364 | 351 | }; |
|
365 | 352 | meta = { |
|
366 | 353 | license = [ pkgs.lib.licenses.mit ]; |
|
367 | 354 | }; |
|
368 | 355 | }; |
|
369 | 356 | amqplib = super.buildPythonPackage { |
|
370 | 357 | name = "amqplib-1.0.2"; |
|
371 | 358 | buildInputs = with self; []; |
|
372 | 359 | doCheck = false; |
|
373 | 360 | propagatedBuildInputs = with self; []; |
|
374 | 361 | src = fetchurl { |
|
375 | 362 | url = "https://pypi.python.org/packages/75/b7/8c2429bf8d92354a0118614f9a4d15e53bc69ebedce534284111de5a0102/amqplib-1.0.2.tgz"; |
|
376 | 363 | md5 = "5c92f17fbedd99b2b4a836d4352d1e2f"; |
|
377 | 364 | }; |
|
378 | 365 | meta = { |
|
379 | 366 | license = [ { fullName = "LGPL"; } { fullName = "GNU Library or Lesser General Public License (LGPL)"; } ]; |
|
380 | 367 | }; |
|
381 | 368 | }; |
|
382 | 369 | anyjson = super.buildPythonPackage { |
|
383 | 370 | name = "anyjson-0.3.3"; |
|
384 | 371 | buildInputs = with self; []; |
|
385 | 372 | doCheck = false; |
|
386 | 373 | propagatedBuildInputs = with self; []; |
|
387 | 374 | src = fetchurl { |
|
388 | 375 | url = "https://pypi.python.org/packages/c3/4d/d4089e1a3dd25b46bebdb55a992b0797cff657b4477bc32ce28038fdecbc/anyjson-0.3.3.tar.gz"; |
|
389 | 376 | md5 = "2ea28d6ec311aeeebaf993cb3008b27c"; |
|
390 | 377 | }; |
|
391 | 378 | meta = { |
|
392 | 379 | license = [ pkgs.lib.licenses.bsdOriginal ]; |
|
393 | 380 | }; |
|
394 | 381 | }; |
|
395 | 382 | appenlight-client = super.buildPythonPackage { |
|
396 | 383 | name = "appenlight-client-0.6.14"; |
|
397 | 384 | buildInputs = with self; []; |
|
398 | 385 | doCheck = false; |
|
399 | 386 | propagatedBuildInputs = with self; [WebOb requests]; |
|
400 | 387 | src = fetchurl { |
|
401 | 388 | url = "https://pypi.python.org/packages/4d/e0/23fee3ebada8143f707e65c06bcb82992040ee64ea8355e044ed55ebf0c1/appenlight_client-0.6.14.tar.gz"; |
|
402 | 389 | md5 = "578c69b09f4356d898fff1199b98a95c"; |
|
403 | 390 | }; |
|
404 | 391 | meta = { |
|
405 | 392 | license = [ pkgs.lib.licenses.bsdOriginal { fullName = "DFSG approved"; } ]; |
|
406 | 393 | }; |
|
407 | 394 | }; |
|
408 | 395 | authomatic = super.buildPythonPackage { |
|
409 | 396 | name = "authomatic-0.1.0.post1"; |
|
410 | 397 | buildInputs = with self; []; |
|
411 | 398 | doCheck = false; |
|
412 | 399 | propagatedBuildInputs = with self; []; |
|
413 | 400 | src = fetchurl { |
|
414 | 401 | url = "https://pypi.python.org/packages/08/1a/8a930461e604c2d5a7a871e1ac59fa82ccf994c32e807230c8d2fb07815a/Authomatic-0.1.0.post1.tar.gz"; |
|
415 | 402 | md5 = "be3f3ce08747d776aae6d6cc8dcb49a9"; |
|
416 | 403 | }; |
|
417 | 404 | meta = { |
|
418 | 405 | license = [ pkgs.lib.licenses.mit ]; |
|
419 | 406 | }; |
|
420 | 407 | }; |
|
421 | 408 | backport-ipaddress = super.buildPythonPackage { |
|
422 | 409 | name = "backport-ipaddress-0.1"; |
|
423 | 410 | buildInputs = with self; []; |
|
424 | 411 | doCheck = false; |
|
425 | 412 | propagatedBuildInputs = with self; []; |
|
426 | 413 | src = fetchurl { |
|
427 | 414 | url = "https://pypi.python.org/packages/d3/30/54c6dab05a4dec44db25ff309f1fbb6b7a8bde3f2bade38bb9da67bbab8f/backport_ipaddress-0.1.tar.gz"; |
|
428 | 415 | md5 = "9c1f45f4361f71b124d7293a60006c05"; |
|
429 | 416 | }; |
|
430 | 417 | meta = { |
|
431 | 418 | license = [ pkgs.lib.licenses.psfl ]; |
|
432 | 419 | }; |
|
433 | 420 | }; |
|
434 | 421 | backports.shutil-get-terminal-size = super.buildPythonPackage { |
|
435 | 422 | name = "backports.shutil-get-terminal-size-1.0.0"; |
|
436 | 423 | buildInputs = with self; []; |
|
437 | 424 | doCheck = false; |
|
438 | 425 | propagatedBuildInputs = with self; []; |
|
439 | 426 | src = fetchurl { |
|
440 | 427 | url = "https://pypi.python.org/packages/ec/9c/368086faa9c016efce5da3e0e13ba392c9db79e3ab740b763fe28620b18b/backports.shutil_get_terminal_size-1.0.0.tar.gz"; |
|
441 | 428 | md5 = "03267762480bd86b50580dc19dff3c66"; |
|
442 | 429 | }; |
|
443 | 430 | meta = { |
|
444 | 431 | license = [ pkgs.lib.licenses.mit ]; |
|
445 | 432 | }; |
|
446 | 433 | }; |
|
447 | 434 | bottle = super.buildPythonPackage { |
|
448 | 435 | name = "bottle-0.12.8"; |
|
449 | 436 | buildInputs = with self; []; |
|
450 | 437 | doCheck = false; |
|
451 | 438 | propagatedBuildInputs = with self; []; |
|
452 | 439 | src = fetchurl { |
|
453 | 440 | url = "https://pypi.python.org/packages/52/df/e4a408f3a7af396d186d4ecd3b389dd764f0f943b4fa8d257bfe7b49d343/bottle-0.12.8.tar.gz"; |
|
454 | 441 | md5 = "13132c0a8f607bf860810a6ee9064c5b"; |
|
455 | 442 | }; |
|
456 | 443 | meta = { |
|
457 | 444 | license = [ pkgs.lib.licenses.mit ]; |
|
458 | 445 | }; |
|
459 | 446 | }; |
|
460 | 447 | bumpversion = super.buildPythonPackage { |
|
461 | 448 | name = "bumpversion-0.5.3"; |
|
462 | 449 | buildInputs = with self; []; |
|
463 | 450 | doCheck = false; |
|
464 | 451 | propagatedBuildInputs = with self; []; |
|
465 | 452 | src = fetchurl { |
|
466 | 453 | url = "https://pypi.python.org/packages/14/41/8c9da3549f8e00c84f0432c3a8cf8ed6898374714676aab91501d48760db/bumpversion-0.5.3.tar.gz"; |
|
467 | 454 | md5 = "c66a3492eafcf5ad4b024be9fca29820"; |
|
468 | 455 | }; |
|
469 | 456 | meta = { |
|
470 | 457 | license = [ pkgs.lib.licenses.mit ]; |
|
471 | 458 | }; |
|
472 | 459 | }; |
|
473 | 460 | celery = super.buildPythonPackage { |
|
474 | 461 | name = "celery-2.2.10"; |
|
475 | 462 | buildInputs = with self; []; |
|
476 | 463 | doCheck = false; |
|
477 | 464 | propagatedBuildInputs = with self; [python-dateutil anyjson kombu pyparsing]; |
|
478 | 465 | src = fetchurl { |
|
479 | 466 | url = "https://pypi.python.org/packages/b1/64/860fd50e45844c83442e7953effcddeff66b2851d90b2d784f7201c111b8/celery-2.2.10.tar.gz"; |
|
480 | 467 | md5 = "898bc87e54f278055b561316ba73e222"; |
|
481 | 468 | }; |
|
482 | 469 | meta = { |
|
483 | 470 | license = [ pkgs.lib.licenses.bsdOriginal ]; |
|
484 | 471 | }; |
|
485 | 472 | }; |
|
486 | 473 | channelstream = super.buildPythonPackage { |
|
487 | 474 | name = "channelstream-0.5.2"; |
|
488 | 475 | buildInputs = with self; []; |
|
489 | 476 | doCheck = false; |
|
490 | 477 | propagatedBuildInputs = with self; [gevent ws4py pyramid pyramid-jinja2 itsdangerous requests six]; |
|
491 | 478 | src = fetchurl { |
|
492 | 479 | url = "https://pypi.python.org/packages/2b/31/29a8e085cf5bf97fa88e7b947adabfc581a18a3463adf77fb6dada34a65f/channelstream-0.5.2.tar.gz"; |
|
493 | 480 | md5 = "1c5eb2a8a405be6f1073da94da6d81d3"; |
|
494 | 481 | }; |
|
495 | 482 | meta = { |
|
496 | 483 | license = [ pkgs.lib.licenses.bsdOriginal ]; |
|
497 | 484 | }; |
|
498 | 485 | }; |
|
499 | 486 | click = super.buildPythonPackage { |
|
500 | 487 | name = "click-5.1"; |
|
501 | 488 | buildInputs = with self; []; |
|
502 | 489 | doCheck = false; |
|
503 | 490 | propagatedBuildInputs = with self; []; |
|
504 | 491 | src = fetchurl { |
|
505 | 492 | url = "https://pypi.python.org/packages/b7/34/a496632c4fb6c1ee76efedf77bb8d28b29363d839953d95095b12defe791/click-5.1.tar.gz"; |
|
506 | 493 | md5 = "9c5323008cccfe232a8b161fc8196d41"; |
|
507 | 494 | }; |
|
508 | 495 | meta = { |
|
509 | 496 | license = [ pkgs.lib.licenses.bsdOriginal ]; |
|
510 | 497 | }; |
|
511 | 498 | }; |
|
512 | 499 | colander = super.buildPythonPackage { |
|
513 | 500 | name = "colander-1.2"; |
|
514 | 501 | buildInputs = with self; []; |
|
515 | 502 | doCheck = false; |
|
516 | 503 | propagatedBuildInputs = with self; [translationstring iso8601]; |
|
517 | 504 | src = fetchurl { |
|
518 | 505 | url = "https://pypi.python.org/packages/14/23/c9ceba07a6a1dc0eefbb215fc0dc64aabc2b22ee756bc0f0c13278fa0887/colander-1.2.tar.gz"; |
|
519 | 506 | md5 = "83db21b07936a0726e588dae1914b9ed"; |
|
520 | 507 | }; |
|
521 | 508 | meta = { |
|
522 | 509 | license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ]; |
|
523 | 510 | }; |
|
524 | 511 | }; |
|
525 | 512 | configobj = super.buildPythonPackage { |
|
526 | 513 | name = "configobj-5.0.6"; |
|
527 | 514 | buildInputs = with self; []; |
|
528 | 515 | doCheck = false; |
|
529 | 516 | propagatedBuildInputs = with self; [six]; |
|
530 | 517 | src = fetchurl { |
|
531 | 518 | url = "https://pypi.python.org/packages/64/61/079eb60459c44929e684fa7d9e2fdca403f67d64dd9dbac27296be2e0fab/configobj-5.0.6.tar.gz"; |
|
532 | 519 | md5 = "e472a3a1c2a67bb0ec9b5d54c13a47d6"; |
|
533 | 520 | }; |
|
534 | 521 | meta = { |
|
535 | 522 | license = [ pkgs.lib.licenses.bsdOriginal ]; |
|
536 | 523 | }; |
|
537 | 524 | }; |
|
538 | 525 | cov-core = super.buildPythonPackage { |
|
539 | 526 | name = "cov-core-1.15.0"; |
|
540 | 527 | buildInputs = with self; []; |
|
541 | 528 | doCheck = false; |
|
542 | 529 | propagatedBuildInputs = with self; [coverage]; |
|
543 | 530 | src = fetchurl { |
|
544 | 531 | url = "https://pypi.python.org/packages/4b/87/13e75a47b4ba1be06f29f6d807ca99638bedc6b57fa491cd3de891ca2923/cov-core-1.15.0.tar.gz"; |
|
545 | 532 | md5 = "f519d4cb4c4e52856afb14af52919fe6"; |
|
546 | 533 | }; |
|
547 | 534 | meta = { |
|
548 | 535 | license = [ pkgs.lib.licenses.mit ]; |
|
549 | 536 | }; |
|
550 | 537 | }; |
|
551 | 538 | coverage = super.buildPythonPackage { |
|
552 | 539 | name = "coverage-3.7.1"; |
|
553 | 540 | buildInputs = with self; []; |
|
554 | 541 | doCheck = false; |
|
555 | 542 | propagatedBuildInputs = with self; []; |
|
556 | 543 | src = fetchurl { |
|
557 | 544 | url = "https://pypi.python.org/packages/09/4f/89b06c7fdc09687bca507dc411c342556ef9c5a3b26756137a4878ff19bf/coverage-3.7.1.tar.gz"; |
|
558 | 545 | md5 = "c47b36ceb17eaff3ecfab3bcd347d0df"; |
|
559 | 546 | }; |
|
560 | 547 | meta = { |
|
561 | 548 | license = [ pkgs.lib.licenses.bsdOriginal ]; |
|
562 | 549 | }; |
|
563 | 550 | }; |
|
564 | 551 | cssselect = super.buildPythonPackage { |
|
565 | 552 | name = "cssselect-0.9.1"; |
|
566 | 553 | buildInputs = with self; []; |
|
567 | 554 | doCheck = false; |
|
568 | 555 | propagatedBuildInputs = with self; []; |
|
569 | 556 | src = fetchurl { |
|
570 | 557 | url = "https://pypi.python.org/packages/aa/e5/9ee1460d485b94a6d55732eb7ad5b6c084caf73dd6f9cb0bb7d2a78fafe8/cssselect-0.9.1.tar.gz"; |
|
571 | 558 | md5 = "c74f45966277dc7a0f768b9b0f3522ac"; |
|
572 | 559 | }; |
|
573 | 560 | meta = { |
|
574 | 561 | license = [ pkgs.lib.licenses.bsdOriginal ]; |
|
575 | 562 | }; |
|
576 | 563 | }; |
|
577 | 564 | decorator = super.buildPythonPackage { |
|
578 | 565 | name = "decorator-3.4.2"; |
|
579 | 566 | buildInputs = with self; []; |
|
580 | 567 | doCheck = false; |
|
581 | 568 | propagatedBuildInputs = with self; []; |
|
582 | 569 | src = fetchurl { |
|
583 | 570 | url = "https://pypi.python.org/packages/35/3a/42566eb7a2cbac774399871af04e11d7ae3fc2579e7dae85213b8d1d1c57/decorator-3.4.2.tar.gz"; |
|
584 | 571 | md5 = "9e0536870d2b83ae27d58dbf22582f4d"; |
|
585 | 572 | }; |
|
586 | 573 | meta = { |
|
587 | 574 | license = [ pkgs.lib.licenses.bsdOriginal ]; |
|
588 | 575 | }; |
|
589 | 576 | }; |
|
590 | 577 | deform = super.buildPythonPackage { |
|
591 | 578 | name = "deform-2.0a2"; |
|
592 | 579 | buildInputs = with self; []; |
|
593 | 580 | doCheck = false; |
|
594 | 581 | propagatedBuildInputs = with self; [Chameleon colander peppercorn translationstring zope.deprecation]; |
|
595 | 582 | src = fetchurl { |
|
596 | 583 | url = "https://pypi.python.org/packages/8d/b3/aab57e81da974a806dc9c5fa024a6404720f890a6dcf2e80885e3cb4609a/deform-2.0a2.tar.gz"; |
|
597 | 584 | md5 = "7a90d41f7fbc18002ce74f39bd90a5e4"; |
|
598 | 585 | }; |
|
599 | 586 | meta = { |
|
600 | 587 | license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ]; |
|
601 | 588 | }; |
|
602 | 589 | }; |
|
603 | 590 | docutils = super.buildPythonPackage { |
|
604 | 591 | name = "docutils-0.12"; |
|
605 | 592 | buildInputs = with self; []; |
|
606 | 593 | doCheck = false; |
|
607 | 594 | propagatedBuildInputs = with self; []; |
|
608 | 595 | src = fetchurl { |
|
609 | 596 | url = "https://pypi.python.org/packages/37/38/ceda70135b9144d84884ae2fc5886c6baac4edea39550f28bcd144c1234d/docutils-0.12.tar.gz"; |
|
610 | 597 | md5 = "4622263b62c5c771c03502afa3157768"; |
|
611 | 598 | }; |
|
612 | 599 | meta = { |
|
613 | 600 | license = [ pkgs.lib.licenses.bsdOriginal pkgs.lib.licenses.publicDomain pkgs.lib.licenses.gpl1 { fullName = "public domain, Python, 2-Clause BSD, GPL 3 (see COPYING.txt)"; } pkgs.lib.licenses.psfl ]; |
|
614 | 601 | }; |
|
615 | 602 | }; |
|
616 | 603 | dogpile.cache = super.buildPythonPackage { |
|
617 | 604 | name = "dogpile.cache-0.6.1"; |
|
618 | 605 | buildInputs = with self; []; |
|
619 | 606 | doCheck = false; |
|
620 | 607 | propagatedBuildInputs = with self; []; |
|
621 | 608 | src = fetchurl { |
|
622 | 609 | url = "https://pypi.python.org/packages/f6/a0/6f2142c58c6588d17c734265b103ae1cd0741e1681dd9483a63f22033375/dogpile.cache-0.6.1.tar.gz"; |
|
623 | 610 | md5 = "35d7fb30f22bbd0685763d894dd079a9"; |
|
624 | 611 | }; |
|
625 | 612 | meta = { |
|
626 | 613 | license = [ pkgs.lib.licenses.bsdOriginal ]; |
|
627 | 614 | }; |
|
628 | 615 | }; |
|
629 | 616 | dogpile.core = super.buildPythonPackage { |
|
630 | 617 | name = "dogpile.core-0.4.1"; |
|
631 | 618 | buildInputs = with self; []; |
|
632 | 619 | doCheck = false; |
|
633 | 620 | propagatedBuildInputs = with self; []; |
|
634 | 621 | src = fetchurl { |
|
635 | 622 | url = "https://pypi.python.org/packages/0e/77/e72abc04c22aedf874301861e5c1e761231c288b5de369c18be8f4b5c9bb/dogpile.core-0.4.1.tar.gz"; |
|
636 | 623 | md5 = "01cb19f52bba3e95c9b560f39341f045"; |
|
637 | 624 | }; |
|
638 | 625 | meta = { |
|
639 | 626 | license = [ pkgs.lib.licenses.bsdOriginal ]; |
|
640 | 627 | }; |
|
641 | 628 | }; |
|
642 | 629 | ecdsa = super.buildPythonPackage { |
|
643 | 630 | name = "ecdsa-0.11"; |
|
644 | 631 | buildInputs = with self; []; |
|
645 | 632 | doCheck = false; |
|
646 | 633 | propagatedBuildInputs = with self; []; |
|
647 | 634 | src = fetchurl { |
|
648 | 635 | url = "https://pypi.python.org/packages/6c/3f/92fe5dcdcaa7bd117be21e5520c9a54375112b66ec000d209e9e9519fad1/ecdsa-0.11.tar.gz"; |
|
649 | 636 | md5 = "8ef586fe4dbb156697d756900cb41d7c"; |
|
650 | 637 | }; |
|
651 | 638 | meta = { |
|
652 | 639 | license = [ pkgs.lib.licenses.mit ]; |
|
653 | 640 | }; |
|
654 | 641 | }; |
|
655 | 642 | elasticsearch = super.buildPythonPackage { |
|
656 | 643 | name = "elasticsearch-2.3.0"; |
|
657 | 644 | buildInputs = with self; []; |
|
658 | 645 | doCheck = false; |
|
659 | 646 | propagatedBuildInputs = with self; [urllib3]; |
|
660 | 647 | src = fetchurl { |
|
661 | 648 | url = "https://pypi.python.org/packages/10/35/5fd52c5f0b0ee405ed4b5195e8bce44c5e041787680dc7b94b8071cac600/elasticsearch-2.3.0.tar.gz"; |
|
662 | 649 | md5 = "2550f3b51629cf1ef9636608af92c340"; |
|
663 | 650 | }; |
|
664 | 651 | meta = { |
|
665 | 652 | license = [ pkgs.lib.licenses.asl20 ]; |
|
666 | 653 | }; |
|
667 | 654 | }; |
|
668 | 655 | elasticsearch-dsl = super.buildPythonPackage { |
|
669 | 656 | name = "elasticsearch-dsl-2.2.0"; |
|
670 | 657 | buildInputs = with self; []; |
|
671 | 658 | doCheck = false; |
|
672 | 659 | propagatedBuildInputs = with self; [six python-dateutil elasticsearch]; |
|
673 | 660 | src = fetchurl { |
|
674 | 661 | url = "https://pypi.python.org/packages/66/2f/52a086968788e58461641570f45c3207a52d46ebbe9b77dc22b6a8ffda66/elasticsearch-dsl-2.2.0.tar.gz"; |
|
675 | 662 | md5 = "fa6bd3c87ea3caa8f0f051bc37c53221"; |
|
676 | 663 | }; |
|
677 | 664 | meta = { |
|
678 | 665 | license = [ pkgs.lib.licenses.asl20 ]; |
|
679 | 666 | }; |
|
680 | 667 | }; |
|
681 | 668 | enum34 = super.buildPythonPackage { |
|
682 | 669 | name = "enum34-1.1.6"; |
|
683 | 670 | buildInputs = with self; []; |
|
684 | 671 | doCheck = false; |
|
685 | 672 | propagatedBuildInputs = with self; []; |
|
686 | 673 | src = fetchurl { |
|
687 | 674 | url = "https://pypi.python.org/packages/bf/3e/31d502c25302814a7c2f1d3959d2a3b3f78e509002ba91aea64993936876/enum34-1.1.6.tar.gz"; |
|
688 | 675 | md5 = "5f13a0841a61f7fc295c514490d120d0"; |
|
689 | 676 | }; |
|
690 | 677 | meta = { |
|
691 | 678 | license = [ pkgs.lib.licenses.bsdOriginal ]; |
|
692 | 679 | }; |
|
693 | 680 | }; |
|
694 | 681 | future = super.buildPythonPackage { |
|
695 | 682 | name = "future-0.14.3"; |
|
696 | 683 | buildInputs = with self; []; |
|
697 | 684 | doCheck = false; |
|
698 | 685 | propagatedBuildInputs = with self; []; |
|
699 | 686 | src = fetchurl { |
|
700 | 687 | url = "https://pypi.python.org/packages/83/80/8ef3a11a15f8eaafafa0937b20c1b3f73527e69ab6b3fa1cf94a5a96aabb/future-0.14.3.tar.gz"; |
|
701 | 688 | md5 = "e94079b0bd1fc054929e8769fc0f6083"; |
|
702 | 689 | }; |
|
703 | 690 | meta = { |
|
704 | 691 | license = [ { fullName = "OSI Approved"; } pkgs.lib.licenses.mit ]; |
|
705 | 692 | }; |
|
706 | 693 | }; |
|
707 | 694 | futures = super.buildPythonPackage { |
|
708 | 695 | name = "futures-3.0.2"; |
|
709 | 696 | buildInputs = with self; []; |
|
710 | 697 | doCheck = false; |
|
711 | 698 | propagatedBuildInputs = with self; []; |
|
712 | 699 | src = fetchurl { |
|
713 | 700 | url = "https://pypi.python.org/packages/f8/e7/fc0fcbeb9193ba2d4de00b065e7fd5aecd0679e93ce95a07322b2b1434f4/futures-3.0.2.tar.gz"; |
|
714 | 701 | md5 = "42aaf1e4de48d6e871d77dc1f9d96d5a"; |
|
715 | 702 | }; |
|
716 | 703 | meta = { |
|
717 | 704 | license = [ pkgs.lib.licenses.bsdOriginal ]; |
|
718 | 705 | }; |
|
719 | 706 | }; |
|
720 | 707 | gevent = super.buildPythonPackage { |
|
721 | 708 | name = "gevent-1.1.2"; |
|
722 | 709 | buildInputs = with self; []; |
|
723 | 710 | doCheck = false; |
|
724 | 711 | propagatedBuildInputs = with self; [greenlet]; |
|
725 | 712 | src = fetchurl { |
|
726 | 713 | url = "https://pypi.python.org/packages/43/8f/cb3224a0e6ab663547f45c10d0651cfd52633fde4283bf68d627084df8cc/gevent-1.1.2.tar.gz"; |
|
727 | 714 | md5 = "bb32a2f852a4997138014d5007215c6e"; |
|
728 | 715 | }; |
|
729 | 716 | meta = { |
|
730 | 717 | license = [ pkgs.lib.licenses.mit ]; |
|
731 | 718 | }; |
|
732 | 719 | }; |
|
733 | 720 | gnureadline = super.buildPythonPackage { |
|
734 | 721 | name = "gnureadline-6.3.3"; |
|
735 | 722 | buildInputs = with self; []; |
|
736 | 723 | doCheck = false; |
|
737 | 724 | propagatedBuildInputs = with self; []; |
|
738 | 725 | src = fetchurl { |
|
739 | 726 | url = "https://pypi.python.org/packages/3a/ee/2c3f568b0a74974791ac590ec742ef6133e2fbd287a074ba72a53fa5e97c/gnureadline-6.3.3.tar.gz"; |
|
740 | 727 | md5 = "c4af83c9a3fbeac8f2da9b5a7c60e51c"; |
|
741 | 728 | }; |
|
742 | 729 | meta = { |
|
743 | 730 | license = [ pkgs.lib.licenses.gpl1 ]; |
|
744 | 731 | }; |
|
745 | 732 | }; |
|
746 | 733 | gprof2dot = super.buildPythonPackage { |
|
747 | 734 | name = "gprof2dot-2016.10.13"; |
|
748 | 735 | buildInputs = with self; []; |
|
749 | 736 | doCheck = false; |
|
750 | 737 | propagatedBuildInputs = with self; []; |
|
751 | 738 | src = fetchurl { |
|
752 | 739 | url = "https://pypi.python.org/packages/a0/e0/73c71baed306f0402a00a94ffc7b2be94ad1296dfcb8b46912655b93154c/gprof2dot-2016.10.13.tar.gz"; |
|
753 | 740 | md5 = "0125401f15fd2afe1df686a76c64a4fd"; |
|
754 | 741 | }; |
|
755 | 742 | meta = { |
|
756 | 743 | license = [ { fullName = "LGPL"; } ]; |
|
757 | 744 | }; |
|
758 | 745 | }; |
|
759 | 746 | greenlet = super.buildPythonPackage { |
|
760 | 747 | name = "greenlet-0.4.10"; |
|
761 | 748 | buildInputs = with self; []; |
|
762 | 749 | doCheck = false; |
|
763 | 750 | propagatedBuildInputs = with self; []; |
|
764 | 751 | src = fetchurl { |
|
765 | 752 | url = "https://pypi.python.org/packages/67/62/ca2a95648666eaa2ffeb6a9b3964f21d419ae27f82f2e66b53da5b943fc4/greenlet-0.4.10.zip"; |
|
766 | 753 | md5 = "bed0c4b3b896702131f4d5c72f87c41d"; |
|
767 | 754 | }; |
|
768 | 755 | meta = { |
|
769 | 756 | license = [ pkgs.lib.licenses.mit ]; |
|
770 | 757 | }; |
|
771 | 758 | }; |
|
772 | 759 | gunicorn = super.buildPythonPackage { |
|
773 | 760 | name = "gunicorn-19.6.0"; |
|
774 | 761 | buildInputs = with self; []; |
|
775 | 762 | doCheck = false; |
|
776 | 763 | propagatedBuildInputs = with self; []; |
|
777 | 764 | src = fetchurl { |
|
778 | 765 | url = "https://pypi.python.org/packages/84/ce/7ea5396efad1cef682bbc4068e72a0276341d9d9d0f501da609fab9fcb80/gunicorn-19.6.0.tar.gz"; |
|
779 | 766 | md5 = "338e5e8a83ea0f0625f768dba4597530"; |
|
780 | 767 | }; |
|
781 | 768 | meta = { |
|
782 | 769 | license = [ pkgs.lib.licenses.mit ]; |
|
783 | 770 | }; |
|
784 | 771 | }; |
|
785 | 772 | infrae.cache = super.buildPythonPackage { |
|
786 | 773 | name = "infrae.cache-1.0.1"; |
|
787 | 774 | buildInputs = with self; []; |
|
788 | 775 | doCheck = false; |
|
789 | 776 | propagatedBuildInputs = with self; [Beaker repoze.lru]; |
|
790 | 777 | src = fetchurl { |
|
791 | 778 | url = "https://pypi.python.org/packages/bb/f0/e7d5e984cf6592fd2807dc7bc44a93f9d18e04e6a61f87fdfb2622422d74/infrae.cache-1.0.1.tar.gz"; |
|
792 | 779 | md5 = "b09076a766747e6ed2a755cc62088e32"; |
|
793 | 780 | }; |
|
794 | 781 | meta = { |
|
795 | 782 | license = [ pkgs.lib.licenses.zpt21 ]; |
|
796 | 783 | }; |
|
797 | 784 | }; |
|
798 | 785 | invoke = super.buildPythonPackage { |
|
799 | 786 | name = "invoke-0.13.0"; |
|
800 | 787 | buildInputs = with self; []; |
|
801 | 788 | doCheck = false; |
|
802 | 789 | propagatedBuildInputs = with self; []; |
|
803 | 790 | src = fetchurl { |
|
804 | 791 | url = "https://pypi.python.org/packages/47/bf/d07ef52fa1ac645468858bbac7cb95b246a972a045e821493d17d89c81be/invoke-0.13.0.tar.gz"; |
|
805 | 792 | md5 = "c0d1ed4bfb34eaab551662d8cfee6540"; |
|
806 | 793 | }; |
|
807 | 794 | meta = { |
|
808 | 795 | license = [ pkgs.lib.licenses.bsdOriginal ]; |
|
809 | 796 | }; |
|
810 | 797 | }; |
|
811 | 798 | ipdb = super.buildPythonPackage { |
|
812 | 799 | name = "ipdb-0.10.1"; |
|
813 | 800 | buildInputs = with self; []; |
|
814 | 801 | doCheck = false; |
|
815 | 802 | propagatedBuildInputs = with self; [ipython setuptools]; |
|
816 | 803 | src = fetchurl { |
|
817 | 804 | url = "https://pypi.python.org/packages/eb/0a/0a37dc19572580336ad3813792c0d18c8d7117c2d66fc63c501f13a7a8f8/ipdb-0.10.1.tar.gz"; |
|
818 | 805 | md5 = "4aeab65f633ddc98ebdb5eebf08dc713"; |
|
819 | 806 | }; |
|
820 | 807 | meta = { |
|
821 | 808 | license = [ pkgs.lib.licenses.bsdOriginal ]; |
|
822 | 809 | }; |
|
823 | 810 | }; |
|
824 | 811 | ipython = super.buildPythonPackage { |
|
825 | 812 | name = "ipython-5.1.0"; |
|
826 | 813 | buildInputs = with self; []; |
|
827 | 814 | doCheck = false; |
|
828 | 815 | propagatedBuildInputs = with self; [setuptools decorator pickleshare simplegeneric traitlets prompt-toolkit Pygments pexpect backports.shutil-get-terminal-size pathlib2]; |
|
829 | 816 | src = fetchurl { |
|
830 | 817 | url = "https://pypi.python.org/packages/89/63/a9292f7cd9d0090a0f995e1167f3f17d5889dcbc9a175261719c513b9848/ipython-5.1.0.tar.gz"; |
|
831 | 818 | md5 = "47c8122420f65b58784cb4b9b4af35e3"; |
|
832 | 819 | }; |
|
833 | 820 | meta = { |
|
834 | 821 | license = [ pkgs.lib.licenses.bsdOriginal ]; |
|
835 | 822 | }; |
|
836 | 823 | }; |
|
837 | 824 | ipython-genutils = super.buildPythonPackage { |
|
838 | 825 | name = "ipython-genutils-0.1.0"; |
|
839 | 826 | buildInputs = with self; []; |
|
840 | 827 | doCheck = false; |
|
841 | 828 | propagatedBuildInputs = with self; []; |
|
842 | 829 | src = fetchurl { |
|
843 | 830 | url = "https://pypi.python.org/packages/71/b7/a64c71578521606edbbce15151358598f3dfb72a3431763edc2baf19e71f/ipython_genutils-0.1.0.tar.gz"; |
|
844 | 831 | md5 = "9a8afbe0978adbcbfcb3b35b2d015a56"; |
|
845 | 832 | }; |
|
846 | 833 | meta = { |
|
847 | 834 | license = [ pkgs.lib.licenses.bsdOriginal ]; |
|
848 | 835 | }; |
|
849 | 836 | }; |
|
850 | 837 | iso8601 = super.buildPythonPackage { |
|
851 | 838 | name = "iso8601-0.1.11"; |
|
852 | 839 | buildInputs = with self; []; |
|
853 | 840 | doCheck = false; |
|
854 | 841 | propagatedBuildInputs = with self; []; |
|
855 | 842 | src = fetchurl { |
|
856 | 843 | url = "https://pypi.python.org/packages/c0/75/c9209ee4d1b5975eb8c2cba4428bde6b61bd55664a98290dd015cdb18e98/iso8601-0.1.11.tar.gz"; |
|
857 | 844 | md5 = "b06d11cd14a64096f907086044f0fe38"; |
|
858 | 845 | }; |
|
859 | 846 | meta = { |
|
860 | 847 | license = [ pkgs.lib.licenses.mit ]; |
|
861 | 848 | }; |
|
862 | 849 | }; |
|
863 | 850 | itsdangerous = super.buildPythonPackage { |
|
864 | 851 | name = "itsdangerous-0.24"; |
|
865 | 852 | buildInputs = with self; []; |
|
866 | 853 | doCheck = false; |
|
867 | 854 | propagatedBuildInputs = with self; []; |
|
868 | 855 | src = fetchurl { |
|
869 | 856 | url = "https://pypi.python.org/packages/dc/b4/a60bcdba945c00f6d608d8975131ab3f25b22f2bcfe1dab221165194b2d4/itsdangerous-0.24.tar.gz"; |
|
870 | 857 | md5 = "a3d55aa79369aef5345c036a8a26307f"; |
|
871 | 858 | }; |
|
872 | 859 | meta = { |
|
873 | 860 | license = [ pkgs.lib.licenses.bsdOriginal ]; |
|
874 | 861 | }; |
|
875 | 862 | }; |
|
876 | 863 | kombu = super.buildPythonPackage { |
|
877 | 864 | name = "kombu-1.5.1"; |
|
878 | 865 | buildInputs = with self; []; |
|
879 | 866 | doCheck = false; |
|
880 | 867 | propagatedBuildInputs = with self; [anyjson amqplib]; |
|
881 | 868 | src = fetchurl { |
|
882 | 869 | url = "https://pypi.python.org/packages/19/53/74bf2a624644b45f0850a638752514fc10a8e1cbd738f10804951a6df3f5/kombu-1.5.1.tar.gz"; |
|
883 | 870 | md5 = "50662f3c7e9395b3d0721fb75d100b63"; |
|
884 | 871 | }; |
|
885 | 872 | meta = { |
|
886 | 873 | license = [ pkgs.lib.licenses.bsdOriginal ]; |
|
887 | 874 | }; |
|
888 | 875 | }; |
|
889 | 876 | lxml = super.buildPythonPackage { |
|
890 | 877 | name = "lxml-3.4.4"; |
|
891 | 878 | buildInputs = with self; []; |
|
892 | 879 | doCheck = false; |
|
893 | 880 | propagatedBuildInputs = with self; []; |
|
894 | 881 | src = fetchurl { |
|
895 | 882 | url = "https://pypi.python.org/packages/63/c7/4f2a2a4ad6c6fa99b14be6b3c1cece9142e2d915aa7c43c908677afc8fa4/lxml-3.4.4.tar.gz"; |
|
896 | 883 | md5 = "a9a65972afc173ec7a39c585f4eea69c"; |
|
897 | 884 | }; |
|
898 | 885 | meta = { |
|
899 | 886 | license = [ pkgs.lib.licenses.bsdOriginal ]; |
|
900 | 887 | }; |
|
901 | 888 | }; |
|
902 | 889 | meld3 = super.buildPythonPackage { |
|
903 | 890 | name = "meld3-1.0.2"; |
|
904 | 891 | buildInputs = with self; []; |
|
905 | 892 | doCheck = false; |
|
906 | 893 | propagatedBuildInputs = with self; []; |
|
907 | 894 | src = fetchurl { |
|
908 | 895 | url = "https://pypi.python.org/packages/45/a0/317c6422b26c12fe0161e936fc35f36552069ba8e6f7ecbd99bbffe32a5f/meld3-1.0.2.tar.gz"; |
|
909 | 896 | md5 = "3ccc78cd79cffd63a751ad7684c02c91"; |
|
910 | 897 | }; |
|
911 | 898 | meta = { |
|
912 | 899 | license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ]; |
|
913 | 900 | }; |
|
914 | 901 | }; |
|
915 | 902 | mock = super.buildPythonPackage { |
|
916 | 903 | name = "mock-1.0.1"; |
|
917 | 904 | buildInputs = with self; []; |
|
918 | 905 | doCheck = false; |
|
919 | 906 | propagatedBuildInputs = with self; []; |
|
920 | 907 | src = fetchurl { |
|
921 | 908 | url = "https://pypi.python.org/packages/15/45/30273ee91feb60dabb8fbb2da7868520525f02cf910279b3047182feed80/mock-1.0.1.zip"; |
|
922 | 909 | md5 = "869f08d003c289a97c1a6610faf5e913"; |
|
923 | 910 | }; |
|
924 | 911 | meta = { |
|
925 | 912 | license = [ pkgs.lib.licenses.bsdOriginal ]; |
|
926 | 913 | }; |
|
927 | 914 | }; |
|
928 | 915 | msgpack-python = super.buildPythonPackage { |
|
929 | 916 | name = "msgpack-python-0.4.8"; |
|
930 | 917 | buildInputs = with self; []; |
|
931 | 918 | doCheck = false; |
|
932 | 919 | propagatedBuildInputs = with self; []; |
|
933 | 920 | src = fetchurl { |
|
934 | 921 | url = "https://pypi.python.org/packages/21/27/8a1d82041c7a2a51fcc73675875a5f9ea06c2663e02fcfeb708be1d081a0/msgpack-python-0.4.8.tar.gz"; |
|
935 | 922 | md5 = "dcd854fb41ee7584ebbf35e049e6be98"; |
|
936 | 923 | }; |
|
937 | 924 | meta = { |
|
938 | 925 | license = [ pkgs.lib.licenses.asl20 ]; |
|
939 | 926 | }; |
|
940 | 927 | }; |
|
941 | 928 | nose = super.buildPythonPackage { |
|
942 | 929 | name = "nose-1.3.6"; |
|
943 | 930 | buildInputs = with self; []; |
|
944 | 931 | doCheck = false; |
|
945 | 932 | propagatedBuildInputs = with self; []; |
|
946 | 933 | src = fetchurl { |
|
947 | 934 | url = "https://pypi.python.org/packages/70/c7/469e68148d17a0d3db5ed49150242fd70a74a8147b8f3f8b87776e028d99/nose-1.3.6.tar.gz"; |
|
948 | 935 | md5 = "0ca546d81ca8309080fc80cb389e7a16"; |
|
949 | 936 | }; |
|
950 | 937 | meta = { |
|
951 | 938 | license = [ { fullName = "GNU Library or Lesser General Public License (LGPL)"; } { fullName = "GNU LGPL"; } ]; |
|
952 | 939 | }; |
|
953 | 940 | }; |
|
954 | 941 | objgraph = super.buildPythonPackage { |
|
955 | 942 | name = "objgraph-2.0.0"; |
|
956 | 943 | buildInputs = with self; []; |
|
957 | 944 | doCheck = false; |
|
958 | 945 | propagatedBuildInputs = with self; []; |
|
959 | 946 | src = fetchurl { |
|
960 | 947 | url = "https://pypi.python.org/packages/d7/33/ace750b59247496ed769b170586c5def7202683f3d98e737b75b767ff29e/objgraph-2.0.0.tar.gz"; |
|
961 | 948 | md5 = "25b0d5e5adc74aa63ead15699614159c"; |
|
962 | 949 | }; |
|
963 | 950 | meta = { |
|
964 | 951 | license = [ pkgs.lib.licenses.mit ]; |
|
965 | 952 | }; |
|
966 | 953 | }; |
|
967 | 954 | packaging = super.buildPythonPackage { |
|
968 | 955 | name = "packaging-15.2"; |
|
969 | 956 | buildInputs = with self; []; |
|
970 | 957 | doCheck = false; |
|
971 | 958 | propagatedBuildInputs = with self; []; |
|
972 | 959 | src = fetchurl { |
|
973 | 960 | url = "https://pypi.python.org/packages/24/c4/185da1304f07047dc9e0c46c31db75c0351bd73458ac3efad7da3dbcfbe1/packaging-15.2.tar.gz"; |
|
974 | 961 | md5 = "c16093476f6ced42128bf610e5db3784"; |
|
975 | 962 | }; |
|
976 | 963 | meta = { |
|
977 | 964 | license = [ pkgs.lib.licenses.asl20 ]; |
|
978 | 965 | }; |
|
979 | 966 | }; |
|
980 | 967 | paramiko = super.buildPythonPackage { |
|
981 | 968 | name = "paramiko-1.15.1"; |
|
982 | 969 | buildInputs = with self; []; |
|
983 | 970 | doCheck = false; |
|
984 | 971 | propagatedBuildInputs = with self; [pycrypto ecdsa]; |
|
985 | 972 | src = fetchurl { |
|
986 | 973 | url = "https://pypi.python.org/packages/04/2b/a22d2a560c1951abbbf95a0628e245945565f70dc082d9e784666887222c/paramiko-1.15.1.tar.gz"; |
|
987 | 974 | md5 = "48c274c3f9b1282932567b21f6acf3b5"; |
|
988 | 975 | }; |
|
989 | 976 | meta = { |
|
990 | 977 | license = [ { fullName = "LGPL"; } { fullName = "GNU Library or Lesser General Public License (LGPL)"; } ]; |
|
991 | 978 | }; |
|
992 | 979 | }; |
|
993 | 980 | pathlib2 = super.buildPythonPackage { |
|
994 | 981 | name = "pathlib2-2.1.0"; |
|
995 | 982 | buildInputs = with self; []; |
|
996 | 983 | doCheck = false; |
|
997 | 984 | propagatedBuildInputs = with self; [six]; |
|
998 | 985 | src = fetchurl { |
|
999 | 986 | url = "https://pypi.python.org/packages/c9/27/8448b10d8440c08efeff0794adf7d0ed27adb98372c70c7b38f3947d4749/pathlib2-2.1.0.tar.gz"; |
|
1000 | 987 | md5 = "38e4f58b4d69dfcb9edb49a54a8b28d2"; |
|
1001 | 988 | }; |
|
1002 | 989 | meta = { |
|
1003 | 990 | license = [ pkgs.lib.licenses.mit ]; |
|
1004 | 991 | }; |
|
1005 | 992 | }; |
|
1006 | 993 | peppercorn = super.buildPythonPackage { |
|
1007 | 994 | name = "peppercorn-0.5"; |
|
1008 | 995 | buildInputs = with self; []; |
|
1009 | 996 | doCheck = false; |
|
1010 | 997 | propagatedBuildInputs = with self; []; |
|
1011 | 998 | src = fetchurl { |
|
1012 | 999 | url = "https://pypi.python.org/packages/45/ec/a62ec317d1324a01567c5221b420742f094f05ee48097e5157d32be3755c/peppercorn-0.5.tar.gz"; |
|
1013 | 1000 | md5 = "f08efbca5790019ab45d76b7244abd40"; |
|
1014 | 1001 | }; |
|
1015 | 1002 | meta = { |
|
1016 | 1003 | license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ]; |
|
1017 | 1004 | }; |
|
1018 | 1005 | }; |
|
1019 | 1006 | pexpect = super.buildPythonPackage { |
|
1020 | 1007 | name = "pexpect-4.2.1"; |
|
1021 | 1008 | buildInputs = with self; []; |
|
1022 | 1009 | doCheck = false; |
|
1023 | 1010 | propagatedBuildInputs = with self; [ptyprocess]; |
|
1024 | 1011 | src = fetchurl { |
|
1025 | 1012 | url = "https://pypi.python.org/packages/e8/13/d0b0599099d6cd23663043a2a0bb7c61e58c6ba359b2656e6fb000ef5b98/pexpect-4.2.1.tar.gz"; |
|
1026 | 1013 | md5 = "3694410001a99dff83f0b500a1ca1c95"; |
|
1027 | 1014 | }; |
|
1028 | 1015 | meta = { |
|
1029 | 1016 | license = [ pkgs.lib.licenses.isc { fullName = "ISC License (ISCL)"; } ]; |
|
1030 | 1017 | }; |
|
1031 | 1018 | }; |
|
1032 | 1019 | pickleshare = super.buildPythonPackage { |
|
1033 | 1020 | name = "pickleshare-0.7.4"; |
|
1034 | 1021 | buildInputs = with self; []; |
|
1035 | 1022 | doCheck = false; |
|
1036 | 1023 | propagatedBuildInputs = with self; [pathlib2]; |
|
1037 | 1024 | src = fetchurl { |
|
1038 | 1025 | url = "https://pypi.python.org/packages/69/fe/dd137d84daa0fd13a709e448138e310d9ea93070620c9db5454e234af525/pickleshare-0.7.4.tar.gz"; |
|
1039 | 1026 | md5 = "6a9e5dd8dfc023031f6b7b3f824cab12"; |
|
1040 | 1027 | }; |
|
1041 | 1028 | meta = { |
|
1042 | 1029 | license = [ pkgs.lib.licenses.mit ]; |
|
1043 | 1030 | }; |
|
1044 | 1031 | }; |
|
1045 | 1032 | prompt-toolkit = super.buildPythonPackage { |
|
1046 | 1033 | name = "prompt-toolkit-1.0.9"; |
|
1047 | 1034 | buildInputs = with self; []; |
|
1048 | 1035 | doCheck = false; |
|
1049 | 1036 | propagatedBuildInputs = with self; [six wcwidth]; |
|
1050 | 1037 | src = fetchurl { |
|
1051 | 1038 | url = "https://pypi.python.org/packages/83/14/5ac258da6c530eca02852ee25c7a9ff3ca78287bb4c198d0d0055845d856/prompt_toolkit-1.0.9.tar.gz"; |
|
1052 | 1039 | md5 = "a39f91a54308fb7446b1a421c11f227c"; |
|
1053 | 1040 | }; |
|
1054 | 1041 | meta = { |
|
1055 | 1042 | license = [ pkgs.lib.licenses.bsdOriginal ]; |
|
1056 | 1043 | }; |
|
1057 | 1044 | }; |
|
1058 | 1045 | psutil = super.buildPythonPackage { |
|
1059 | 1046 | name = "psutil-4.3.1"; |
|
1060 | 1047 | buildInputs = with self; []; |
|
1061 | 1048 | doCheck = false; |
|
1062 | 1049 | propagatedBuildInputs = with self; []; |
|
1063 | 1050 | src = fetchurl { |
|
1064 | 1051 | url = "https://pypi.python.org/packages/78/cc/f267a1371f229bf16db6a4e604428c3b032b823b83155bd33cef45e49a53/psutil-4.3.1.tar.gz"; |
|
1065 | 1052 | md5 = "199a366dba829c88bddaf5b41d19ddc0"; |
|
1066 | 1053 | }; |
|
1067 | 1054 | meta = { |
|
1068 | 1055 | license = [ pkgs.lib.licenses.bsdOriginal ]; |
|
1069 | 1056 | }; |
|
1070 | 1057 | }; |
|
1071 | 1058 | psycopg2 = super.buildPythonPackage { |
|
1072 | 1059 | name = "psycopg2-2.6.1"; |
|
1073 | 1060 | buildInputs = with self; []; |
|
1074 | 1061 | doCheck = false; |
|
1075 | 1062 | propagatedBuildInputs = with self; []; |
|
1076 | 1063 | src = fetchurl { |
|
1077 | 1064 | url = "https://pypi.python.org/packages/86/fd/cc8315be63a41fe000cce20482a917e874cdc1151e62cb0141f5e55f711e/psycopg2-2.6.1.tar.gz"; |
|
1078 | 1065 | md5 = "842b44f8c95517ed5b792081a2370da1"; |
|
1079 | 1066 | }; |
|
1080 | 1067 | meta = { |
|
1081 | 1068 | license = [ pkgs.lib.licenses.zpt21 { fullName = "GNU Library or Lesser General Public License (LGPL)"; } { fullName = "LGPL with exceptions or ZPL"; } ]; |
|
1082 | 1069 | }; |
|
1083 | 1070 | }; |
|
1084 | 1071 | ptyprocess = super.buildPythonPackage { |
|
1085 | 1072 | name = "ptyprocess-0.5.1"; |
|
1086 | 1073 | buildInputs = with self; []; |
|
1087 | 1074 | doCheck = false; |
|
1088 | 1075 | propagatedBuildInputs = with self; []; |
|
1089 | 1076 | src = fetchurl { |
|
1090 | 1077 | url = "https://pypi.python.org/packages/db/d7/b465161910f3d1cef593c5e002bff67e0384898f597f1a7fdc8db4c02bf6/ptyprocess-0.5.1.tar.gz"; |
|
1091 | 1078 | md5 = "94e537122914cc9ec9c1eadcd36e73a1"; |
|
1092 | 1079 | }; |
|
1093 | 1080 | meta = { |
|
1094 | 1081 | license = [ ]; |
|
1095 | 1082 | }; |
|
1096 | 1083 | }; |
|
1097 | 1084 | py = super.buildPythonPackage { |
|
1098 | 1085 | name = "py-1.4.31"; |
|
1099 | 1086 | buildInputs = with self; []; |
|
1100 | 1087 | doCheck = false; |
|
1101 | 1088 | propagatedBuildInputs = with self; []; |
|
1102 | 1089 | src = fetchurl { |
|
1103 | 1090 | url = "https://pypi.python.org/packages/f4/9a/8dfda23f36600dd701c6722316ba8a3ab4b990261f83e7d3ffc6dfedf7ef/py-1.4.31.tar.gz"; |
|
1104 | 1091 | md5 = "5d2c63c56dc3f2115ec35c066ecd582b"; |
|
1105 | 1092 | }; |
|
1106 | 1093 | meta = { |
|
1107 | 1094 | license = [ pkgs.lib.licenses.mit ]; |
|
1108 | 1095 | }; |
|
1109 | 1096 | }; |
|
1110 | 1097 | py-bcrypt = super.buildPythonPackage { |
|
1111 | 1098 | name = "py-bcrypt-0.4"; |
|
1112 | 1099 | buildInputs = with self; []; |
|
1113 | 1100 | doCheck = false; |
|
1114 | 1101 | propagatedBuildInputs = with self; []; |
|
1115 | 1102 | src = fetchurl { |
|
1116 | 1103 | url = "https://pypi.python.org/packages/68/b1/1c3068c5c4d2e35c48b38dcc865301ebfdf45f54507086ac65ced1fd3b3d/py-bcrypt-0.4.tar.gz"; |
|
1117 | 1104 | md5 = "dd8b367d6b716a2ea2e72392525f4e36"; |
|
1118 | 1105 | }; |
|
1119 | 1106 | meta = { |
|
1120 | 1107 | license = [ pkgs.lib.licenses.bsdOriginal ]; |
|
1121 | 1108 | }; |
|
1122 | 1109 | }; |
|
1123 | 1110 | py-gfm = super.buildPythonPackage { |
|
1124 | 1111 | name = "py-gfm-0.1.3"; |
|
1125 | 1112 | buildInputs = with self; []; |
|
1126 | 1113 | doCheck = false; |
|
1127 | 1114 | propagatedBuildInputs = with self; [setuptools Markdown]; |
|
1128 | 1115 | src = fetchurl { |
|
1129 | 1116 | url = "https://code.rhodecode.com/upstream/py-gfm/archive/0d66a19bc16e3d49de273c0f797d4e4781e8c0f2.tar.gz?md5=0d0d5385bfb629eea636a80b9c2bfd16"; |
|
1130 | 1117 | md5 = "0d0d5385bfb629eea636a80b9c2bfd16"; |
|
1131 | 1118 | }; |
|
1132 | 1119 | meta = { |
|
1133 | 1120 | license = [ pkgs.lib.licenses.bsdOriginal ]; |
|
1134 | 1121 | }; |
|
1135 | 1122 | }; |
|
1136 | 1123 | pycrypto = super.buildPythonPackage { |
|
1137 | 1124 | name = "pycrypto-2.6.1"; |
|
1138 | 1125 | buildInputs = with self; []; |
|
1139 | 1126 | doCheck = false; |
|
1140 | 1127 | propagatedBuildInputs = with self; []; |
|
1141 | 1128 | src = fetchurl { |
|
1142 | 1129 | url = "https://pypi.python.org/packages/60/db/645aa9af249f059cc3a368b118de33889219e0362141e75d4eaf6f80f163/pycrypto-2.6.1.tar.gz"; |
|
1143 | 1130 | md5 = "55a61a054aa66812daf5161a0d5d7eda"; |
|
1144 | 1131 | }; |
|
1145 | 1132 | meta = { |
|
1146 | 1133 | license = [ pkgs.lib.licenses.publicDomain ]; |
|
1147 | 1134 | }; |
|
1148 | 1135 | }; |
|
1149 | 1136 | pycurl = super.buildPythonPackage { |
|
1150 | 1137 | name = "pycurl-7.19.5"; |
|
1151 | 1138 | buildInputs = with self; []; |
|
1152 | 1139 | doCheck = false; |
|
1153 | 1140 | propagatedBuildInputs = with self; []; |
|
1154 | 1141 | src = fetchurl { |
|
1155 | 1142 | url = "https://pypi.python.org/packages/6c/48/13bad289ef6f4869b1d8fc11ae54de8cfb3cc4a2eb9f7419c506f763be46/pycurl-7.19.5.tar.gz"; |
|
1156 | 1143 | md5 = "47b4eac84118e2606658122104e62072"; |
|
1157 | 1144 | }; |
|
1158 | 1145 | meta = { |
|
1159 | 1146 | license = [ pkgs.lib.licenses.mit { fullName = "LGPL/MIT"; } { fullName = "GNU Library or Lesser General Public License (LGPL)"; } ]; |
|
1160 | 1147 | }; |
|
1161 | 1148 | }; |
|
1162 | 1149 | pyflakes = super.buildPythonPackage { |
|
1163 | 1150 | name = "pyflakes-0.8.1"; |
|
1164 | 1151 | buildInputs = with self; []; |
|
1165 | 1152 | doCheck = false; |
|
1166 | 1153 | propagatedBuildInputs = with self; []; |
|
1167 | 1154 | src = fetchurl { |
|
1168 | 1155 | url = "https://pypi.python.org/packages/75/22/a90ec0252f4f87f3ffb6336504de71fe16a49d69c4538dae2f12b9360a38/pyflakes-0.8.1.tar.gz"; |
|
1169 | 1156 | md5 = "905fe91ad14b912807e8fdc2ac2e2c23"; |
|
1170 | 1157 | }; |
|
1171 | 1158 | meta = { |
|
1172 | 1159 | license = [ pkgs.lib.licenses.mit ]; |
|
1173 | 1160 | }; |
|
1174 | 1161 | }; |
|
1175 | 1162 | pygments-markdown-lexer = super.buildPythonPackage { |
|
1176 | 1163 | name = "pygments-markdown-lexer-0.1.0.dev39"; |
|
1177 | 1164 | buildInputs = with self; []; |
|
1178 | 1165 | doCheck = false; |
|
1179 | 1166 | propagatedBuildInputs = with self; [Pygments]; |
|
1180 | 1167 | src = fetchurl { |
|
1181 | 1168 | url = "https://pypi.python.org/packages/c3/12/674cdee66635d638cedb2c5d9c85ce507b7b2f91bdba29e482f1b1160ff6/pygments-markdown-lexer-0.1.0.dev39.zip"; |
|
1182 | 1169 | md5 = "6360fe0f6d1f896e35b7a0142ce6459c"; |
|
1183 | 1170 | }; |
|
1184 | 1171 | meta = { |
|
1185 | 1172 | license = [ pkgs.lib.licenses.asl20 ]; |
|
1186 | 1173 | }; |
|
1187 | 1174 | }; |
|
1188 | 1175 | pyparsing = super.buildPythonPackage { |
|
1189 | 1176 | name = "pyparsing-1.5.7"; |
|
1190 | 1177 | buildInputs = with self; []; |
|
1191 | 1178 | doCheck = false; |
|
1192 | 1179 | propagatedBuildInputs = with self; []; |
|
1193 | 1180 | src = fetchurl { |
|
1194 | 1181 | url = "https://pypi.python.org/packages/2e/26/e8fb5b4256a5f5036be7ce115ef8db8d06bc537becfbdc46c6af008314ee/pyparsing-1.5.7.zip"; |
|
1195 | 1182 | md5 = "b86854857a368d6ccb4d5b6e76d0637f"; |
|
1196 | 1183 | }; |
|
1197 | 1184 | meta = { |
|
1198 | 1185 | license = [ pkgs.lib.licenses.mit ]; |
|
1199 | 1186 | }; |
|
1200 | 1187 | }; |
|
1201 | 1188 | pyramid = super.buildPythonPackage { |
|
1202 | 1189 | name = "pyramid-1.7.4"; |
|
1203 | 1190 | buildInputs = with self; []; |
|
1204 | 1191 | doCheck = false; |
|
1205 | 1192 | propagatedBuildInputs = with self; [setuptools WebOb repoze.lru zope.interface zope.deprecation venusian translationstring PasteDeploy]; |
|
1206 | 1193 | src = fetchurl { |
|
1207 | 1194 | url = "https://pypi.python.org/packages/33/91/55f5c661f8923902cd1f68d75f2b937c45e7682857356cf18f0be5493899/pyramid-1.7.4.tar.gz"; |
|
1208 | 1195 | md5 = "6ef1dfdcff9136d04490410757c4c446"; |
|
1209 | 1196 | }; |
|
1210 | 1197 | meta = { |
|
1211 | 1198 | license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ]; |
|
1212 | 1199 | }; |
|
1213 | 1200 | }; |
|
1214 | 1201 | pyramid-beaker = super.buildPythonPackage { |
|
1215 | 1202 | name = "pyramid-beaker-0.8"; |
|
1216 | 1203 | buildInputs = with self; []; |
|
1217 | 1204 | doCheck = false; |
|
1218 | 1205 | propagatedBuildInputs = with self; [pyramid Beaker]; |
|
1219 | 1206 | src = fetchurl { |
|
1220 | 1207 | url = "https://pypi.python.org/packages/d9/6e/b85426e00fd3d57f4545f74e1c3828552d8700f13ededeef9233f7bca8be/pyramid_beaker-0.8.tar.gz"; |
|
1221 | 1208 | md5 = "22f14be31b06549f80890e2c63a93834"; |
|
1222 | 1209 | }; |
|
1223 | 1210 | meta = { |
|
1224 | 1211 | license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ]; |
|
1225 | 1212 | }; |
|
1226 | 1213 | }; |
|
1227 | 1214 | pyramid-debugtoolbar = super.buildPythonPackage { |
|
1228 | 1215 | name = "pyramid-debugtoolbar-3.0.5"; |
|
1229 | 1216 | buildInputs = with self; []; |
|
1230 | 1217 | doCheck = false; |
|
1231 | 1218 | propagatedBuildInputs = with self; [pyramid pyramid-mako repoze.lru Pygments]; |
|
1232 | 1219 | src = fetchurl { |
|
1233 | 1220 | url = "https://pypi.python.org/packages/64/0e/df00bfb55605900e7a2f7e4a18dd83575a6651688e297d5a0aa4c208fd7d/pyramid_debugtoolbar-3.0.5.tar.gz"; |
|
1234 | 1221 | md5 = "aebab8c3bfdc6f89e4d3adc1d126538e"; |
|
1235 | 1222 | }; |
|
1236 | 1223 | meta = { |
|
1237 | 1224 | license = [ { fullName = "Repoze Public License"; } pkgs.lib.licenses.bsdOriginal ]; |
|
1238 | 1225 | }; |
|
1239 | 1226 | }; |
|
1240 | 1227 | pyramid-jinja2 = super.buildPythonPackage { |
|
1241 | 1228 | name = "pyramid-jinja2-2.5"; |
|
1242 | 1229 | buildInputs = with self; []; |
|
1243 | 1230 | doCheck = false; |
|
1244 | 1231 | propagatedBuildInputs = with self; [pyramid zope.deprecation Jinja2 MarkupSafe]; |
|
1245 | 1232 | src = fetchurl { |
|
1246 | 1233 | url = "https://pypi.python.org/packages/a1/80/595e26ffab7deba7208676b6936b7e5a721875710f982e59899013cae1ed/pyramid_jinja2-2.5.tar.gz"; |
|
1247 | 1234 | md5 = "07cb6547204ac5e6f0b22a954ccee928"; |
|
1248 | 1235 | }; |
|
1249 | 1236 | meta = { |
|
1250 | 1237 | license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ]; |
|
1251 | 1238 | }; |
|
1252 | 1239 | }; |
|
1253 | 1240 | pyramid-mako = super.buildPythonPackage { |
|
1254 | 1241 | name = "pyramid-mako-1.0.2"; |
|
1255 | 1242 | buildInputs = with self; []; |
|
1256 | 1243 | doCheck = false; |
|
1257 | 1244 | propagatedBuildInputs = with self; [pyramid Mako]; |
|
1258 | 1245 | src = fetchurl { |
|
1259 | 1246 | url = "https://pypi.python.org/packages/f1/92/7e69bcf09676d286a71cb3bbb887b16595b96f9ba7adbdc239ffdd4b1eb9/pyramid_mako-1.0.2.tar.gz"; |
|
1260 | 1247 | md5 = "ee25343a97eb76bd90abdc2a774eb48a"; |
|
1261 | 1248 | }; |
|
1262 | 1249 | meta = { |
|
1263 | 1250 | license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ]; |
|
1264 | 1251 | }; |
|
1265 | 1252 | }; |
|
1266 | 1253 | pysqlite = super.buildPythonPackage { |
|
1267 | 1254 | name = "pysqlite-2.6.3"; |
|
1268 | 1255 | buildInputs = with self; []; |
|
1269 | 1256 | doCheck = false; |
|
1270 | 1257 | propagatedBuildInputs = with self; []; |
|
1271 | 1258 | src = fetchurl { |
|
1272 | 1259 | url = "https://pypi.python.org/packages/5c/a6/1c429cd4c8069cf4bfbd0eb4d592b3f4042155a8202df83d7e9b93aa3dc2/pysqlite-2.6.3.tar.gz"; |
|
1273 | 1260 | md5 = "7ff1cedee74646b50117acff87aa1cfa"; |
|
1274 | 1261 | }; |
|
1275 | 1262 | meta = { |
|
1276 | 1263 | license = [ { fullName = "zlib/libpng License"; } { fullName = "zlib/libpng license"; } ]; |
|
1277 | 1264 | }; |
|
1278 | 1265 | }; |
|
1279 | 1266 | pytest = super.buildPythonPackage { |
|
1280 | 1267 | name = "pytest-3.0.5"; |
|
1281 | 1268 | buildInputs = with self; []; |
|
1282 | 1269 | doCheck = false; |
|
1283 | 1270 | propagatedBuildInputs = with self; [py]; |
|
1284 | 1271 | src = fetchurl { |
|
1285 | 1272 | url = "https://pypi.python.org/packages/a8/87/b7ca49efe52d2b4169f2bfc49aa5e384173c4619ea8e635f123a0dac5b75/pytest-3.0.5.tar.gz"; |
|
1286 | 1273 | md5 = "cefd527b59332688bf5db4a10aa8a7cb"; |
|
1287 | 1274 | }; |
|
1288 | 1275 | meta = { |
|
1289 | 1276 | license = [ pkgs.lib.licenses.mit ]; |
|
1290 | 1277 | }; |
|
1291 | 1278 | }; |
|
1292 | 1279 | pytest-catchlog = super.buildPythonPackage { |
|
1293 | 1280 | name = "pytest-catchlog-1.2.2"; |
|
1294 | 1281 | buildInputs = with self; []; |
|
1295 | 1282 | doCheck = false; |
|
1296 | 1283 | propagatedBuildInputs = with self; [py pytest]; |
|
1297 | 1284 | src = fetchurl { |
|
1298 | 1285 | url = "https://pypi.python.org/packages/f2/2b/2faccdb1a978fab9dd0bf31cca9f6847fbe9184a0bdcc3011ac41dd44191/pytest-catchlog-1.2.2.zip"; |
|
1299 | 1286 | md5 = "09d890c54c7456c818102b7ff8c182c8"; |
|
1300 | 1287 | }; |
|
1301 | 1288 | meta = { |
|
1302 | 1289 | license = [ pkgs.lib.licenses.mit ]; |
|
1303 | 1290 | }; |
|
1304 | 1291 | }; |
|
1305 | 1292 | pytest-cov = super.buildPythonPackage { |
|
1306 | 1293 | name = "pytest-cov-2.4.0"; |
|
1307 | 1294 | buildInputs = with self; []; |
|
1308 | 1295 | doCheck = false; |
|
1309 | 1296 | propagatedBuildInputs = with self; [pytest coverage]; |
|
1310 | 1297 | src = fetchurl { |
|
1311 | 1298 | url = "https://pypi.python.org/packages/00/c0/2bfd1fcdb9d407b8ac8185b1cb5ff458105c6b207a9a7f0e13032de9828f/pytest-cov-2.4.0.tar.gz"; |
|
1312 | 1299 | md5 = "2fda09677d232acc99ec1b3c5831e33f"; |
|
1313 | 1300 | }; |
|
1314 | 1301 | meta = { |
|
1315 | 1302 | license = [ pkgs.lib.licenses.bsdOriginal pkgs.lib.licenses.mit ]; |
|
1316 | 1303 | }; |
|
1317 | 1304 | }; |
|
1318 | 1305 | pytest-profiling = super.buildPythonPackage { |
|
1319 | 1306 | name = "pytest-profiling-1.2.2"; |
|
1320 | 1307 | buildInputs = with self; []; |
|
1321 | 1308 | doCheck = false; |
|
1322 | 1309 | propagatedBuildInputs = with self; [six pytest gprof2dot]; |
|
1323 | 1310 | src = fetchurl { |
|
1324 | 1311 | url = "https://pypi.python.org/packages/73/e8/804681323bac0bc45c520ec34185ba8469008942266d0074699b204835c1/pytest-profiling-1.2.2.tar.gz"; |
|
1325 | 1312 | md5 = "0a16d7dda2d23b91e9730fa4558cf728"; |
|
1326 | 1313 | }; |
|
1327 | 1314 | meta = { |
|
1328 | 1315 | license = [ pkgs.lib.licenses.mit ]; |
|
1329 | 1316 | }; |
|
1330 | 1317 | }; |
|
1331 | 1318 | pytest-runner = super.buildPythonPackage { |
|
1332 | 1319 | name = "pytest-runner-2.9"; |
|
1333 | 1320 | buildInputs = with self; []; |
|
1334 | 1321 | doCheck = false; |
|
1335 | 1322 | propagatedBuildInputs = with self; []; |
|
1336 | 1323 | src = fetchurl { |
|
1337 | 1324 | url = "https://pypi.python.org/packages/11/d4/c335ddf94463e451109e3494e909765c3e5205787b772e3b25ee8601b86a/pytest-runner-2.9.tar.gz"; |
|
1338 | 1325 | md5 = "2212a2e34404b0960b2fdc2c469247b2"; |
|
1339 | 1326 | }; |
|
1340 | 1327 | meta = { |
|
1341 | 1328 | license = [ pkgs.lib.licenses.mit ]; |
|
1342 | 1329 | }; |
|
1343 | 1330 | }; |
|
1344 | 1331 | pytest-sugar = super.buildPythonPackage { |
|
1345 | 1332 | name = "pytest-sugar-0.7.1"; |
|
1346 | 1333 | buildInputs = with self; []; |
|
1347 | 1334 | doCheck = false; |
|
1348 | 1335 | propagatedBuildInputs = with self; [pytest termcolor]; |
|
1349 | 1336 | src = fetchurl { |
|
1350 | 1337 | url = "https://pypi.python.org/packages/03/97/05d988b4fa870e7373e8ee4582408543b9ca2bd35c3c67b569369c6f9c49/pytest-sugar-0.7.1.tar.gz"; |
|
1351 | 1338 | md5 = "7400f7c11f3d572b2c2a3b60352d35fe"; |
|
1352 | 1339 | }; |
|
1353 | 1340 | meta = { |
|
1354 | 1341 | license = [ pkgs.lib.licenses.bsdOriginal ]; |
|
1355 | 1342 | }; |
|
1356 | 1343 | }; |
|
1357 | 1344 | pytest-timeout = super.buildPythonPackage { |
|
1358 | 1345 | name = "pytest-timeout-1.2.0"; |
|
1359 | 1346 | buildInputs = with self; []; |
|
1360 | 1347 | doCheck = false; |
|
1361 | 1348 | propagatedBuildInputs = with self; [pytest]; |
|
1362 | 1349 | src = fetchurl { |
|
1363 | 1350 | url = "https://pypi.python.org/packages/cc/b7/b2a61365ea6b6d2e8881360ae7ed8dad0327ad2df89f2f0be4a02304deb2/pytest-timeout-1.2.0.tar.gz"; |
|
1364 | 1351 | md5 = "83607d91aa163562c7ee835da57d061d"; |
|
1365 | 1352 | }; |
|
1366 | 1353 | meta = { |
|
1367 | 1354 | license = [ pkgs.lib.licenses.mit { fullName = "DFSG approved"; } ]; |
|
1368 | 1355 | }; |
|
1369 | 1356 | }; |
|
1370 | 1357 | python-dateutil = super.buildPythonPackage { |
|
1371 | 1358 | name = "python-dateutil-1.5"; |
|
1372 | 1359 | buildInputs = with self; []; |
|
1373 | 1360 | doCheck = false; |
|
1374 | 1361 | propagatedBuildInputs = with self; []; |
|
1375 | 1362 | src = fetchurl { |
|
1376 | 1363 | url = "https://pypi.python.org/packages/b4/7c/df59c89a753eb33c7c44e1dd42de0e9bc2ccdd5a4d576e0bfad97cc280cb/python-dateutil-1.5.tar.gz"; |
|
1377 | 1364 | md5 = "0dcb1de5e5cad69490a3b6ab63f0cfa5"; |
|
1378 | 1365 | }; |
|
1379 | 1366 | meta = { |
|
1380 | 1367 | license = [ pkgs.lib.licenses.psfl ]; |
|
1381 | 1368 | }; |
|
1382 | 1369 | }; |
|
1383 | 1370 | python-editor = super.buildPythonPackage { |
|
1384 | 1371 | name = "python-editor-1.0.3"; |
|
1385 | 1372 | buildInputs = with self; []; |
|
1386 | 1373 | doCheck = false; |
|
1387 | 1374 | propagatedBuildInputs = with self; []; |
|
1388 | 1375 | src = fetchurl { |
|
1389 | 1376 | url = "https://pypi.python.org/packages/65/1e/adf6e000ea5dc909aa420352d6ba37f16434c8a3c2fa030445411a1ed545/python-editor-1.0.3.tar.gz"; |
|
1390 | 1377 | md5 = "0aca5f2ef176ce68e98a5b7e31372835"; |
|
1391 | 1378 | }; |
|
1392 | 1379 | meta = { |
|
1393 | 1380 | license = [ pkgs.lib.licenses.asl20 { fullName = "Apache"; } ]; |
|
1394 | 1381 | }; |
|
1395 | 1382 | }; |
|
1396 | 1383 | python-ldap = super.buildPythonPackage { |
|
1397 | 1384 | name = "python-ldap-2.4.19"; |
|
1398 | 1385 | buildInputs = with self; []; |
|
1399 | 1386 | doCheck = false; |
|
1400 | 1387 | propagatedBuildInputs = with self; [setuptools]; |
|
1401 | 1388 | src = fetchurl { |
|
1402 | 1389 | url = "https://pypi.python.org/packages/42/81/1b64838c82e64f14d4e246ff00b52e650a35c012551b891ada2b85d40737/python-ldap-2.4.19.tar.gz"; |
|
1403 | 1390 | md5 = "b941bf31d09739492aa19ef679e94ae3"; |
|
1404 | 1391 | }; |
|
1405 | 1392 | meta = { |
|
1406 | 1393 | license = [ pkgs.lib.licenses.psfl ]; |
|
1407 | 1394 | }; |
|
1408 | 1395 | }; |
|
1409 | 1396 | python-memcached = super.buildPythonPackage { |
|
1410 | 1397 | name = "python-memcached-1.57"; |
|
1411 | 1398 | buildInputs = with self; []; |
|
1412 | 1399 | doCheck = false; |
|
1413 | 1400 | propagatedBuildInputs = with self; [six]; |
|
1414 | 1401 | src = fetchurl { |
|
1415 | 1402 | url = "https://pypi.python.org/packages/52/9d/eebc0dcbc5c7c66840ad207dfc1baa376dadb74912484bff73819cce01e6/python-memcached-1.57.tar.gz"; |
|
1416 | 1403 | md5 = "de21f64b42b2d961f3d4ad7beb5468a1"; |
|
1417 | 1404 | }; |
|
1418 | 1405 | meta = { |
|
1419 | 1406 | license = [ pkgs.lib.licenses.psfl ]; |
|
1420 | 1407 | }; |
|
1421 | 1408 | }; |
|
1422 | 1409 | python-pam = super.buildPythonPackage { |
|
1423 | 1410 | name = "python-pam-1.8.2"; |
|
1424 | 1411 | buildInputs = with self; []; |
|
1425 | 1412 | doCheck = false; |
|
1426 | 1413 | propagatedBuildInputs = with self; []; |
|
1427 | 1414 | src = fetchurl { |
|
1428 | 1415 | url = "https://pypi.python.org/packages/de/8c/f8f5d38b4f26893af267ea0b39023d4951705ab0413a39e0cf7cf4900505/python-pam-1.8.2.tar.gz"; |
|
1429 | 1416 | md5 = "db71b6b999246fb05d78ecfbe166629d"; |
|
1430 | 1417 | }; |
|
1431 | 1418 | meta = { |
|
1432 | 1419 | license = [ { fullName = "License :: OSI Approved :: MIT License"; } pkgs.lib.licenses.mit ]; |
|
1433 | 1420 | }; |
|
1434 | 1421 | }; |
|
1435 | 1422 | pytz = super.buildPythonPackage { |
|
1436 | 1423 | name = "pytz-2015.4"; |
|
1437 | 1424 | buildInputs = with self; []; |
|
1438 | 1425 | doCheck = false; |
|
1439 | 1426 | propagatedBuildInputs = with self; []; |
|
1440 | 1427 | src = fetchurl { |
|
1441 | 1428 | url = "https://pypi.python.org/packages/7e/1a/f43b5c92df7b156822030fed151327ea096bcf417e45acc23bd1df43472f/pytz-2015.4.zip"; |
|
1442 | 1429 | md5 = "233f2a2b370d03f9b5911700cc9ebf3c"; |
|
1443 | 1430 | }; |
|
1444 | 1431 | meta = { |
|
1445 | 1432 | license = [ pkgs.lib.licenses.mit ]; |
|
1446 | 1433 | }; |
|
1447 | 1434 | }; |
|
1448 | 1435 | pyzmq = super.buildPythonPackage { |
|
1449 | 1436 | name = "pyzmq-14.6.0"; |
|
1450 | 1437 | buildInputs = with self; []; |
|
1451 | 1438 | doCheck = false; |
|
1452 | 1439 | propagatedBuildInputs = with self; []; |
|
1453 | 1440 | src = fetchurl { |
|
1454 | 1441 | url = "https://pypi.python.org/packages/8a/3b/5463d5a9d712cd8bbdac335daece0d69f6a6792da4e3dd89956c0db4e4e6/pyzmq-14.6.0.tar.gz"; |
|
1455 | 1442 | md5 = "395b5de95a931afa5b14c9349a5b8024"; |
|
1456 | 1443 | }; |
|
1457 | 1444 | meta = { |
|
1458 | 1445 | license = [ pkgs.lib.licenses.bsdOriginal { fullName = "LGPL+BSD"; } { fullName = "GNU Library or Lesser General Public License (LGPL)"; } ]; |
|
1459 | 1446 | }; |
|
1460 | 1447 | }; |
|
1461 | 1448 | recaptcha-client = super.buildPythonPackage { |
|
1462 | 1449 | name = "recaptcha-client-1.0.6"; |
|
1463 | 1450 | buildInputs = with self; []; |
|
1464 | 1451 | doCheck = false; |
|
1465 | 1452 | propagatedBuildInputs = with self; []; |
|
1466 | 1453 | src = fetchurl { |
|
1467 | 1454 | url = "https://pypi.python.org/packages/0a/ea/5f2fbbfd894bdac1c68ef8d92019066cfcf9fbff5fe3d728d2b5c25c8db4/recaptcha-client-1.0.6.tar.gz"; |
|
1468 | 1455 | md5 = "74228180f7e1fb76c4d7089160b0d919"; |
|
1469 | 1456 | }; |
|
1470 | 1457 | meta = { |
|
1471 | 1458 | license = [ { fullName = "MIT/X11"; } ]; |
|
1472 | 1459 | }; |
|
1473 | 1460 | }; |
|
1474 | 1461 | repoze.lru = super.buildPythonPackage { |
|
1475 | 1462 | name = "repoze.lru-0.6"; |
|
1476 | 1463 | buildInputs = with self; []; |
|
1477 | 1464 | doCheck = false; |
|
1478 | 1465 | propagatedBuildInputs = with self; []; |
|
1479 | 1466 | src = fetchurl { |
|
1480 | 1467 | url = "https://pypi.python.org/packages/6e/1e/aa15cc90217e086dc8769872c8778b409812ff036bf021b15795638939e4/repoze.lru-0.6.tar.gz"; |
|
1481 | 1468 | md5 = "2c3b64b17a8e18b405f55d46173e14dd"; |
|
1482 | 1469 | }; |
|
1483 | 1470 | meta = { |
|
1484 | 1471 | license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ]; |
|
1485 | 1472 | }; |
|
1486 | 1473 | }; |
|
1487 | 1474 | requests = super.buildPythonPackage { |
|
1488 | 1475 | name = "requests-2.9.1"; |
|
1489 | 1476 | buildInputs = with self; []; |
|
1490 | 1477 | doCheck = false; |
|
1491 | 1478 | propagatedBuildInputs = with self; []; |
|
1492 | 1479 | src = fetchurl { |
|
1493 | 1480 | url = "https://pypi.python.org/packages/f9/6d/07c44fb1ebe04d069459a189e7dab9e4abfe9432adcd4477367c25332748/requests-2.9.1.tar.gz"; |
|
1494 | 1481 | md5 = "0b7f480d19012ec52bab78292efd976d"; |
|
1495 | 1482 | }; |
|
1496 | 1483 | meta = { |
|
1497 | 1484 | license = [ pkgs.lib.licenses.asl20 ]; |
|
1498 | 1485 | }; |
|
1499 | 1486 | }; |
|
1500 | 1487 | rhodecode-enterprise-ce = super.buildPythonPackage { |
|
1501 | 1488 | name = "rhodecode-enterprise-ce-4.7.0"; |
|
1502 | 1489 | buildInputs = with self; [pytest py pytest-cov pytest-sugar pytest-runner pytest-catchlog pytest-profiling gprof2dot pytest-timeout mock WebTest cov-core coverage cssselect lxml configobj]; |
|
1503 | 1490 | doCheck = true; |
|
1504 |      | propagatedBuildInputs = with self; [Babel Beaker FormEncode Mako Markdown MarkupSafe MySQL-python Paste PasteDeploy PasteScript Pygments pygments-markdown-lexer Pylons
|
     | 1491 | propagatedBuildInputs = with self; [Babel Beaker FormEncode Mako Markdown MarkupSafe MySQL-python Paste PasteDeploy PasteScript Pygments pygments-markdown-lexer Pylons Routes SQLAlchemy Tempita URLObject WebError WebHelpers WebHelpers2 WebOb WebTest Whoosh alembic amqplib anyjson appenlight-client authomatic backport-ipaddress celery channelstream colander decorator deform docutils gevent gunicorn infrae.cache ipython iso8601 kombu msgpack-python packaging psycopg2 py-gfm pycrypto pycurl pyparsing pyramid pyramid-debugtoolbar pyramid-mako pyramid-beaker pysqlite python-dateutil python-ldap python-memcached python-pam recaptcha-client repoze.lru requests simplejson subprocess32 waitress zope.cachedescriptors dogpile.cache dogpile.core psutil py-bcrypt]; |
|
1505 | 1492 | src = ./.; |
|
1506 | 1493 | meta = { |
|
1507 | 1494 | license = [ { fullName = "Affero GNU General Public License v3 or later (AGPLv3+)"; } { fullName = "AGPLv3, and Commercial License"; } ]; |
|
1508 | 1495 | }; |
|
1509 | 1496 | }; |
|
1510 | 1497 | rhodecode-tools = super.buildPythonPackage { |
|
1511 | 1498 | name = "rhodecode-tools-0.11.0"; |
|
1512 | 1499 | buildInputs = with self; []; |
|
1513 | 1500 | doCheck = false; |
|
1514 | 1501 | propagatedBuildInputs = with self; [click future six Mako MarkupSafe requests elasticsearch elasticsearch-dsl urllib3 Whoosh]; |
|
1515 | 1502 | src = fetchurl { |
|
1516 | 1503 | url = "https://code.rhodecode.com/rhodecode-tools-ce/archive/v0.11.0.tar.gz?md5=e5fd0a8363af08a0ced71b50ca9cce15"; |
|
1517 | 1504 | md5 = "e5fd0a8363af08a0ced71b50ca9cce15"; |
|
1518 | 1505 | }; |
|
1519 | 1506 | meta = { |
|
1520 | 1507 | license = [ { fullName = "AGPLv3 and Proprietary"; } ]; |
|
1521 | 1508 | }; |
|
1522 | 1509 | }; |
|
1523 |      | serpent = super.buildPythonPackage { |
|
1524 |      | name = "serpent-1.15"; |
|
1525 |      | buildInputs = with self; []; |
|
1526 |      | doCheck = false; |
|
1527 |      | propagatedBuildInputs = with self; []; |
|
1528 |      | src = fetchurl { |
|
1529 |      | url = "https://pypi.python.org/packages/7b/38/b2b27673a882ff2ea5871bb3e3e6b496ebbaafd1612e51990ffb158b9254/serpent-1.15.tar.gz"; |
|
1530 |      | md5 = "e27b1aad5c218e16442f52abb7c7053a"; |
|
1531 |      | }; |
|
1532 |      | meta = { |
|
1533 |      | license = [ pkgs.lib.licenses.mit ]; |
|
1534 |      | }; |
|
1535 |      | }; |
|
1536 | 1510 | setproctitle = super.buildPythonPackage { |
|
1537 | 1511 | name = "setproctitle-1.1.8"; |
|
1538 | 1512 | buildInputs = with self; []; |
|
1539 | 1513 | doCheck = false; |
|
1540 | 1514 | propagatedBuildInputs = with self; []; |
|
1541 | 1515 | src = fetchurl { |
|
1542 | 1516 | url = "https://pypi.python.org/packages/33/c3/ad367a4f4f1ca90468863ae727ac62f6edb558fc09a003d344a02cfc6ea6/setproctitle-1.1.8.tar.gz"; |
|
1543 | 1517 | md5 = "728f4c8c6031bbe56083a48594027edd"; |
|
1544 | 1518 | }; |
|
1545 | 1519 | meta = { |
|
1546 | 1520 | license = [ pkgs.lib.licenses.bsdOriginal ]; |
|
1547 | 1521 | }; |
|
1548 | 1522 | }; |
|
1549 | 1523 | setuptools = super.buildPythonPackage { |
|
1550 | 1524 | name = "setuptools-30.1.0"; |
|
1551 | 1525 | buildInputs = with self; []; |
|
1552 | 1526 | doCheck = false; |
|
1553 | 1527 | propagatedBuildInputs = with self; []; |
|
1554 | 1528 | src = fetchurl { |
|
1555 | 1529 | url = "https://pypi.python.org/packages/1e/43/002c8616db9a3e7be23c2556e39b90a32bb40ba0dc652de1999d5334d372/setuptools-30.1.0.tar.gz"; |
|
1556 | 1530 | md5 = "cac497f42e5096ac8df29e38d3f81c3e"; |
|
1557 | 1531 | }; |
|
1558 | 1532 | meta = { |
|
1559 | 1533 | license = [ pkgs.lib.licenses.mit ]; |
|
1560 | 1534 | }; |
|
1561 | 1535 | }; |
|
1562 | 1536 | setuptools-scm = super.buildPythonPackage { |
|
1563 | 1537 | name = "setuptools-scm-1.15.0"; |
|
1564 | 1538 | buildInputs = with self; []; |
|
1565 | 1539 | doCheck = false; |
|
1566 | 1540 | propagatedBuildInputs = with self; []; |
|
1567 | 1541 | src = fetchurl { |
|
1568 | 1542 | url = "https://pypi.python.org/packages/80/b7/31b6ae5fcb188e37f7e31abe75f9be90490a5456a72860fa6e643f8a3cbc/setuptools_scm-1.15.0.tar.gz"; |
|
1569 | 1543 | md5 = "b6916c78ed6253d6602444fad4279c5b"; |
|
1570 | 1544 | }; |
|
1571 | 1545 | meta = { |
|
1572 | 1546 | license = [ pkgs.lib.licenses.mit ]; |
|
1573 | 1547 | }; |
|
1574 | 1548 | }; |
|
1575 | 1549 | simplegeneric = super.buildPythonPackage { |
|
1576 | 1550 | name = "simplegeneric-0.8.1"; |
|
1577 | 1551 | buildInputs = with self; []; |
|
1578 | 1552 | doCheck = false; |
|
1579 | 1553 | propagatedBuildInputs = with self; []; |
|
1580 | 1554 | src = fetchurl { |
|
1581 | 1555 | url = "https://pypi.python.org/packages/3d/57/4d9c9e3ae9a255cd4e1106bb57e24056d3d0709fc01b2e3e345898e49d5b/simplegeneric-0.8.1.zip"; |
|
1582 | 1556 | md5 = "f9c1fab00fd981be588fc32759f474e3"; |
|
1583 | 1557 | }; |
|
1584 | 1558 | meta = { |
|
1585 | 1559 | license = [ pkgs.lib.licenses.zpt21 ]; |
|
1586 | 1560 | }; |
|
1587 | 1561 | }; |
|
1588 | 1562 | simplejson = super.buildPythonPackage { |
|
1589 | 1563 | name = "simplejson-3.7.2"; |
|
1590 | 1564 | buildInputs = with self; []; |
|
1591 | 1565 | doCheck = false; |
|
1592 | 1566 | propagatedBuildInputs = with self; []; |
|
1593 | 1567 | src = fetchurl { |
|
1594 | 1568 | url = "https://pypi.python.org/packages/6d/89/7f13f099344eea9d6722779a1f165087cb559598107844b1ac5dbd831fb1/simplejson-3.7.2.tar.gz"; |
|
1595 | 1569 | md5 = "a5fc7d05d4cb38492285553def5d4b46"; |
|
1596 | 1570 | }; |
|
1597 | 1571 | meta = { |
|
1598 | 1572 | license = [ { fullName = "Academic Free License (AFL)"; } pkgs.lib.licenses.mit ]; |
|
1599 | 1573 | }; |
|
1600 | 1574 | }; |
|
1601 | 1575 | six = super.buildPythonPackage { |
|
1602 | 1576 | name = "six-1.9.0"; |
|
1603 | 1577 | buildInputs = with self; []; |
|
1604 | 1578 | doCheck = false; |
|
1605 | 1579 | propagatedBuildInputs = with self; []; |
|
1606 | 1580 | src = fetchurl { |
|
1607 | 1581 | url = "https://pypi.python.org/packages/16/64/1dc5e5976b17466fd7d712e59cbe9fb1e18bec153109e5ba3ed6c9102f1a/six-1.9.0.tar.gz"; |
|
1608 | 1582 | md5 = "476881ef4012262dfc8adc645ee786c4"; |
|
1609 | 1583 | }; |
|
1610 | 1584 | meta = { |
|
1611 | 1585 | license = [ pkgs.lib.licenses.mit ]; |
|
1612 | 1586 | }; |
|
1613 | 1587 | }; |
|
1614 | 1588 | subprocess32 = super.buildPythonPackage { |
|
1615 | 1589 | name = "subprocess32-3.2.6"; |
|
1616 | 1590 | buildInputs = with self; []; |
|
1617 | 1591 | doCheck = false; |
|
1618 | 1592 | propagatedBuildInputs = with self; []; |
|
1619 | 1593 | src = fetchurl { |
|
1620 | 1594 | url = "https://pypi.python.org/packages/28/8d/33ccbff51053f59ae6c357310cac0e79246bbed1d345ecc6188b176d72c3/subprocess32-3.2.6.tar.gz"; |
|
1621 | 1595 | md5 = "754c5ab9f533e764f931136974b618f1"; |
|
1622 | 1596 | }; |
|
1623 | 1597 | meta = { |
|
1624 | 1598 | license = [ pkgs.lib.licenses.psfl ]; |
|
1625 | 1599 | }; |
|
1626 | 1600 | }; |
|
1627 | 1601 | supervisor = super.buildPythonPackage { |
|
1628 | 1602 | name = "supervisor-3.3.1"; |
|
1629 | 1603 | buildInputs = with self; []; |
|
1630 | 1604 | doCheck = false; |
|
1631 | 1605 | propagatedBuildInputs = with self; [meld3]; |
|
1632 | 1606 | src = fetchurl { |
|
1633 | 1607 | url = "https://pypi.python.org/packages/80/37/964c0d53cbd328796b1aeb7abea4c0f7b0e8c7197ea9b0b9967b7d004def/supervisor-3.3.1.tar.gz"; |
|
1634 | 1608 | md5 = "202f760f9bf4930ec06557bac73e5cf2"; |
|
1635 | 1609 | }; |
|
1636 | 1610 | meta = { |
|
1637 | 1611 | license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ]; |
|
1638 | 1612 | }; |
|
1639 | 1613 | }; |
|
1640 | 1614 | termcolor = super.buildPythonPackage { |
|
1641 | 1615 | name = "termcolor-1.1.0"; |
|
1642 | 1616 | buildInputs = with self; []; |
|
1643 | 1617 | doCheck = false; |
|
1644 | 1618 | propagatedBuildInputs = with self; []; |
|
1645 | 1619 | src = fetchurl { |
|
1646 | 1620 | url = "https://pypi.python.org/packages/8a/48/a76be51647d0eb9f10e2a4511bf3ffb8cc1e6b14e9e4fab46173aa79f981/termcolor-1.1.0.tar.gz"; |
|
1647 | 1621 | md5 = "043e89644f8909d462fbbfa511c768df"; |
|
1648 | 1622 | }; |
|
1649 | 1623 | meta = { |
|
1650 | 1624 | license = [ pkgs.lib.licenses.mit ]; |
|
1651 | 1625 | }; |
|
1652 | 1626 | }; |
|
1653 | 1627 | traitlets = super.buildPythonPackage { |
|
1654 | 1628 | name = "traitlets-4.3.1"; |
|
1655 | 1629 | buildInputs = with self; []; |
|
1656 | 1630 | doCheck = false; |
|
1657 | 1631 | propagatedBuildInputs = with self; [ipython-genutils six decorator enum34]; |
|
1658 | 1632 | src = fetchurl { |
|
1659 | 1633 | url = "https://pypi.python.org/packages/b1/d6/5b5aa6d5c474691909b91493da1e8972e309c9f01ecfe4aeafd272eb3234/traitlets-4.3.1.tar.gz"; |
|
1660 | 1634 | md5 = "dd0b1b6e5d31ce446d55a4b5e5083c98"; |
|
1661 | 1635 | }; |
|
1662 | 1636 | meta = { |
|
1663 | 1637 | license = [ pkgs.lib.licenses.bsdOriginal ]; |
|
1664 | 1638 | }; |
|
1665 | 1639 | }; |
|
1666 | 1640 | transifex-client = super.buildPythonPackage { |
|
1667 | 1641 | name = "transifex-client-0.10"; |
|
1668 | 1642 | buildInputs = with self; []; |
|
1669 | 1643 | doCheck = false; |
|
1670 | 1644 | propagatedBuildInputs = with self; []; |
|
1671 | 1645 | src = fetchurl { |
|
1672 | 1646 | url = "https://pypi.python.org/packages/f3/4e/7b925192aee656fb3e04fa6381c8b3dc40198047c3b4a356f6cfd642c809/transifex-client-0.10.tar.gz"; |
|
1673 | 1647 | md5 = "5549538d84b8eede6b254cd81ae024fa"; |
|
1674 | 1648 | }; |
|
1675 | 1649 | meta = { |
|
1676 | 1650 | license = [ pkgs.lib.licenses.gpl2 ]; |
|
1677 | 1651 | }; |
|
1678 | 1652 | }; |
|
1679 | 1653 | translationstring = super.buildPythonPackage { |
|
1680 | 1654 | name = "translationstring-1.3"; |
|
1681 | 1655 | buildInputs = with self; []; |
|
1682 | 1656 | doCheck = false; |
|
1683 | 1657 | propagatedBuildInputs = with self; []; |
|
1684 | 1658 | src = fetchurl { |
|
1685 | 1659 | url = "https://pypi.python.org/packages/5e/eb/bee578cc150b44c653b63f5ebe258b5d0d812ddac12497e5f80fcad5d0b4/translationstring-1.3.tar.gz"; |
|
1686 | 1660 | md5 = "a4b62e0f3c189c783a1685b3027f7c90"; |
|
1687 | 1661 | }; |
|
1688 | 1662 | meta = { |
|
1689 | 1663 | license = [ { fullName = "BSD-like (http://repoze.org/license.html)"; } ]; |
|
1690 | 1664 | }; |
|
1691 | 1665 | }; |
|
1692 | 1666 | trollius = super.buildPythonPackage { |
|
1693 | 1667 | name = "trollius-1.0.4"; |
|
1694 | 1668 | buildInputs = with self; []; |
|
1695 | 1669 | doCheck = false; |
|
1696 | 1670 | propagatedBuildInputs = with self; [futures]; |
|
1697 | 1671 | src = fetchurl { |
|
1698 | 1672 | url = "https://pypi.python.org/packages/aa/e6/4141db437f55e6ee7a3fb69663239e3fde7841a811b4bef293145ad6c836/trollius-1.0.4.tar.gz"; |
|
1699 | 1673 | md5 = "3631a464d49d0cbfd30ab2918ef2b783"; |
|
1700 | 1674 | }; |
|
1701 | 1675 | meta = { |
|
1702 | 1676 | license = [ pkgs.lib.licenses.asl20 ]; |
|
1703 | 1677 | }; |
|
1704 | 1678 | }; |
|
1705 | 1679 | uWSGI = super.buildPythonPackage { |
|
1706 | 1680 | name = "uWSGI-2.0.11.2"; |
|
1707 | 1681 | buildInputs = with self; []; |
|
1708 | 1682 | doCheck = false; |
|
1709 | 1683 | propagatedBuildInputs = with self; []; |
|
1710 | 1684 | src = fetchurl { |
|
1711 | 1685 | url = "https://pypi.python.org/packages/9b/78/918db0cfab0546afa580c1e565209c49aaf1476bbfe491314eadbe47c556/uwsgi-2.0.11.2.tar.gz"; |
|
1712 | 1686 | md5 = "1f02dcbee7f6f61de4b1fd68350cf16f"; |
|
1713 | 1687 | }; |
|
1714 | 1688 | meta = { |
|
1715 | 1689 | license = [ pkgs.lib.licenses.gpl2 ]; |
|
1716 | 1690 | }; |
|
1717 | 1691 | }; |
|
1718 | 1692 | urllib3 = super.buildPythonPackage { |
|
1719 | 1693 | name = "urllib3-1.16"; |
|
1720 | 1694 | buildInputs = with self; []; |
|
1721 | 1695 | doCheck = false; |
|
1722 | 1696 | propagatedBuildInputs = with self; []; |
|
1723 | 1697 | src = fetchurl { |
|
1724 | 1698 | url = "https://pypi.python.org/packages/3b/f0/e763169124e3f5db0926bc3dbfcd580a105f9ca44cf5d8e6c7a803c9f6b5/urllib3-1.16.tar.gz"; |
|
1725 | 1699 | md5 = "fcaab1c5385c57deeb7053d3d7d81d59"; |
|
1726 | 1700 | }; |
|
1727 | 1701 | meta = { |
|
1728 | 1702 | license = [ pkgs.lib.licenses.mit ]; |
|
1729 | 1703 | }; |
|
1730 | 1704 | }; |
|
1731 | 1705 | venusian = super.buildPythonPackage { |
|
1732 | 1706 | name = "venusian-1.0"; |
|
1733 | 1707 | buildInputs = with self; []; |
|
1734 | 1708 | doCheck = false; |
|
1735 | 1709 | propagatedBuildInputs = with self; []; |
|
1736 | 1710 | src = fetchurl { |
|
1737 | 1711 | url = "https://pypi.python.org/packages/86/20/1948e0dfc4930ddde3da8c33612f6a5717c0b4bc28f591a5c5cf014dd390/venusian-1.0.tar.gz"; |
|
1738 | 1712 | md5 = "dccf2eafb7113759d60c86faf5538756"; |
|
1739 | 1713 | }; |
|
1740 | 1714 | meta = { |
|
1741 | 1715 | license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ]; |
|
1742 | 1716 | }; |
|
1743 | 1717 | }; |
|
1744 | 1718 | waitress = super.buildPythonPackage { |
|
1745 | 1719 | name = "waitress-1.0.1"; |
|
1746 | 1720 | buildInputs = with self; []; |
|
1747 | 1721 | doCheck = false; |
|
1748 | 1722 | propagatedBuildInputs = with self; []; |
|
1749 | 1723 | src = fetchurl { |
|
1750 | 1724 | url = "https://pypi.python.org/packages/78/7d/84d11b96c3f60164dec3bef4a859a03aeae0231aa93f57fbe0d05fa4ff36/waitress-1.0.1.tar.gz"; |
|
1751 | 1725 | md5 = "dda92358a7569669086155923a46e57c"; |
|
1752 | 1726 | }; |
|
1753 | 1727 | meta = { |
|
1754 | 1728 | license = [ pkgs.lib.licenses.zpt21 ]; |
|
1755 | 1729 | }; |
|
1756 | 1730 | }; |
|
1757 | 1731 | wcwidth = super.buildPythonPackage { |
|
1758 | 1732 | name = "wcwidth-0.1.7"; |
|
1759 | 1733 | buildInputs = with self; []; |
|
1760 | 1734 | doCheck = false; |
|
1761 | 1735 | propagatedBuildInputs = with self; []; |
|
1762 | 1736 | src = fetchurl { |
|
1763 | 1737 | url = "https://pypi.python.org/packages/55/11/e4a2bb08bb450fdbd42cc709dd40de4ed2c472cf0ccb9e64af22279c5495/wcwidth-0.1.7.tar.gz"; |
|
1764 | 1738 | md5 = "b3b6a0a08f0c8a34d1de8cf44150a4ad"; |
|
1765 | 1739 | }; |
|
1766 | 1740 | meta = { |
|
1767 | 1741 | license = [ pkgs.lib.licenses.mit ]; |
|
1768 | 1742 | }; |
|
1769 | 1743 | }; |
|
1770 | 1744 | ws4py = super.buildPythonPackage { |
|
1771 | 1745 | name = "ws4py-0.3.5"; |
|
1772 | 1746 | buildInputs = with self; []; |
|
1773 | 1747 | doCheck = false; |
|
1774 | 1748 | propagatedBuildInputs = with self; []; |
|
1775 | 1749 | src = fetchurl { |
|
1776 | 1750 | url = "https://pypi.python.org/packages/b6/4f/34af703be86939629479e74d6e650e39f3bd73b3b09212c34e5125764cbc/ws4py-0.3.5.zip"; |
|
1777 | 1751 | md5 = "a261b75c20b980e55ce7451a3576a867"; |
|
1778 | 1752 | }; |
|
1779 | 1753 | meta = { |
|
1780 | 1754 | license = [ pkgs.lib.licenses.bsdOriginal ]; |
|
1781 | 1755 | }; |
|
1782 | 1756 | }; |
|
1783 | 1757 | wsgiref = super.buildPythonPackage { |
|
1784 | 1758 | name = "wsgiref-0.1.2"; |
|
1785 | 1759 | buildInputs = with self; []; |
|
1786 | 1760 | doCheck = false; |
|
1787 | 1761 | propagatedBuildInputs = with self; []; |
|
1788 | 1762 | src = fetchurl { |
|
1789 | 1763 | url = "https://pypi.python.org/packages/41/9e/309259ce8dff8c596e8c26df86dbc4e848b9249fd36797fd60be456f03fc/wsgiref-0.1.2.zip"; |
|
1790 | 1764 | md5 = "29b146e6ebd0f9fb119fe321f7bcf6cb"; |
|
1791 | 1765 | }; |
|
1792 | 1766 | meta = { |
|
1793 | 1767 | license = [ { fullName = "PSF or ZPL"; } ]; |
|
1794 | 1768 | }; |
|
1795 | 1769 | }; |
|
1796 | 1770 | zope.cachedescriptors = super.buildPythonPackage { |
|
1797 | 1771 | name = "zope.cachedescriptors-4.0.0"; |
|
1798 | 1772 | buildInputs = with self; []; |
|
1799 | 1773 | doCheck = false; |
|
1800 | 1774 | propagatedBuildInputs = with self; [setuptools]; |
|
1801 | 1775 | src = fetchurl { |
|
1802 | 1776 | url = "https://pypi.python.org/packages/40/33/694b6644c37f28553f4b9f20b3c3a20fb709a22574dff20b5bdffb09ecd5/zope.cachedescriptors-4.0.0.tar.gz"; |
|
1803 | 1777 | md5 = "8d308de8c936792c8e758058fcb7d0f0"; |
|
1804 | 1778 | }; |
|
1805 | 1779 | meta = { |
|
1806 | 1780 | license = [ pkgs.lib.licenses.zpt21 ]; |
|
1807 | 1781 | }; |
|
1808 | 1782 | }; |
|
1809 | 1783 | zope.deprecation = super.buildPythonPackage { |
|
1810 | 1784 | name = "zope.deprecation-4.1.2"; |
|
1811 | 1785 | buildInputs = with self; []; |
|
1812 | 1786 | doCheck = false; |
|
1813 | 1787 | propagatedBuildInputs = with self; [setuptools]; |
|
1814 | 1788 | src = fetchurl { |
|
1815 | 1789 | url = "https://pypi.python.org/packages/c1/d3/3919492d5e57d8dd01b36f30b34fc8404a30577392b1eb817c303499ad20/zope.deprecation-4.1.2.tar.gz"; |
|
1816 | 1790 | md5 = "e9a663ded58f4f9f7881beb56cae2782"; |
|
1817 | 1791 | }; |
|
1818 | 1792 | meta = { |
|
1819 | 1793 | license = [ pkgs.lib.licenses.zpt21 ]; |
|
1820 | 1794 | }; |
|
1821 | 1795 | }; |
|
1822 | 1796 | zope.event = super.buildPythonPackage { |
|
1823 | 1797 | name = "zope.event-4.0.3"; |
|
1824 | 1798 | buildInputs = with self; []; |
|
1825 | 1799 | doCheck = false; |
|
1826 | 1800 | propagatedBuildInputs = with self; [setuptools]; |
|
1827 | 1801 | src = fetchurl { |
|
1828 | 1802 | url = "https://pypi.python.org/packages/c1/29/91ba884d7d6d96691df592e9e9c2bfa57a47040ec1ff47eff18c85137152/zope.event-4.0.3.tar.gz"; |
|
1829 | 1803 | md5 = "9a3780916332b18b8b85f522bcc3e249"; |
|
1830 | 1804 | }; |
|
1831 | 1805 | meta = { |
|
1832 | 1806 | license = [ pkgs.lib.licenses.zpt21 ]; |
|
1833 | 1807 | }; |
|
1834 | 1808 | }; |
|
1835 | 1809 | zope.interface = super.buildPythonPackage { |
|
1836 | 1810 | name = "zope.interface-4.1.3"; |
|
1837 | 1811 | buildInputs = with self; []; |
|
1838 | 1812 | doCheck = false; |
|
1839 | 1813 | propagatedBuildInputs = with self; [setuptools]; |
|
1840 | 1814 | src = fetchurl { |
|
1841 | 1815 | url = "https://pypi.python.org/packages/9d/81/2509ca3c6f59080123c1a8a97125eb48414022618cec0e64eb1313727bfe/zope.interface-4.1.3.tar.gz"; |
|
1842 | 1816 | md5 = "9ae3d24c0c7415deb249dd1a132f0f79"; |
|
1843 | 1817 | }; |
|
1844 | 1818 | meta = { |
|
1845 | 1819 | license = [ pkgs.lib.licenses.zpt21 ]; |
|
1846 | 1820 | }; |
|
1847 | 1821 | }; |
|
1848 | 1822 | |
|
1849 | 1823 | ### Test requirements |
|
1850 | 1824 | |
|
1851 | 1825 | |
|
1852 | 1826 | } |
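Note: each generated Nix entry above pins its PyPI sdist with a fetchurl
URL plus an md5 checksum. Below is a minimal Python sketch of the same
integrity check, reusing the waitress pin from this hunk; the helper
itself is illustrative and not part of the RhodeCode build:

    import hashlib
    import urllib2

    URL = ("https://pypi.python.org/packages/78/7d/"
           "84d11b96c3f60164dec3bef4a859a03aeae0231aa93f57fbe0d05fa4ff36/"
           "waitress-1.0.1.tar.gz")
    EXPECTED_MD5 = "dda92358a7569669086155923a46e57c"

    def matches_pin(url, expected):
        data = urllib2.urlopen(url).read()  # fetch the sdist
        # same digest comparison that Nix's fetchurl performs
        return hashlib.md5(data).hexdigest() == expected

    if __name__ == '__main__':
        print('md5 matches pin: %s' % matches_pin(URL, EXPECTED_MD5))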
@@ -1,12 +1,11 b'' | |||
|
1 | 1 | [pytest] |
|
2 | 2 | testpaths = ./rhodecode |
|
3 | 3 | pylons_config = rhodecode/tests/rhodecode.ini |
|
4 | 4 | vcsserver_protocol = http |
|
5 | vcsserver_config_pyro4 = rhodecode/tests/vcsserver_pyro4.ini | |
|
6 | 5 | vcsserver_config_http = rhodecode/tests/vcsserver_http.ini |
|
7 | 6 | norecursedirs = tests/scripts |
|
8 | 7 | addopts = -k "not _BaseTest" |
|
9 | 8 | markers = |
|
10 | 9 | vcs_operations: Mark tests depending on a running RhodeCode instance. |
|
11 | 10 | xfail_backends: Mark tests as xfail for given backends. |
|
12 | 11 | skip_backends: Mark tests as skipped for given backends. |
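Note: the hunk above drops the vcsserver_config_pyro4 key from [pytest],
leaving the http variant as the only vcsserver configuration. A minimal
conftest.py sketch (not RhodeCode's actual test plugin) of how such
custom ini keys are declared so config.getini() can read them:

    # conftest.py -- declare the custom [pytest] keys used above
    def pytest_addoption(parser):
        parser.addini('vcsserver_protocol', 'protocol used to reach vcsserver')
        parser.addini('vcsserver_config_http', 'path to the HTTP vcsserver ini')

    def pytest_configure(config):
        # after this change only the http protocol remains valid
        assert config.getini('vcsserver_protocol') == 'http'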
@@ -1,131 +1,127 b'' | |||
|
1 | 1 | ## core |
|
2 | 2 | setuptools==30.1.0 |
|
3 | 3 | setuptools-scm==1.15.0 |
|
4 | 4 | |
|
5 | 5 | amqplib==1.0.2 |
|
6 | 6 | anyjson==0.3.3 |
|
7 | 7 | authomatic==0.1.0.post1 |
|
8 | 8 | Babel==1.3 |
|
9 | 9 | backport-ipaddress==0.1 |
|
10 | 10 | Beaker==1.7.0 |
|
11 | 11 | celery==2.2.10 |
|
12 | 12 | Chameleon==2.24 |
|
13 | 13 | channelstream==0.5.2 |
|
14 | 14 | click==5.1 |
|
15 | 15 | colander==1.2 |
|
16 | 16 | configobj==5.0.6 |
|
17 | 17 | decorator==3.4.2 |
|
18 | 18 | deform==2.0a2 |
|
19 | 19 | docutils==0.12 |
|
20 | 20 | dogpile.cache==0.6.1 |
|
21 | 21 | dogpile.core==0.4.1 |
|
22 | 22 | ecdsa==0.11 |
|
23 | 23 | FormEncode==1.2.4 |
|
24 | 24 | future==0.14.3 |
|
25 | 25 | futures==3.0.2 |
|
26 | 26 | gnureadline==6.3.3 |
|
27 | 27 | infrae.cache==1.0.1 |
|
28 | 28 | iso8601==0.1.11 |
|
29 | 29 | itsdangerous==0.24 |
|
30 | 30 | Jinja2==2.7.3 |
|
31 | 31 | kombu==1.5.1 |
|
32 | 32 | Mako==1.0.6 |
|
33 | 33 | Markdown==2.6.7 |
|
34 | 34 | MarkupSafe==0.23 |
|
35 | 35 | meld3==1.0.2 |
|
36 | 36 | msgpack-python==0.4.8 |
|
37 | 37 | MySQL-python==1.2.5 |
|
38 | 38 | nose==1.3.6 |
|
39 | 39 | objgraph==2.0.0 |
|
40 | 40 | packaging==15.2 |
|
41 | 41 | paramiko==1.15.1 |
|
42 | 42 | Paste==2.0.3 |
|
43 | 43 | PasteDeploy==1.5.2 |
|
44 | 44 | PasteScript==1.7.5 |
|
45 | 45 | psutil==4.3.1 |
|
46 | 46 | psycopg2==2.6.1 |
|
47 | 47 | py-bcrypt==0.4 |
|
48 | 48 | pycrypto==2.6.1 |
|
49 | 49 | pycurl==7.19.5 |
|
50 | 50 | pyflakes==0.8.1 |
|
51 | 51 | pygments-markdown-lexer==0.1.0.dev39 |
|
52 | 52 | Pygments==2.2.0 |
|
53 | 53 | pyparsing==1.5.7 |
|
54 | 54 | pyramid-beaker==0.8 |
|
55 | 55 | pyramid-debugtoolbar==3.0.5 |
|
56 | 56 | pyramid-jinja2==2.5 |
|
57 | 57 | pyramid-mako==1.0.2 |
|
58 | 58 | pyramid==1.7.4 |
|
59 | 59 | pysqlite==2.6.3 |
|
60 | 60 | python-dateutil==1.5 |
|
61 | 61 | python-ldap==2.4.19 |
|
62 | 62 | python-memcached==1.57 |
|
63 | 63 | python-pam==1.8.2 |
|
64 | 64 | pytz==2015.4 |
|
65 | 65 | pyzmq==14.6.0 |
|
66 | 66 | recaptcha-client==1.0.6 |
|
67 | 67 | repoze.lru==0.6 |
|
68 | 68 | requests==2.9.1 |
|
69 | 69 | Routes==1.13 |
|
70 | 70 | setproctitle==1.1.8 |
|
71 | 71 | simplejson==3.7.2 |
|
72 | 72 | six==1.9.0 |
|
73 | 73 | Sphinx==1.2.2 |
|
74 | 74 | SQLAlchemy==0.9.9 |
|
75 | 75 | subprocess32==3.2.6 |
|
76 | 76 | supervisor==3.3.1 |
|
77 | 77 | Tempita==0.5.2 |
|
78 | 78 | translationstring==1.3 |
|
79 | 79 | trollius==1.0.4 |
|
80 | 80 | urllib3==1.16 |
|
81 | 81 | URLObject==2.4.0 |
|
82 | 82 | venusian==1.0 |
|
83 | 83 | WebError==0.10.3 |
|
84 | 84 | WebHelpers2==2.0 |
|
85 | 85 | WebHelpers==1.3 |
|
86 | 86 | WebOb==1.3.1 |
|
87 | 87 | Whoosh==2.7.4 |
|
88 | 88 | wsgiref==0.1.2 |
|
89 | 89 | zope.cachedescriptors==4.0.0 |
|
90 | 90 | zope.deprecation==4.1.2 |
|
91 | 91 | zope.event==4.0.3 |
|
92 | 92 | zope.interface==4.1.3 |
|
93 | 93 | |
|
94 | 94 | ## customized/patched libs |
|
95 | 95 | # our patched version of Pylons==1.0.2 |
|
96 | 96 | https://code.rhodecode.com/upstream/pylons/archive/707354ee4261b9c10450404fc9852ccea4fd667d.tar.gz?md5=f26633726fa2cd3a340316ee6a5d218f#egg=Pylons==1.0.2.rhodecode-patch-1 |
|
97 | 97 | # not released py-gfm==0.1.3 |
|
98 | 98 | https://code.rhodecode.com/upstream/py-gfm/archive/0d66a19bc16e3d49de273c0f797d4e4781e8c0f2.tar.gz?md5=0d0d5385bfb629eea636a80b9c2bfd16#egg=py-gfm==0.1.3.rhodecode-upstream1 |
|
99 | 99 | |
|
100 | 100 | |
|
101 | 101 | ## cli tools |
|
102 | 102 | alembic==0.8.4 |
|
103 | 103 | invoke==0.13.0 |
|
104 | 104 | bumpversion==0.5.3 |
|
105 | 105 | transifex-client==0.10 |
|
106 | 106 | |
|
107 | 107 | ## http servers |
|
108 | 108 | gevent==1.1.2 |
|
109 | 109 | greenlet==0.4.10 |
|
110 | 110 | gunicorn==19.6.0 |
|
111 | 111 | waitress==1.0.1 |
|
112 | 112 | uWSGI==2.0.11.2 |
|
113 | 113 | |
|
114 | 114 | ## debug |
|
115 | 115 | ipdb==0.10.1 |
|
116 | 116 | ipython==5.1.0 |
|
117 | 117 | CProfileV==1.0.6 |
|
118 | 118 | bottle==0.12.8 |
|
119 | 119 | |
|
120 | 120 | ## rhodecode-tools, special case |
|
121 | 121 | https://code.rhodecode.com/rhodecode-tools-ce/archive/v0.11.0.tar.gz?md5=e5fd0a8363af08a0ced71b50ca9cce15#egg=rhodecode-tools==0.11.0 |
|
122 | 122 | |
|
123 | 123 | ## appenlight |
|
124 | 124 | appenlight-client==0.6.14 |
|
125 | 125 | |
|
126 | # Pyro/Deprecated TODO(Marcink): remove in 4.7 release. | |
|
127 | Pyro4==4.41 | |
|
128 | serpent==1.15 | |
|
129 | ||
|
130 | 126 | ## test related requirements |
|
131 | 127 | -r requirements_test.txt |
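Note: the hunk above retires the deprecated Pyro4==4.41 pin together with
its serpent==1.15 serializer, completing the move to the HTTP vcsserver
protocol. A small sketch (not part of the codebase) for double-checking
that an environment built from this file no longer carries either package:

    import pkg_resources

    for name in ('Pyro4', 'serpent'):
        try:
            dist = pkg_resources.get_distribution(name)
            print('%s is still installed (%s)' % (name, dist.version))
        except pkg_resources.DistributionNotFound:
            print('%s: not installed, as expected' % name)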
@@ -1,190 +1,189 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2010-2017 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | """ |
|
22 | 22 | Pylons environment configuration |
|
23 | 23 | """ |
|
24 | 24 | |
|
25 | 25 | import os |
|
26 | 26 | import logging |
|
27 | 27 | import rhodecode |
|
28 | 28 | import platform |
|
29 | 29 | import re |
|
30 | 30 | import io |
|
31 | 31 | |
|
32 | 32 | from mako.lookup import TemplateLookup |
|
33 | 33 | from pylons.configuration import PylonsConfig |
|
34 | 34 | from pylons.error import handle_mako_error |
|
35 | 35 | from pyramid.settings import asbool |
|
36 | 36 | |
|
37 | 37 | # ------------------------------------------------------------------------------ |
|
38 | 38 | # CELERY magic until refactor - issue #4163 - import order matters here: |
|
39 | 39 | from rhodecode.lib import celerypylons # this must be first, celerypylons |
|
40 | 40 | # sets config settings upon import |
|
41 | 41 | |
|
42 | 42 | import rhodecode.integrations # any modules using celery task |
|
43 | 43 | # decorators should be added afterwards: |
|
44 | 44 | # ------------------------------------------------------------------------------ |
|
45 | 45 | |
|
46 | 46 | from rhodecode.lib import app_globals |
|
47 | 47 | from rhodecode.config import utils |
|
48 | 48 | from rhodecode.config.routing import make_map |
|
49 | 49 | from rhodecode.config.jsroutes import generate_jsroutes_content |
|
50 | 50 | |
|
51 | 51 | from rhodecode.lib import helpers |
|
52 | 52 | from rhodecode.lib.auth import set_available_permissions |
|
53 | 53 | from rhodecode.lib.utils import ( |
|
54 | 54 | repo2db_mapper, make_db_config, set_rhodecode_config, |
|
55 | 55 | load_rcextensions) |
|
56 | 56 | from rhodecode.lib.utils2 import str2bool, aslist |
|
57 | 57 | from rhodecode.lib.vcs import connect_vcs, start_vcs_server |
|
58 | 58 | from rhodecode.model.scm import ScmModel |
|
59 | 59 | |
|
60 | 60 | log = logging.getLogger(__name__) |
|
61 | 61 | |
|
62 | 62 | def load_environment(global_conf, app_conf, initial=False, |
|
63 | 63 | test_env=None, test_index=None): |
|
64 | 64 | """ |
|
65 | 65 | Configure the Pylons environment via the ``pylons.config`` |
|
66 | 66 | object |
|
67 | 67 | """ |
|
68 | 68 | config = PylonsConfig() |
|
69 | 69 | |
|
70 | 70 | |
|
71 | 71 | # Pylons paths |
|
72 | 72 | root = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) |
|
73 | 73 | paths = { |
|
74 | 74 | 'root': root, |
|
75 | 75 | 'controllers': os.path.join(root, 'controllers'), |
|
76 | 76 | 'static_files': os.path.join(root, 'public'), |
|
77 | 77 | 'templates': [os.path.join(root, 'templates')], |
|
78 | 78 | } |
|
79 | 79 | |
|
80 | 80 | # Initialize config with the basic options |
|
81 | 81 | config.init_app(global_conf, app_conf, package='rhodecode', paths=paths) |
|
82 | 82 | |
|
83 | 83 | # store some globals into rhodecode |
|
84 | 84 | rhodecode.CELERY_ENABLED = str2bool(config['app_conf'].get('use_celery')) |
|
85 | 85 | rhodecode.CELERY_EAGER = str2bool( |
|
86 | 86 | config['app_conf'].get('celery.always.eager')) |
|
87 | 87 | |
|
88 | 88 | config['routes.map'] = make_map(config) |
|
89 | 89 | |
|
90 | 90 | if asbool(config.get('generate_js_files', 'false')): |
|
91 | 91 | jsroutes = config['routes.map'].jsroutes() |
|
92 | 92 | jsroutes_file_content = generate_jsroutes_content(jsroutes) |
|
93 | 93 | jsroutes_file_path = os.path.join( |
|
94 | 94 | paths['static_files'], 'js', 'rhodecode', 'routes.js') |
|
95 | 95 | |
|
96 | 96 | with io.open(jsroutes_file_path, 'w', encoding='utf-8') as f: |
|
97 | 97 | f.write(jsroutes_file_content) |
|
98 | 98 | |
|
99 | 99 | config['pylons.app_globals'] = app_globals.Globals(config) |
|
100 | 100 | config['pylons.h'] = helpers |
|
101 | 101 | rhodecode.CONFIG = config |
|
102 | 102 | |
|
103 | 103 | load_rcextensions(root_path=config['here']) |
|
104 | 104 | |
|
105 | 105 | # Setup cache object as early as possible |
|
106 | 106 | import pylons |
|
107 | 107 | pylons.cache._push_object(config['pylons.app_globals'].cache) |
|
108 | 108 | |
|
109 | 109 | # Create the Mako TemplateLookup, with the default auto-escaping |
|
110 | 110 | config['pylons.app_globals'].mako_lookup = TemplateLookup( |
|
111 | 111 | directories=paths['templates'], |
|
112 | 112 | error_handler=handle_mako_error, |
|
113 | 113 | module_directory=os.path.join(app_conf['cache_dir'], 'templates'), |
|
114 | 114 | input_encoding='utf-8', default_filters=['escape'], |
|
115 | 115 | imports=['from webhelpers.html import escape']) |
|
116 | 116 | |
|
117 | 117 | # sets the c attribute access when don't existing attribute are accessed |
|
118 | 118 | config['pylons.strict_tmpl_context'] = True |
|
119 | 119 | |
|
120 | 120 | # configure channelstream |
|
121 | 121 | config['channelstream_config'] = { |
|
122 | 122 | 'enabled': asbool(config.get('channelstream.enabled', False)), |
|
123 | 123 | 'server': config.get('channelstream.server'), |
|
124 | 124 | 'secret': config.get('channelstream.secret') |
|
125 | 125 | } |
|
126 | 126 | |
|
127 | 127 | set_available_permissions(config) |
|
128 | 128 | db_cfg = make_db_config(clear_session=True) |
|
129 | 129 | |
|
130 | 130 | repos_path = list(db_cfg.items('paths'))[0][1] |
|
131 | 131 | config['base_path'] = repos_path |
|
132 | 132 | |
|
133 | 133 | # store db config also in main global CONFIG |
|
134 | 134 | set_rhodecode_config(config) |
|
135 | 135 | |
|
136 | 136 | # configure instance id |
|
137 | 137 | utils.set_instance_id(config) |
|
138 | 138 | |
|
139 | 139 | # CONFIGURATION OPTIONS HERE (note: all config options will override |
|
140 | 140 | # any Pylons config options) |
|
141 | 141 | |
|
142 | 142 | # store config reference into our module to skip import magic of pylons |
|
143 | 143 | rhodecode.CONFIG.update(config) |
|
144 | 144 | |
|
145 | 145 | return config |
|
146 | 146 | |
|
147 | 147 | |
|
148 | 148 | def load_pyramid_environment(global_config, settings): |
|
149 | 149 | # Some parts of the code expect a merge of global and app settings. |
|
150 | 150 | settings_merged = global_config.copy() |
|
151 | 151 | settings_merged.update(settings) |
|
152 | 152 | |
|
153 | 153 | # Store the settings to make them available to other modules. |
|
154 | 154 | rhodecode.PYRAMID_SETTINGS = settings_merged |
|
155 | 155 | |
|
156 | 156 | # If this is a test run we prepare the test environment like |
|
157 | 157 | # creating a test database, test search index and test repositories. |
|
158 | 158 | # This has to be done before the database connection is initialized. |
|
159 | 159 | if settings['is_test']: |
|
160 | 160 | rhodecode.is_test = True |
|
161 | 161 | rhodecode.disable_error_handler = True |
|
162 | 162 | |
|
163 | 163 | utils.initialize_test_environment(settings_merged) |
|
164 | 164 | |
|
165 | 165 | # Initialize the database connection. |
|
166 | 166 | utils.initialize_database(settings_merged) |
|
167 | 167 | |
|
168 | 168 | # Limit backends to `vcs.backends` from configuration |
|
169 | 169 | for alias in rhodecode.BACKENDS.keys(): |
|
170 | 170 | if alias not in settings['vcs.backends']: |
|
171 | 171 | del rhodecode.BACKENDS[alias] |
|
172 | 172 | log.info('Enabled VCS backends: %s', rhodecode.BACKENDS.keys()) |
|
173 | 173 | |
|
174 | 174 | # initialize vcs client and optionally run the server if enabled |
|
175 | 175 | vcs_server_uri = settings['vcs.server'] |
|
176 | 176 | vcs_server_enabled = settings['vcs.server.enable'] |
|
177 | 177 | start_server = ( |
|
178 | 178 | settings['vcs.start_server'] and |
|
179 | 179 | not int(os.environ.get('RC_VCSSERVER_TEST_DISABLE', '0'))) |
|
180 | 180 | |
|
181 | 181 | if vcs_server_enabled and start_server: |
|
182 | 182 | log.info("Starting vcsserver") |
|
183 | 183 | start_vcs_server(server_and_port=vcs_server_uri, |
|
184 | 184 | protocol=utils.get_vcs_server_protocol(settings), |
|
185 | 185 | log_level=settings['vcs.server.log_level']) |
|
186 | 186 | |
|
187 | utils.configure_pyro4(settings) | |
|
188 | 187 | utils.configure_vcs(settings) |
|
189 | 188 | if vcs_server_enabled: |
|
190 | 189 | connect_vcs(vcs_server_uri, utils.get_vcs_server_protocol(settings)) |
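Note: with utils.configure_pyro4(settings) gone, load_pyramid_environment
only patches the VCS config and connects over the configured protocol. A
sketch of the settings keys it consults; the key names come from the code
above, the values are illustrative:

    settings = {
        'vcs.server': 'localhost:9900',   # illustrative address
        'vcs.server.enable': True,
        'vcs.server.protocol': 'http',    # 'pyro4' is no longer wired up
        'vcs.start_server': False,
    }

    protocol = settings['vcs.server.protocol']
    if protocol != 'http':
        raise Exception('Unsupported vcs.server.protocol: %s' % protocol)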
@@ -1,256 +1,250 b'' | |||
|
1 | 1 | { |
|
2 | 2 | "nodejs-4.3.1": { |
|
3 | 3 | "MIT License": "http://spdx.org/licenses/MIT" |
|
4 | 4 | }, |
|
5 | 5 | "postgresql-9.5.1": { |
|
6 | 6 | "PostgreSQL License": "http://spdx.org/licenses/PostgreSQL" |
|
7 | 7 | }, |
|
8 | 8 | "python-2.7.11": { |
|
9 | 9 | "Python Software Foundation License version 2": "http://spdx.org/licenses/Python-2.0" |
|
10 | 10 | }, |
|
11 | 11 | "python2.7-Babel-1.3": { |
|
12 | 12 | "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause" |
|
13 | 13 | }, |
|
14 | 14 | "python2.7-Beaker-1.7.0": { |
|
15 | 15 | "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause" |
|
16 | 16 | }, |
|
17 | 17 | "python2.7-FormEncode-1.2.4": { |
|
18 | 18 | "Python Software Foundation License version 2": "http://spdx.org/licenses/Python-2.0" |
|
19 | 19 | }, |
|
20 | 20 | "python2.7-Mako-1.0.1": { |
|
21 | 21 | "MIT License": "http://spdx.org/licenses/MIT" |
|
22 | 22 | }, |
|
23 | 23 | "python2.7-Markdown-2.6.2": { |
|
24 | 24 | "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause" |
|
25 | 25 | }, |
|
26 | 26 | "python2.7-MarkupSafe-0.23": { |
|
27 | 27 | "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause" |
|
28 | 28 | }, |
|
29 | 29 | "python2.7-Paste-2.0.2": { |
|
30 | 30 | "MIT License": "http://spdx.org/licenses/MIT" |
|
31 | 31 | }, |
|
32 | 32 | "python2.7-PasteDeploy-1.5.2": { |
|
33 | 33 | "MIT License": "http://spdx.org/licenses/MIT" |
|
34 | 34 | }, |
|
35 | 35 | "python2.7-PasteScript-1.7.5": { |
|
36 | 36 | "MIT License": "http://spdx.org/licenses/MIT" |
|
37 | 37 | }, |
|
38 | 38 | "python2.7-Pygments-2.0.2": { |
|
39 | 39 | "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause" |
|
40 | 40 | }, |
|
41 | 41 | "python2.7-Pylons-1.0.1-patch1": { |
|
42 | 42 | "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause" |
|
43 | 43 | },
|
44 | "python2.7-Pyro4-4.35": { | |
|
45 | "MIT License": "http://spdx.org/licenses/MIT" | |
|
46 | }, | |
|
47 | 44 | "python2.7-Routes-1.13": { |
|
48 | 45 | "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause" |
|
49 | 46 | }, |
|
50 | 47 | "python2.7-SQLAlchemy-0.9.9": { |
|
51 | 48 | "MIT License": "http://spdx.org/licenses/MIT" |
|
52 | 49 | }, |
|
53 | 50 | "python2.7-Tempita-0.5.2": { |
|
54 | 51 | "MIT License": "http://spdx.org/licenses/MIT" |
|
55 | 52 | }, |
|
56 | 53 | "python2.7-URLObject-2.4.0": { |
|
57 | 54 | "The Unlicense": "http://unlicense.org/" |
|
58 | 55 | }, |
|
59 | 56 | "python2.7-WebError-0.10.3": { |
|
60 | 57 | "MIT License": "http://spdx.org/licenses/MIT" |
|
61 | 58 | }, |
|
62 | 59 | "python2.7-WebHelpers-1.3": { |
|
63 | 60 | "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause" |
|
64 | 61 | }, |
|
65 | 62 | "python2.7-WebHelpers2-2.0": { |
|
66 | 63 | "MIT License": "http://spdx.org/licenses/MIT" |
|
67 | 64 | }, |
|
68 | 65 | "python2.7-WebOb-1.3.1": { |
|
69 | 66 | "MIT License": "http://spdx.org/licenses/MIT" |
|
70 | 67 | }, |
|
71 | 68 | "python2.7-Whoosh-2.7.0": { |
|
72 | 69 | "BSD 2-clause \"Simplified\" License": "http://spdx.org/licenses/BSD-2-Clause", |
|
73 | 70 | "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause" |
|
74 | 71 | }, |
|
75 | 72 | "python2.7-alembic-0.8.4": { |
|
76 | 73 | "MIT License": "http://spdx.org/licenses/MIT" |
|
77 | 74 | }, |
|
78 | 75 | "python2.7-amqplib-1.0.2": { |
|
79 | 76 | "GNU Lesser General Public License v3.0 only": "http://spdx.org/licenses/LGPL-3.0" |
|
80 | 77 | }, |
|
81 | 78 | "python2.7-anyjson-0.3.3": { |
|
82 | 79 | "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause" |
|
83 | 80 | }, |
|
84 | 81 | "python2.7-appenlight-client-0.6.14": { |
|
85 | 82 | "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause" |
|
86 | 83 | }, |
|
87 | 84 | "python2.7-authomatic-0.1.0.post1": { |
|
88 | 85 | "MIT License": "http://spdx.org/licenses/MIT" |
|
89 | 86 | }, |
|
90 | 87 | "python2.7-backport-ipaddress-0.1": { |
|
91 | 88 | "Python Software Foundation License version 2": "http://spdx.org/licenses/Python-2.0" |
|
92 | 89 | }, |
|
93 | 90 | "python2.7-celery-2.2.10": { |
|
94 | 91 | "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause" |
|
95 | 92 | }, |
|
96 | 93 | "python2.7-click-5.1": { |
|
97 | 94 | "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause" |
|
98 | 95 | }, |
|
99 | 96 | "python2.7-colander-1.2": { |
|
100 | 97 | "Repoze License": "http://www.repoze.org/LICENSE.txt" |
|
101 | 98 | }, |
|
102 | 99 | "python2.7-configobj-5.0.6": { |
|
103 | 100 | "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause" |
|
104 | 101 | }, |
|
105 | 102 | "python2.7-cssselect-0.9.1": { |
|
106 | 103 | "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause" |
|
107 | 104 | }, |
|
108 | 105 | "python2.7-decorator-3.4.2": { |
|
109 | 106 | "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause" |
|
110 | 107 | }, |
|
111 | 108 | "python2.7-docutils-0.12": { |
|
112 | 109 | "BSD 2-clause \"Simplified\" License": "http://spdx.org/licenses/BSD-2-Clause" |
|
113 | 110 | }, |
|
114 | 111 | "python2.7-elasticsearch-2.3.0": { |
|
115 | 112 | "Apache License 2.0": "http://spdx.org/licenses/Apache-2.0" |
|
116 | 113 | }, |
|
117 | 114 | "python2.7-elasticsearch-dsl-2.0.0": { |
|
118 | 115 | "Apache License 2.0": "http://spdx.org/licenses/Apache-2.0" |
|
119 | 116 | }, |
|
120 | 117 | "python2.7-future-0.14.3": { |
|
121 | 118 | "MIT License": "http://spdx.org/licenses/MIT" |
|
122 | 119 | }, |
|
123 | 120 | "python2.7-futures-3.0.2": { |
|
124 | 121 | "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause" |
|
125 | 122 | }, |
|
126 | 123 | "python2.7-gnureadline-6.3.3": { |
|
127 | 124 | "GNU General Public License v1.0 only": "http://spdx.org/licenses/GPL-1.0" |
|
128 | 125 | }, |
|
129 | 126 | "python2.7-gunicorn-19.6.0": { |
|
130 | 127 | "MIT License": "http://spdx.org/licenses/MIT" |
|
131 | 128 | }, |
|
132 | 129 | "python2.7-infrae.cache-1.0.1": { |
|
133 | 130 | "Zope Public License 2.1": "http://spdx.org/licenses/ZPL-2.1" |
|
134 | 131 | }, |
|
135 | 132 | "python2.7-ipython-3.1.0": { |
|
136 | 133 | "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause" |
|
137 | 134 | }, |
|
138 | 135 | "python2.7-iso8601-0.1.11": { |
|
139 | 136 | "MIT License": "http://spdx.org/licenses/MIT" |
|
140 | 137 | }, |
|
141 | 138 | "python2.7-kombu-1.5.1": { |
|
142 | 139 | "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause" |
|
143 | 140 | }, |
|
144 | 141 | "python2.7-msgpack-python-0.4.6": { |
|
145 | 142 | "Apache License 2.0": "http://spdx.org/licenses/Apache-2.0" |
|
146 | 143 | }, |
|
147 | 144 | "python2.7-packaging-15.2": { |
|
148 | 145 | "Apache License 2.0": "http://spdx.org/licenses/Apache-2.0" |
|
149 | 146 | }, |
|
150 | 147 | "python2.7-psutil-2.2.1": { |
|
151 | 148 | "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause" |
|
152 | 149 | }, |
|
153 | 150 | "python2.7-psycopg2-2.6": { |
|
154 | 151 | "GNU Lesser General Public License v3.0 or later": "http://spdx.org/licenses/LGPL-3.0+" |
|
155 | 152 | }, |
|
156 | 153 | "python2.7-py-1.4.29": { |
|
157 | 154 | "MIT License": "http://spdx.org/licenses/MIT" |
|
158 | 155 | }, |
|
159 | 156 | "python2.7-py-bcrypt-0.4": { |
|
160 | 157 | "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause" |
|
161 | 158 | }, |
|
162 | 159 | "python2.7-pycrypto-2.6.1": { |
|
163 | 160 | "Public Domain": null |
|
164 | 161 | }, |
|
165 | 162 | "python2.7-pycurl-7.19.5": { |
|
166 | 163 | "MIT License": "http://spdx.org/licenses/MIT" |
|
167 | 164 | }, |
|
168 | 165 | "python2.7-pyparsing-1.5.7": { |
|
169 | 166 | "MIT License": "http://spdx.org/licenses/MIT" |
|
170 | 167 | }, |
|
171 | 168 | "python2.7-pyramid-1.6.1": { |
|
172 | 169 | "Repoze License": "http://www.repoze.org/LICENSE.txt" |
|
173 | 170 | }, |
|
174 | 171 | "python2.7-pyramid-beaker-0.8": { |
|
175 | 172 | "Repoze License": "http://www.repoze.org/LICENSE.txt" |
|
176 | 173 | }, |
|
177 | 174 | "python2.7-pyramid-debugtoolbar-2.4.2": { |
|
178 | 175 | "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause", |
|
179 | 176 | "Repoze License": "http://www.repoze.org/LICENSE.txt" |
|
180 | 177 | }, |
|
181 | 178 | "python2.7-pyramid-mako-1.0.2": { |
|
182 | 179 | "Repoze License": "http://www.repoze.org/LICENSE.txt" |
|
183 | 180 | }, |
|
184 | 181 | "python2.7-pysqlite-2.6.3": { |
|
185 | 182 | "libpng License": "http://spdx.org/licenses/Libpng", |
|
186 | 183 | "zlib License": "http://spdx.org/licenses/Zlib" |
|
187 | 184 | }, |
|
188 | 185 | "python2.7-pytest-2.8.5": { |
|
189 | 186 | "MIT License": "http://spdx.org/licenses/MIT" |
|
190 | 187 | }, |
|
191 | 188 | "python2.7-pytest-runner-2.7.1": { |
|
192 | 189 | "MIT License": "http://spdx.org/licenses/MIT" |
|
193 | 190 | }, |
|
194 | 191 | "python2.7-python-dateutil-1.5": { |
|
195 | 192 | "Python Software Foundation License version 2": "http://spdx.org/licenses/Python-2.0" |
|
196 | 193 | }, |
|
197 | 194 | "python2.7-python-editor-1.0.1": { |
|
198 | 195 | "Apache License 2.0": "http://spdx.org/licenses/Apache-2.0" |
|
199 | 196 | }, |
|
200 | 197 | "python2.7-python-ldap-2.4.19": { |
|
201 | 198 | "Python Software Foundation License version 2": "http://spdx.org/licenses/Python-2.0" |
|
202 | 199 | }, |
|
203 | 200 | "python2.7-python-memcached-1.57": { |
|
204 | 201 | "Python Software Foundation License version 2": "http://spdx.org/licenses/Python-2.0" |
|
205 | 202 | }, |
|
206 | 203 | "python2.7-pytz-2015.4": { |
|
207 | 204 | "MIT License": "http://spdx.org/licenses/MIT" |
|
208 | 205 | }, |
|
209 | 206 | "python2.7-recaptcha-client-1.0.6": { |
|
210 | 207 | "MIT License": "http://spdx.org/licenses/MIT" |
|
211 | 208 | }, |
|
212 | 209 | "python2.7-repoze.lru-0.6": { |
|
213 | 210 | "Repoze License": "http://www.repoze.org/LICENSE.txt" |
|
214 | 211 | }, |
|
215 | 212 | "python2.7-requests-2.9.1": { |
|
216 | 213 | "Apache License 2.0": "http://spdx.org/licenses/Apache-2.0" |
|
217 | 214 | },
|
218 | "python2.7-serpent-1.12": { | |
|
219 | "MIT License": "http://spdx.org/licenses/MIT" | |
|
220 | }, | |
|
221 | 215 | "python2.7-setuptools-19.4": { |
|
222 | 216 | "Python Software Foundation License version 2": "http://spdx.org/licenses/Python-2.0", |
|
223 | 217 | "Zope Public License 2.0": "http://spdx.org/licenses/ZPL-2.0" |
|
224 | 218 | }, |
|
225 | 219 | "python2.7-setuptools-scm-1.11.0": { |
|
226 | 220 | "MIT License": "http://spdx.org/licenses/MIT" |
|
227 | 221 | }, |
|
228 | 222 | "python2.7-simplejson-3.7.2": { |
|
229 | 223 | "Academic Free License": "http://spdx.org/licenses/AFL-2.1", |
|
230 | 224 | "MIT License": "http://spdx.org/licenses/MIT" |
|
231 | 225 | }, |
|
232 | 226 | "python2.7-six-1.9.0": { |
|
233 | 227 | "MIT License": "http://spdx.org/licenses/MIT" |
|
234 | 228 | }, |
|
235 | 229 | "python2.7-translationstring-1.3": { |
|
236 | 230 | "Repoze License": "http://www.repoze.org/LICENSE.txt" |
|
237 | 231 | }, |
|
238 | 232 | "python2.7-urllib3-1.16": { |
|
239 | 233 | "MIT License": "http://spdx.org/licenses/MIT" |
|
240 | 234 | }, |
|
241 | 235 | "python2.7-venusian-1.0": { |
|
242 | 236 | "Repoze License": "http://www.repoze.org/LICENSE.txt" |
|
243 | 237 | }, |
|
244 | 238 | "python2.7-waitress-0.8.9": { |
|
245 | 239 | "Zope Public License 2.1": "http://spdx.org/licenses/ZPL-2.1" |
|
246 | 240 | }, |
|
247 | 241 | "python2.7-zope.cachedescriptors-4.0.0": { |
|
248 | 242 | "Zope Public License 2.1": "http://spdx.org/licenses/ZPL-2.1" |
|
249 | 243 | }, |
|
250 | 244 | "python2.7-zope.deprecation-4.1.2": { |
|
251 | 245 | "Zope Public License 2.1": "http://spdx.org/licenses/ZPL-2.1" |
|
252 | 246 | }, |
|
253 | 247 | "python2.7-zope.interface-4.1.3": { |
|
254 | 248 | "Zope Public License 2.1": "http://spdx.org/licenses/ZPL-2.1" |
|
255 | 249 | } |
|
256 | 250 | } No newline at end of file |
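Note: this file maps each bundled "package-version" to one or more
license names, each pointing at its SPDX entry (null where no SPDX URL
exists, e.g. pycrypto's Public Domain). A sketch of reading the mapping
back; the on-disk file name is assumed, the structure is taken from the
hunk itself:

    import json

    with open('licenses.json') as f:    # assumed file name
        licenses = json.load(f)

    for package, entries in sorted(licenses.items()):
        for license_name, spdx_url in entries.items():
            # spdx_url is None for the "Public Domain" entry
            print('%s: %s (%s)' % (package, license_name, spdx_url))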
@@ -1,98 +1,80 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2010-2017 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | import os |
|
22 | 22 | import shlex |
|
23 | import Pyro4 | |
|
24 | 23 | import platform |
|
25 | 24 | |
|
26 | 25 | from rhodecode.model import init_model |
|
27 | 26 | |
|
28 | 27 | |
|
29 | def configure_pyro4(config): | |
|
30 | """ | |
|
31 | Configure Pyro4 based on `config`. | |
|
32 | ||
|
33 | This will mainly set the different configuration parameters of the Pyro4 | |
|
34 | library based on the settings in our INI files. The Pyro4 documentation | |
|
35 | lists more details about the specific settings and their meaning. | |
|
36 | """ | |
|
37 | Pyro4.config.COMMTIMEOUT = float(config['vcs.connection_timeout']) | |
|
38 | Pyro4.config.SERIALIZER = 'pickle' | |
|
39 | Pyro4.config.SERIALIZERS_ACCEPTED.add('pickle') | |
|
40 | ||
|
41 | # Note: We need server configuration in the WSGI processes | |
|
42 | # because we provide a callback server in certain vcs operations. | |
|
43 | Pyro4.config.SERVERTYPE = "multiplex" | |
|
44 | Pyro4.config.POLLTIMEOUT = 0.01 | |
|
45 | ||
|
46 | 28 | |
|
47 | 29 | def configure_vcs(config): |
|
48 | 30 | """ |
|
49 | 31 | Patch VCS config with some RhodeCode specific stuff |
|
50 | 32 | """ |
|
51 | 33 | from rhodecode.lib.vcs import conf |
|
52 | 34 | conf.settings.BACKENDS = { |
|
53 | 35 | 'hg': 'rhodecode.lib.vcs.backends.hg.MercurialRepository', |
|
54 | 36 | 'git': 'rhodecode.lib.vcs.backends.git.GitRepository', |
|
55 | 37 | 'svn': 'rhodecode.lib.vcs.backends.svn.SubversionRepository', |
|
56 | 38 | } |
|
57 | 39 | |
|
58 | 40 | conf.settings.HOOKS_PROTOCOL = config['vcs.hooks.protocol'] |
|
59 | 41 | conf.settings.HOOKS_DIRECT_CALLS = config['vcs.hooks.direct_calls'] |
|
60 | 42 | conf.settings.GIT_REV_FILTER = shlex.split(config['git_rev_filter']) |
|
61 | 43 | conf.settings.DEFAULT_ENCODINGS = config['default_encoding'] |
|
62 | 44 | conf.settings.ALIASES[:] = config['vcs.backends'] |
|
63 | 45 | conf.settings.SVN_COMPATIBLE_VERSION = config['vcs.svn.compatible_version'] |
|
64 | 46 | |
|
65 | 47 | |
|
66 | 48 | def initialize_database(config): |
|
67 | 49 | from rhodecode.lib.utils2 import engine_from_config, get_encryption_key |
|
68 | 50 | engine = engine_from_config(config, 'sqlalchemy.db1.') |
|
69 | 51 | init_model(engine, encryption_key=get_encryption_key(config)) |
|
70 | 52 | |
|
71 | 53 | |
|
72 | 54 | def initialize_test_environment(settings, test_env=None): |
|
73 | 55 | if test_env is None: |
|
74 | 56 | test_env = not int(os.environ.get('RC_NO_TMP_PATH', 0)) |
|
75 | 57 | |
|
76 | 58 | from rhodecode.lib.utils import ( |
|
77 | 59 | create_test_directory, create_test_database, create_test_repositories, |
|
78 | 60 | create_test_index) |
|
79 | 61 | from rhodecode.tests import TESTS_TMP_PATH |
|
80 | 62 | # test repos |
|
81 | 63 | if test_env: |
|
82 | 64 | create_test_directory(TESTS_TMP_PATH) |
|
83 | 65 | create_test_database(TESTS_TMP_PATH, settings) |
|
84 | 66 | create_test_repositories(TESTS_TMP_PATH, settings) |
|
85 | 67 | create_test_index(TESTS_TMP_PATH, settings) |
|
86 | 68 | |
|
87 | 69 | |
|
88 | 70 | def get_vcs_server_protocol(config): |
|
89 | 71 | return config['vcs.server.protocol'] |
|
90 | 72 | |
|
91 | 73 | |
|
92 | 74 | def set_instance_id(config): |
|
93 | 75 | """ Sets a dynamic generated config['instance_id'] if missing or '*' """ |
|
94 | 76 | |
|
95 | 77 | config['instance_id'] = config.get('instance_id') or '' |
|
96 | 78 | if config['instance_id'] == '*' or not config['instance_id']: |
|
97 | 79 | _platform_id = platform.uname()[1] or 'instance' |
|
98 | 80 | config['instance_id'] = '%s-%s' % (_platform_id, os.getpid()) |
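Note: set_instance_id above is the fallback used when the configuration
has instance_id = * or leaves it empty. The same logic, lifted verbatim
into a standalone snippet to show the generated value:

    import os
    import platform

    def set_instance_id(config):
        config['instance_id'] = config.get('instance_id') or ''
        if config['instance_id'] == '*' or not config['instance_id']:
            _platform_id = platform.uname()[1] or 'instance'
            config['instance_id'] = '%s-%s' % (_platform_id, os.getpid())

    cfg = {'instance_id': '*'}
    set_instance_id(cfg)
    print(cfg['instance_id'])   # e.g. 'myhost-12345'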
@@ -1,237 +1,236 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2010-2017 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | """ |
|
21 | 21 | celery libs for RhodeCode |
|
22 | 22 | """ |
|
23 | 23 | |
|
24 | 24 | |
|
25 | 25 | import pylons |
|
26 | 26 | import socket |
|
27 | 27 | import logging |
|
28 | 28 | |
|
29 | 29 | import rhodecode |
|
30 | 30 | |
|
31 | 31 | from os.path import join as jn |
|
32 | 32 | from pylons import config |
|
33 | 33 | from celery.task import Task |
|
34 | 34 | from pyramid.request import Request |
|
35 | 35 | from pyramid.scripting import prepare |
|
36 | 36 | from pyramid.threadlocal import get_current_request |
|
37 | 37 | |
|
38 | 38 | from decorator import decorator |
|
39 | 39 | |
|
40 | 40 | from zope.cachedescriptors.property import Lazy as LazyProperty |
|
41 | 41 | |
|
42 | 42 | from rhodecode.config import utils |
|
43 | 43 | from rhodecode.lib.utils2 import ( |
|
44 | 44 | safe_str, md5_safe, aslist, get_routes_generator_for_server_url, |
|
45 | 45 | get_server_url) |
|
46 | 46 | from rhodecode.lib.pidlock import DaemonLock, LockHeld |
|
47 | 47 | from rhodecode.lib.vcs import connect_vcs |
|
48 | 48 | from rhodecode.model import meta |
|
49 | 49 | from rhodecode.lib.auth import AuthUser |
|
50 | 50 | |
|
51 | 51 | log = logging.getLogger(__name__) |
|
52 | 52 | |
|
53 | 53 | |
|
54 | 54 | class ResultWrapper(object): |
|
55 | 55 | def __init__(self, task): |
|
56 | 56 | self.task = task |
|
57 | 57 | |
|
58 | 58 | @LazyProperty |
|
59 | 59 | def result(self): |
|
60 | 60 | return self.task |
|
61 | 61 | |
|
62 | 62 | |
|
63 | 63 | class RhodecodeCeleryTask(Task): |
|
64 | 64 | """ |
|
65 | 65 | This is a celery task which will create a rhodecode app instance context |
|
66 | 66 | for the task, patch pyramid + pylons threadlocals with the original request |
|
67 | 67 | that created the task and also add the user to the context. |
|
68 | 68 | |
|
69 | 69 | This class as a whole should be removed once the pylons port is complete |
|
70 | 70 | and a pyramid only solution for celery is implemented as per issue #4139 |
|
71 | 71 | """ |
|
72 | 72 | |
|
73 | 73 | def apply_async(self, args=None, kwargs=None, task_id=None, producer=None, |
|
74 | 74 | link=None, link_error=None, **options): |
|
75 | 75 | """ queue the job to run (we are in web request context here) """ |
|
76 | 76 | |
|
77 | 77 | request = get_current_request() |
|
78 | 78 | |
|
79 | 79 | if hasattr(request, 'user'): |
|
80 | 80 | ip_addr = request.user.ip_addr |
|
81 | 81 | user_id = request.user.user_id |
|
82 | 82 | elif hasattr(request, 'rpc_params'): |
|
83 | 83 | # TODO(marcink) remove when migration is finished |
|
84 | 84 | # api specific call on Pyramid. |
|
85 | 85 | ip_addr = request.rpc_params['apiuser'].ip_addr |
|
86 | 86 | user_id = request.rpc_params['apiuser'].user_id |
|
87 | 87 | else: |
|
88 | 88 | raise Exception('Unable to fetch data from request: {}'.format( |
|
89 | 89 | request)) |
|
90 | 90 | |
|
91 | 91 | if request: |
|
92 | 92 | # we hook into kwargs since it is the only way to pass our data to |
|
93 | 93 | # the celery worker in celery 2.2 |
|
94 | 94 | kwargs.update({ |
|
95 | 95 | '_rhodecode_proxy_data': { |
|
96 | 96 | 'environ': { |
|
97 | 97 | 'PATH_INFO': request.environ['PATH_INFO'], |
|
98 | 98 | 'SCRIPT_NAME': request.environ['SCRIPT_NAME'], |
|
99 | 99 | 'HTTP_HOST': request.environ.get('HTTP_HOST', |
|
100 | 100 | request.environ['SERVER_NAME']), |
|
101 | 101 | 'SERVER_NAME': request.environ['SERVER_NAME'], |
|
102 | 102 | 'SERVER_PORT': request.environ['SERVER_PORT'], |
|
103 | 103 | 'wsgi.url_scheme': request.environ['wsgi.url_scheme'], |
|
104 | 104 | }, |
|
105 | 105 | 'auth_user': { |
|
106 | 106 | 'ip_addr': ip_addr, |
|
107 | 107 | 'user_id': user_id |
|
108 | 108 | }, |
|
109 | 109 | } |
|
110 | 110 | }) |
|
111 | 111 | return super(RhodecodeCeleryTask, self).apply_async( |
|
112 | 112 | args, kwargs, task_id, producer, link, link_error, **options) |
|
113 | 113 | |
|
114 | 114 | def __call__(self, *args, **kwargs): |
|
115 | 115 | """ rebuild the context and then run task on celery worker """ |
|
116 | 116 | proxy_data = kwargs.pop('_rhodecode_proxy_data', {}) |
|
117 | 117 | |
|
118 | 118 | if not proxy_data: |
|
119 | 119 | return super(RhodecodeCeleryTask, self).__call__(*args, **kwargs) |
|
120 | 120 | |
|
121 | 121 | log.debug('using celery proxy data to run task: %r', proxy_data) |
|
122 | 122 | |
|
123 | 123 | from rhodecode.config.routing import make_map |
|
124 | 124 | |
|
125 | 125 | request = Request.blank('/', environ=proxy_data['environ']) |
|
126 | 126 | request.user = AuthUser(user_id=proxy_data['auth_user']['user_id'], |
|
127 | 127 | ip_addr=proxy_data['auth_user']['ip_addr']) |
|
128 | 128 | |
|
129 | 129 | pyramid_request = prepare(request) # set pyramid threadlocal request |
|
130 | 130 | |
|
131 | 131 | # pylons routing |
|
132 | 132 | if not rhodecode.CONFIG.get('routes.map'): |
|
133 | 133 | rhodecode.CONFIG['routes.map'] = make_map(config) |
|
134 | 134 | pylons.url._push_object(get_routes_generator_for_server_url( |
|
135 | 135 | get_server_url(request.environ) |
|
136 | 136 | )) |
|
137 | 137 | |
|
138 | 138 | try: |
|
139 | 139 | return super(RhodecodeCeleryTask, self).__call__(*args, **kwargs) |
|
140 | 140 | finally: |
|
141 | 141 | pyramid_request['closer']() |
|
142 | 142 | pylons.url._pop_object() |
|
143 | 143 | |
|
144 | 144 | |
|
145 | 145 | def run_task(task, *args, **kwargs): |
|
146 | 146 | if rhodecode.CELERY_ENABLED: |
|
147 | 147 | celery_is_up = False |
|
148 | 148 | try: |
|
149 | 149 | t = task.apply_async(args=args, kwargs=kwargs) |
|
150 | 150 | log.info('running task %s:%s', t.task_id, task) |
|
151 | 151 | celery_is_up = True |
|
152 | 152 | return t |
|
153 | 153 | |
|
154 | 154 | except socket.error as e: |
|
155 | 155 | if isinstance(e, IOError) and e.errno == 111: |
|
156 | 156 | log.error('Unable to connect to celeryd. Sync execution') |
|
157 | 157 | else: |
|
158 | 158 | log.exception("Exception while connecting to celeryd.") |
|
159 | 159 | except KeyError as e: |
|
160 | 160 | log.error('Unable to connect to celeryd. Sync execution') |
|
161 | 161 | except Exception as e: |
|
162 | 162 | log.exception( |
|
163 | 163 | "Exception while trying to run task asynchronous. " |
|
164 | 164 | "Fallback to sync execution.") |
|
165 | 165 | |
|
166 | 166 | # keep in mind there maybe a subtle race condition where something |
|
167 | 167 | # depending on rhodecode.CELERY_ENABLED such as @dbsession decorator |
|
168 | 168 | # will see CELERY_ENABLED as True before this has a chance to set False |
|
169 | 169 | rhodecode.CELERY_ENABLED = celery_is_up |
|
170 | 170 | else: |
|
171 | 171 | log.debug('executing task %s in sync mode', task) |
|
172 | 172 | return ResultWrapper(task(*args, **kwargs)) |
|
173 | 173 | |
|
174 | 174 | |
|
175 | 175 | def __get_lockkey(func, *fargs, **fkwargs): |
|
176 | 176 | params = list(fargs) |
|
177 | 177 | params.extend(['%s-%s' % ar for ar in fkwargs.items()]) |
|
178 | 178 | |
|
179 | 179 | func_name = str(func.__name__) if hasattr(func, '__name__') else str(func) |
|
180 | 180 | _lock_key = func_name + '-' + '-'.join(map(safe_str, params)) |
|
181 | 181 | return 'task_%s.lock' % (md5_safe(_lock_key),) |
|
182 | 182 | |
|
183 | 183 | |
|
184 | 184 | def locked_task(func): |
|
185 | 185 | def __wrapper(func, *fargs, **fkwargs): |
|
186 | 186 | lockkey = __get_lockkey(func, *fargs, **fkwargs) |
|
187 | 187 | lockkey_path = config['app_conf']['cache_dir'] |
|
188 | 188 | |
|
189 | 189 | log.info('running task with lockkey %s' % lockkey) |
|
190 | 190 | try: |
|
191 | 191 | l = DaemonLock(file_=jn(lockkey_path, lockkey)) |
|
192 | 192 | ret = func(*fargs, **fkwargs) |
|
193 | 193 | l.release() |
|
194 | 194 | return ret |
|
195 | 195 | except LockHeld: |
|
196 | 196 | log.info('LockHeld') |
|
197 | 197 | return 'Task with key %s already running' % lockkey |
|
198 | 198 | |
|
199 | 199 | return decorator(__wrapper, func) |
|
200 | 200 | |
|
201 | 201 | |
|
202 | 202 | def get_session(): |
|
203 | 203 | if rhodecode.CELERY_ENABLED: |
|
204 | 204 | utils.initialize_database(config) |
|
205 | 205 | sa = meta.Session() |
|
206 | 206 | return sa |
|
207 | 207 | |
|
208 | 208 | |
|
209 | 209 | def dbsession(func): |
|
210 | 210 | def __wrapper(func, *fargs, **fkwargs): |
|
211 | 211 | try: |
|
212 | 212 | ret = func(*fargs, **fkwargs) |
|
213 | 213 | return ret |
|
214 | 214 | finally: |
|
215 | 215 | if rhodecode.CELERY_ENABLED and not rhodecode.CELERY_EAGER: |
|
216 | 216 | meta.Session.remove() |
|
217 | 217 | |
|
218 | 218 | return decorator(__wrapper, func) |
|
219 | 219 | |
|
220 | 220 | |
|
221 | 221 | def vcsconnection(func): |
|
222 | 222 | def __wrapper(func, *fargs, **fkwargs): |
|
223 | 223 | if rhodecode.CELERY_ENABLED and not rhodecode.CELERY_EAGER: |
|
224 | 224 | settings = rhodecode.PYRAMID_SETTINGS |
|
225 | 225 | backends = settings['vcs.backends'] |
|
226 | 226 | for alias in rhodecode.BACKENDS.keys(): |
|
227 | 227 | if alias not in backends: |
|
228 | 228 | del rhodecode.BACKENDS[alias] |
|
229 | utils.configure_pyro4(settings) | |
|
230 | 229 | utils.configure_vcs(settings) |
|
231 | 230 | connect_vcs( |
|
232 | 231 | settings['vcs.server'], |
|
233 | 232 | utils.get_vcs_server_protocol(settings)) |
|
234 | 233 | ret = func(*fargs, **fkwargs) |
|
235 | 234 | return ret |
|
236 | 235 | |
|
237 | 236 | return decorator(__wrapper, func) |
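Note: RhodecodeCeleryTask.apply_async above smuggles the originating web
request through task kwargs, since celery 2.2 offers no other channel. A
sketch of the payload shape; the field names are taken from the code, the
values are illustrative:

    proxy_data = {
        'environ': {
            'PATH_INFO': '/',
            'SCRIPT_NAME': '',
            'HTTP_HOST': 'rhodecode.local',
            'SERVER_NAME': 'rhodecode.local',
            'SERVER_PORT': '80',
            'wsgi.url_scheme': 'http',
        },
        'auth_user': {'ip_addr': '127.0.0.1', 'user_id': 2},
    }
    kwargs = {'_rhodecode_proxy_data': proxy_data}
    # the worker side pops '_rhodecode_proxy_data' in __call__ and
    # rebuilds a pyramid request plus AuthUser from it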
@@ -1,278 +1,238 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2010-2017 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | import json |
|
22 | 22 | import logging |
|
23 | 23 | import urlparse |
|
24 | 24 | import threading |
|
25 | 25 | from BaseHTTPServer import BaseHTTPRequestHandler |
|
26 | 26 | from SocketServer import TCPServer |
|
27 | 27 | from routes.util import URLGenerator |
|
28 | 28 | |
|
29 | import Pyro4 | |
|
30 | 29 | import pylons |
|
31 | 30 | import rhodecode |
|
32 | 31 | |
|
33 | 32 | from rhodecode.model import meta |
|
34 | 33 | from rhodecode.lib import hooks_base |
|
35 | 34 | from rhodecode.lib.utils2 import ( |
|
36 | 35 | AttributeDict, safe_str, get_routes_generator_for_server_url) |
|
37 | 36 | |
|
38 | 37 | |
|
39 | 38 | log = logging.getLogger(__name__) |
|
40 | 39 | |
|
41 | 40 | |
|
42 | 41 | class HooksHttpHandler(BaseHTTPRequestHandler): |
|
43 | 42 | def do_POST(self): |
|
44 | 43 | method, extras = self._read_request() |
|
45 | 44 | try: |
|
46 | 45 | result = self._call_hook(method, extras) |
|
47 | 46 | except Exception as e: |
|
48 | 47 | result = { |
|
49 | 48 | 'exception': e.__class__.__name__, |
|
50 | 49 | 'exception_args': e.args |
|
51 | 50 | } |
|
52 | 51 | self._write_response(result) |
|
53 | 52 | |
|
54 | 53 | def _read_request(self): |
|
55 | 54 | length = int(self.headers['Content-Length']) |
|
56 | 55 | body = self.rfile.read(length).decode('utf-8') |
|
57 | 56 | data = json.loads(body) |
|
58 | 57 | return data['method'], data['extras'] |
|
59 | 58 | |
|
60 | 59 | def _write_response(self, result): |
|
61 | 60 | self.send_response(200) |
|
62 | 61 | self.send_header("Content-type", "text/json") |
|
63 | 62 | self.end_headers() |
|
64 | 63 | self.wfile.write(json.dumps(result)) |
|
65 | 64 | |
|
66 | 65 | def _call_hook(self, method, extras): |
|
67 | 66 | hooks = Hooks() |
|
68 | 67 | try: |
|
69 | 68 | result = getattr(hooks, method)(extras) |
|
70 | 69 | finally: |
|
71 | 70 | meta.Session.remove() |
|
72 | 71 | return result |
|
73 | 72 | |
|
74 | 73 | def log_message(self, format, *args): |
|
75 | 74 | """ |
|
76 | This is an overriden method of BaseHTTPRequestHandler which logs using | |
|
75 | This is an overridden method of BaseHTTPRequestHandler which logs using | |
|
77 | 76 | logging library instead of writing directly to stderr. |
|
78 | 77 | """ |
|
79 | 78 | |
|
80 | 79 | message = format % args |
|
81 | 80 | |
|
82 | 81 | # TODO: mikhail: add different log levels support |
|
83 | 82 | log.debug( |
|
84 | 83 | "%s - - [%s] %s", self.client_address[0], |
|
85 | 84 | self.log_date_time_string(), message) |
|
86 | 85 | |
|
87 | 86 | |
|
88 | 87 | class DummyHooksCallbackDaemon(object): |
|
89 | 88 | def __init__(self): |
|
90 | 89 | self.hooks_module = Hooks.__module__ |
|
91 | 90 | |
|
92 | 91 | def __enter__(self): |
|
93 | 92 | log.debug('Running dummy hooks callback daemon') |
|
94 | 93 | return self |
|
95 | 94 | |
|
96 | 95 | def __exit__(self, exc_type, exc_val, exc_tb): |
|
97 | 96 | log.debug('Exiting dummy hooks callback daemon') |
|
98 | 97 | |
|
99 | 98 | |
|
100 | 99 | class ThreadedHookCallbackDaemon(object): |
|
101 | 100 | |
|
102 | 101 | _callback_thread = None |
|
103 | 102 | _daemon = None |
|
104 | 103 | _done = False |
|
105 | 104 | |
|
106 | 105 | def __init__(self): |
|
107 | 106 | self._prepare() |
|
108 | 107 | |
|
109 | 108 | def __enter__(self): |
|
110 | 109 | self._run() |
|
111 | 110 | return self |
|
112 | 111 | |
|
113 | 112 | def __exit__(self, exc_type, exc_val, exc_tb): |
|
114 | 113 | self._stop() |
|
115 | 114 | |
|
116 | 115 | def _prepare(self): |
|
117 | 116 | raise NotImplementedError() |
|
118 | 117 | |
|
119 | 118 | def _run(self): |
|
120 | 119 | raise NotImplementedError() |
|
121 | 120 | |
|
122 | 121 | def _stop(self): |
|
123 | 122 | raise NotImplementedError() |
|
124 | 123 | |
|
125 | 124 | |
|
126 | class Pyro4HooksCallbackDaemon(ThreadedHookCallbackDaemon): | |
|
127 | """ | |
|
128 | Context manager which will run a callback daemon in a background thread. | |
|
129 | """ | |
|
130 | ||
|
131 | hooks_uri = None | |
|
132 | ||
|
133 | def _prepare(self): | |
|
134 | log.debug("Preparing callback daemon and registering hook object") | |
|
135 | self._daemon = Pyro4.Daemon() | |
|
136 | hooks_interface = Hooks() | |
|
137 | self.hooks_uri = str(self._daemon.register(hooks_interface)) | |
|
138 | log.debug("Hooks uri is: %s", self.hooks_uri) | |
|
139 | ||
|
140 | def _run(self): | |
|
141 | log.debug("Running event loop of callback daemon in background thread") | |
|
142 | callback_thread = threading.Thread( | |
|
143 | target=self._daemon.requestLoop, | |
|
144 | kwargs={'loopCondition': lambda: not self._done}) | |
|
145 | callback_thread.daemon = True | |
|
146 | callback_thread.start() | |
|
147 | self._callback_thread = callback_thread | |
|
148 | ||
|
149 | def _stop(self): | |
|
150 | log.debug("Waiting for background thread to finish.") | |
|
151 | self._done = True | |
|
152 | self._callback_thread.join() | |
|
153 | self._daemon.close() | |
|
154 | self._daemon = None | |
|
155 | self._callback_thread = None | |
|
156 | ||
|
157 | ||
|
158 | 125 | class HttpHooksCallbackDaemon(ThreadedHookCallbackDaemon): |
|
159 | 126 | """ |
|
160 | 127 | Context manager which will run a callback daemon in a background thread. |
|
161 | 128 | """ |
|
162 | 129 | |
|
163 | 130 | hooks_uri = None |
|
164 | 131 | |
|
165 | 132 | IP_ADDRESS = '127.0.0.1' |
|
166 | 133 | |
|
167 | 134 | # From Python docs: Polling reduces our responsiveness to a shutdown |
|
168 | 135 | # request and wastes cpu at all other times. |
|
169 | 136 | POLL_INTERVAL = 0.1 |
|
170 | 137 | |
|
171 | 138 | def _prepare(self): |
|
172 | 139 | log.debug("Preparing callback daemon and registering hook object") |
|
173 | 140 | |
|
174 | 141 | self._done = False |
|
175 | 142 | self._daemon = TCPServer((self.IP_ADDRESS, 0), HooksHttpHandler) |
|
176 | 143 | _, port = self._daemon.server_address |
|
177 | 144 | self.hooks_uri = '{}:{}'.format(self.IP_ADDRESS, port) |
|
178 | 145 | |
|
179 | 146 | log.debug("Hooks uri is: %s", self.hooks_uri) |
|
180 | 147 | |
|
181 | 148 | def _run(self): |
|
182 | 149 | log.debug("Running event loop of callback daemon in background thread") |
|
183 | 150 | callback_thread = threading.Thread( |
|
184 | 151 | target=self._daemon.serve_forever, |
|
185 | 152 | kwargs={'poll_interval': self.POLL_INTERVAL}) |
|
186 | 153 | callback_thread.daemon = True |
|
187 | 154 | callback_thread.start() |
|
188 | 155 | self._callback_thread = callback_thread |
|
189 | 156 | |
|
190 | 157 | def _stop(self): |
|
191 | 158 | log.debug("Waiting for background thread to finish.") |
|
192 | 159 | self._daemon.shutdown() |
|
193 | 160 | self._callback_thread.join() |
|
194 | 161 | self._daemon = None |
|
195 | 162 | self._callback_thread = None |
|
196 | 163 | |
|
197 | 164 | |
|
198 | 165 | def prepare_callback_daemon(extras, protocol, use_direct_calls): |
|
199 | 166 | callback_daemon = None |
|
200 | 167 | |
|
201 | 168 | if use_direct_calls: |
|
202 | 169 | callback_daemon = DummyHooksCallbackDaemon() |
|
203 | 170 | extras['hooks_module'] = callback_daemon.hooks_module |
|
204 | 171 | else: |
|
205 | if protocol == 'pyro4': | |
|
206 | callback_daemon = Pyro4HooksCallbackDaemon() | |
|
207 | elif protocol == 'http': | |
|
172 | if protocol == 'http': | |
|
208 | 173 | callback_daemon = HttpHooksCallbackDaemon() |
|
209 | 174 | else: |
|
210 | 175 | log.error('Unsupported callback daemon protocol "%s"', protocol) |
|
211 | 176 | raise Exception('Unsupported callback daemon protocol.') |
|
212 | 177 | |
|
213 | 178 | extras['hooks_uri'] = callback_daemon.hooks_uri |
|
214 | 179 | extras['hooks_protocol'] = protocol |
|
215 | 180 | |
|
216 | 181 | return callback_daemon, extras |
|
217 | 182 | |
|
218 | 183 | |
|
219 | 184 | class Hooks(object): |
|
220 | 185 | """ |
|
221 | 186 | Exposes the hooks for remote call backs |
|
222 | 187 | """ |
|
223 | 188 | |
|
224 | @Pyro4.callback | |
|
225 | 189 | def repo_size(self, extras): |
|
226 | 190 | log.debug("Called repo_size of Hooks object") |
|
227 | 191 | return self._call_hook(hooks_base.repo_size, extras) |
|
228 | 192 | |
|
229 | @Pyro4.callback | |
|
230 | 193 | def pre_pull(self, extras): |
|
231 | 194 | log.debug("Called pre_pull of Hooks object") |
|
232 | 195 | return self._call_hook(hooks_base.pre_pull, extras) |
|
233 | 196 | |
|
234 | @Pyro4.callback | |
|
235 | 197 | def post_pull(self, extras): |
|
236 | 198 | log.debug("Called post_pull of Hooks object") |
|
237 | 199 | return self._call_hook(hooks_base.post_pull, extras) |
|
238 | 200 | |
|
239 | @Pyro4.callback | |
|
240 | 201 | def pre_push(self, extras): |
|
241 | 202 | log.debug("Called pre_push of Hooks object") |
|
242 | 203 | return self._call_hook(hooks_base.pre_push, extras) |
|
243 | 204 | |
|
244 | @Pyro4.callback | |
|
245 | 205 | def post_push(self, extras): |
|
246 | 206 | log.debug("Called post_push of Hooks object") |
|
247 | 207 | return self._call_hook(hooks_base.post_push, extras) |
|
248 | 208 | |
|
249 | 209 | def _call_hook(self, hook, extras): |
|
250 | 210 | extras = AttributeDict(extras) |
|
251 | 211 | pylons_router = get_routes_generator_for_server_url(extras.server_url) |
|
252 | 212 | pylons.url._push_object(pylons_router) |
|
253 | 213 | |
|
254 | 214 | try: |
|
255 | 215 | result = hook(extras) |
|
256 | 216 | except Exception as error: |
|
257 | 217 | log.exception('Exception when handling hook %s', hook) |
|
258 | 218 | error_args = error.args |
|
259 | 219 | return { |
|
260 | 220 | 'status': 128, |
|
261 | 221 | 'output': '', |
|
262 | 222 | 'exception': type(error).__name__, |
|
263 | 223 | 'exception_args': error_args, |
|
264 | 224 | } |
|
265 | 225 | finally: |
|
266 | 226 | pylons.url._pop_object() |
|
267 | 227 | meta.Session.remove() |
|
268 | 228 | |
|
269 | 229 | return { |
|
270 | 230 | 'status': result.status, |
|
271 | 231 | 'output': result.output, |
|
272 | 232 | } |
|
273 | 233 | |
|
274 | 234 | def __enter__(self): |
|
275 | 235 | return self |
|
276 | 236 | |
|
277 | 237 | def __exit__(self, exc_type, exc_val, exc_tb): |
|
278 | 238 | pass |
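
Reading _call_hook above, the result contract is a plain dict so it stays serializable across the callback channel; a sketch of how a caller might consume it (extras as prepared earlier):

    with Hooks() as hooks:
        result = hooks.pre_push(extras)

    if 'exception' in result:  # the hook crashed; status 128 mirrors a fatal exit
        raise Exception('%s: %r' % (result['exception'], result['exception_args']))
    print result['status'], result['output']
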
@@ -1,136 +1,141 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2010-2017 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | import sys |
|
22 | 22 | import logging |
|
23 | 23 | |
|
24 | 24 | |
|
25 | 25 | BLACK, RED, GREEN, YELLOW, BLUE, MAGENTA, CYAN, WHITE = xrange(30, 38) |
|
26 | 26 | |
|
27 | 27 | # Sequences |
|
28 | 28 | RESET_SEQ = "\033[0m" |
|
29 | 29 | COLOR_SEQ = "\033[0;%dm" |
|
30 | 30 | BOLD_SEQ = "\033[1m" |
|
31 | 31 | |
|
32 | 32 | COLORS = { |
|
33 | 33 | 'CRITICAL': MAGENTA, |
|
34 | 34 | 'ERROR': RED, |
|
35 | 35 | 'WARNING': CYAN, |
|
36 | 36 | 'INFO': GREEN, |
|
37 | 37 | 'DEBUG': BLUE, |
|
38 | 38 | 'SQL': YELLOW |
|
39 | 39 | } |
|
40 | 40 | |
|
41 | 41 | |
|
42 | 42 | def one_space_trim(s): |
|
43 | 43 | if s.find(" ") == -1: |
|
44 | 44 | return s |
|
45 | 45 | else: |
|
46 | 46 | s = s.replace(' ', ' ') |
|
47 | 47 | return one_space_trim(s) |
|
48 | 48 | |
|
49 | 49 | |
|
50 | 50 | def format_sql(sql): |
|
51 | 51 | sql = sql.replace('\n', '') |
|
52 | 52 | sql = one_space_trim(sql) |
|
53 | 53 | sql = sql\ |
|
54 | 54 | .replace(',', ',\n\t')\ |
|
55 | 55 | .replace('SELECT', '\n\tSELECT \n\t')\ |
|
56 | 56 | .replace('UPDATE', '\n\tUPDATE \n\t')\ |
|
57 | 57 | .replace('DELETE', '\n\tDELETE \n\t')\ |
|
58 | 58 | .replace('FROM', '\n\tFROM')\ |
|
59 | 59 | .replace('ORDER BY', '\n\tORDER BY')\ |
|
60 | 60 | .replace('LIMIT', '\n\tLIMIT')\ |
|
61 | 61 | .replace('WHERE', '\n\tWHERE')\ |
|
62 | 62 | .replace('AND', '\n\tAND')\ |
|
63 | 63 | .replace('LEFT', '\n\tLEFT')\ |
|
64 | 64 | .replace('INNER', '\n\tINNER')\ |
|
65 | 65 | .replace('INSERT', '\n\tINSERT')\ |
|
66 | 66 | .replace('DELETE', '\n\tDELETE') |
|
67 | 67 | return sql |
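
For illustration, format_sql reflows a one-line statement by breaking after commas and before the recognized keywords; the output below is approximate:

    sql = "SELECT id,  name FROM users WHERE active = true ORDER BY name LIMIT 10"
    print format_sql(sql)
    # roughly:
    #   SELECT
    #   id,
    #    name
    #   FROM users
    #   WHERE active = true
    #   ORDER BY name
    #   LIMIT 10
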
|
68 | 68 | |
|
69 | 69 | |
|
70 | class Pyro4AwareFormatter(logging.Formatter): | |
|
70 | class ExceptionAwareFormatter(logging.Formatter): | |
|
71 | 71 | """ |
|
72 | Extended logging formatter which prints out Pyro4 remote tracebacks. | |
|
72 | Extended logging formatter which prints out remote tracebacks. | |
|
73 | 73 | """ |
|
74 | 74 | |
|
75 | 75 | def formatException(self, ei): |
|
76 | 76 | ex_type, ex_value, ex_tb = ei |
|
77 | 77 | |
|
78 | 78 | local_tb = logging.Formatter.formatException(self, ei) |
|
79 | 79 | if hasattr(ex_value, '_vcs_server_traceback'): |
|
80 | 80 | |
|
81 | 81 | def formatRemoteTraceback(remote_tb_lines): |
|
82 | 82 | result = ["\n +--- This exception occurred remotely on VCSServer - Remote traceback:\n\n"]
|
83 | 83 | result.append(remote_tb_lines) |
|
84 | 84 | result.append("\n +--- End of remote traceback\n") |
|
85 | 85 | return result |
|
86 | 86 | |
|
87 | 87 | try: |
|
88 | 88 | if ex_type is not None and ex_value is None and ex_tb is None: |
|
89 | # possible old (3.x) call syntax where caller is only providing exception object | |
|
89 | # possible old (3.x) call syntax where caller is only | |
|
90 | # providing exception object | |
|
90 | 91 | if type(ex_type) is not type: |
|
91 | 92 | raise TypeError( |
|
92 | "invalid argument: ex_type should be an exception type, or just supply no arguments at all") | |
|
93 | "invalid argument: ex_type should be an exception " | |
|
94 | "type, or just supply no arguments at all") | |
|
93 | 95 | if ex_type is None and ex_tb is None: |
|
94 | 96 | ex_type, ex_value, ex_tb = sys.exc_info() |
|
95 | 97 | |
|
96 | 98 | remote_tb = getattr(ex_value, "_vcs_server_traceback", None) |
|
97 | 99 | |
|
98 | 100 | if remote_tb: |
|
99 | 101 | remote_tb = formatRemoteTraceback(remote_tb) |
|
100 | 102 | return local_tb + ''.join(remote_tb) |
|
101 | 103 | finally: |
|
102 | 104 | # clean up cycle to traceback, to allow proper GC |
|
103 | 105 | del ex_type, ex_value, ex_tb |
|
104 | 106 | |
|
105 | 107 | return local_tb |
|
106 | 108 | |
|
107 | 109 | |
|
108 | class ColorFormatter(Pyro4AwareFormatter): | |
|
110 | class ColorFormatter(ExceptionAwareFormatter): | |
|
109 | 111 | |
|
110 | 112 | def format(self, record): |
|
111 | 113 | """ |
|
112 | 114 | Changes record's levelname to use with COLORS enum |
|
113 | 115 | """ |
|
114 | 116 | |
|
115 | 117 | levelname = record.levelname |
|
116 | 118 | start = COLOR_SEQ % (COLORS[levelname]) |
|
117 | 119 | def_record = logging.Formatter.format(self, record) |
|
118 | 120 | end = RESET_SEQ |
|
119 | 121 | |
|
120 | 122 | colored_record = ''.join([start, def_record, end]) |
|
121 | 123 | return colored_record |
|
122 | 124 | |
|
123 | 125 | |
|
124 | 126 | class ColorFormatterSql(logging.Formatter): |
|
125 | 127 | |
|
126 | 128 | def format(self, record): |
|
127 | 129 | """ |
|
128 | 130 | Changes record's levelname to use with COLORS enum |
|
129 | 131 | """ |
|
130 | 132 | |
|
131 | 133 | start = COLOR_SEQ % (COLORS['SQL']) |
|
132 | 134 | def_record = format_sql(logging.Formatter.format(self, record)) |
|
133 | 135 | end = RESET_SEQ |
|
134 | 136 | |
|
135 | 137 | colored_record = ''.join([start, def_record, end]) |
|
136 | 138 | return colored_record |
|
139 | ||
|
140 | # marcink: needs to stay with this name for backward .ini compatibility | |
|
141 | Pyro4AwareFormatter = ExceptionAwareFormatter |
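
A minimal wiring sketch; in practice the class is referenced by its dotted path from the logging .ini (hence the alias above), but programmatic setup shows the effect:

    import logging
    import sys

    handler = logging.StreamHandler(sys.stderr)
    handler.setFormatter(ColorFormatter('%(asctime)s %(levelname)s %(message)s'))

    log = logging.getLogger('demo')
    log.addHandler(handler)
    log.setLevel(logging.DEBUG)
    log.error('wrapped in COLOR_SEQ for RED, then RESET_SEQ')
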
@@ -1,529 +1,526 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2014-2017 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | """ |
|
22 | 22 | SimpleVCS middleware for handling protocol requests (push/clone etc.)
|
23 | 23 | It's implemented with a basic auth function
|
24 | 24 | """ |
|
25 | 25 | |
|
26 | 26 | import os |
|
27 | 27 | import logging |
|
28 | 28 | import importlib |
|
29 | 29 | import re |
|
30 | 30 | from functools import wraps |
|
31 | 31 | |
|
32 | 32 | from paste.httpheaders import REMOTE_USER, AUTH_TYPE |
|
33 | 33 | from webob.exc import ( |
|
34 | 34 | HTTPNotFound, HTTPForbidden, HTTPNotAcceptable, HTTPInternalServerError) |
|
35 | 35 | |
|
36 | 36 | import rhodecode |
|
37 | 37 | from rhodecode.authentication.base import authenticate, VCS_TYPE |
|
38 | 38 | from rhodecode.lib.auth import AuthUser, HasPermissionAnyMiddleware |
|
39 | 39 | from rhodecode.lib.base import BasicAuth, get_ip_addr, vcs_operation_context |
|
40 | 40 | from rhodecode.lib.exceptions import ( |
|
41 | 41 | HTTPLockedRC, HTTPRequirementError, UserCreationError, |
|
42 | 42 | NotAllowedToCreateUserError) |
|
43 | 43 | from rhodecode.lib.hooks_daemon import prepare_callback_daemon |
|
44 | 44 | from rhodecode.lib.middleware import appenlight |
|
45 | from rhodecode.lib.middleware.utils import scm_app, scm_app_http | |
|
45 | from rhodecode.lib.middleware.utils import scm_app_http | |
|
46 | 46 | from rhodecode.lib.utils import ( |
|
47 | 47 | is_valid_repo, get_rhodecode_realm, get_rhodecode_base_path, SLUG_RE) |
|
48 | 48 | from rhodecode.lib.utils2 import safe_str, fix_PATH, str2bool, safe_unicode |
|
49 | 49 | from rhodecode.lib.vcs.conf import settings as vcs_settings |
|
50 | 50 | from rhodecode.lib.vcs.backends import base |
|
51 | 51 | from rhodecode.model import meta |
|
52 | 52 | from rhodecode.model.db import User, Repository, PullRequest |
|
53 | 53 | from rhodecode.model.scm import ScmModel |
|
54 | 54 | from rhodecode.model.pull_request import PullRequestModel |
|
55 | 55 | |
|
56 | 56 | |
|
57 | 57 | log = logging.getLogger(__name__) |
|
58 | 58 | |
|
59 | 59 | |
|
60 | 60 | def initialize_generator(factory): |
|
61 | 61 | """ |
|
62 | 62 | Initializes the returned generator by draining its first element. |
|
63 | 63 | |
|
64 | 64 | This can be used to give a generator an initializer, which is the code |
|
65 | 65 | up to the first yield statement. This decorator enforces that the first |
|
66 | 66 | produced element has the value ``"__init__"`` to make its special |
|
67 | 67 | purpose very explicit in the using code. |
|
68 | 68 | """ |
|
69 | 69 | |
|
70 | 70 | @wraps(factory) |
|
71 | 71 | def wrapper(*args, **kwargs): |
|
72 | 72 | gen = factory(*args, **kwargs) |
|
73 | 73 | try: |
|
74 | 74 | init = gen.next() |
|
75 | 75 | except StopIteration: |
|
76 | 76 | raise ValueError('Generator must yield at least one element.') |
|
77 | 77 | if init != "__init__": |
|
78 | 78 | raise ValueError('First yielded element must be "__init__".') |
|
79 | 79 | return gen |
|
80 | 80 | return wrapper |
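
An illustrative generator following the decorator's convention: the code before the first yield acts as an eager initializer, and the "__init__" marker is drained by the wrapper:

    @initialize_generator
    def stream(items):
        prepared = list(items)  # initializer: runs as soon as stream() is called
        yield "__init__"
        for item in prepared:
            yield item

    gen = stream([1, 2])        # initializer already executed here
    assert list(gen) == [1, 2]  # the marker element was consumed by the wrapper
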
|
81 | 81 | |
|
82 | 82 | |
|
83 | 83 | class SimpleVCS(object): |
|
84 | 84 | """Common functionality for SCM HTTP handlers.""" |
|
85 | 85 | |
|
86 | 86 | SCM = 'unknown' |
|
87 | 87 | |
|
88 | 88 | acl_repo_name = None |
|
89 | 89 | url_repo_name = None |
|
90 | 90 | vcs_repo_name = None |
|
91 | 91 | |
|
92 | 92 | # We have to handle requests to shadow repositories different than requests |
|
93 | 93 | # to normal repositories. Therefore we have to distinguish them. To do this |
|
94 | 94 | # we use this regex which will match only on URLs pointing to shadow |
|
95 | 95 | # repositories. |
|
96 | 96 | shadow_repo_re = re.compile( |
|
97 | 97 | '(?P<groups>(?:{slug_pat}/)*)' # repo groups |
|
98 | 98 | '(?P<target>{slug_pat})/' # target repo |
|
99 | 99 | 'pull-request/(?P<pr_id>\d+)/' # pull request |
|
100 | 100 | 'repository$' # shadow repo |
|
101 | 101 | .format(slug_pat=SLUG_RE.pattern)) |
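
For illustration, with hypothetical repository names the class-level pattern splits a shadow-repo URL like this, and leaves normal repo paths unmatched:

    match = SimpleVCS.shadow_repo_re.match(
        'RepoGroup/MyRepo/pull-request/3/repository')
    assert match.groupdict() == {
        'groups': 'RepoGroup/', 'target': 'MyRepo', 'pr_id': '3'}
    assert SimpleVCS.shadow_repo_re.match('RepoGroup/MyRepo') is None
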
|
102 | 102 | |
|
103 | 103 | def __init__(self, application, config, registry): |
|
104 | 104 | self.registry = registry |
|
105 | 105 | self.application = application |
|
106 | 106 | self.config = config |
|
107 | 107 | # re-populated by specialized middleware |
|
108 | 108 | self.repo_vcs_config = base.Config() |
|
109 | 109 | |
|
110 | 110 | # base path of repo locations |
|
111 | 111 | self.basepath = get_rhodecode_base_path() |
|
112 | 112 | # authenticate this VCS request using authfunc |
|
113 | 113 | auth_ret_code_detection = \ |
|
114 | 114 | str2bool(self.config.get('auth_ret_code_detection', False)) |
|
115 | 115 | self.authenticate = BasicAuth( |
|
116 | 116 | '', authenticate, registry, config.get('auth_ret_code'), |
|
117 | 117 | auth_ret_code_detection) |
|
118 | 118 | self.ip_addr = '0.0.0.0' |
|
119 | 119 | |
|
120 | 120 | def set_repo_names(self, environ): |
|
121 | 121 | """ |
|
122 | 122 | This will populate the attributes acl_repo_name, url_repo_name, |
|
123 | 123 | vcs_repo_name and is_shadow_repo. In case of requests to normal (non |
|
124 | 124 | shadow) repositories all names are equal. In case of requests to a |
|
125 | 125 | shadow repository the acl-name points to the target repo of the pull |
|
126 | 126 | request and the vcs-name points to the shadow repo file system path. |
|
127 | 127 | The url-name is always the URL used by the vcs client program. |
|
128 | 128 | |
|
129 | 129 | Example in case of a shadow repo: |
|
130 | 130 | acl_repo_name = RepoGroup/MyRepo |
|
131 | 131 | url_repo_name = RepoGroup/MyRepo/pull-request/3/repository |
|
132 | 132 | vcs_repo_name = /repo/base/path/RepoGroup/.__shadow_MyRepo_pr-3' |
|
133 | 133 | """ |
|
134 | 134 | # First we set the repo name from URL for all attributes. This is the |
|
135 | 135 | # default if handling normal (non shadow) repo requests. |
|
136 | 136 | self.url_repo_name = self._get_repository_name(environ) |
|
137 | 137 | self.acl_repo_name = self.vcs_repo_name = self.url_repo_name |
|
138 | 138 | self.is_shadow_repo = False |
|
139 | 139 | |
|
140 | 140 | # Check if this is a request to a shadow repository. |
|
141 | 141 | match = self.shadow_repo_re.match(self.url_repo_name) |
|
142 | 142 | if match: |
|
143 | 143 | match_dict = match.groupdict() |
|
144 | 144 | |
|
145 | 145 | # Build acl repo name from regex match. |
|
146 | 146 | acl_repo_name = safe_unicode('{groups}{target}'.format( |
|
147 | 147 | groups=match_dict['groups'] or '', |
|
148 | 148 | target=match_dict['target'])) |
|
149 | 149 | |
|
150 | 150 | # Retrieve pull request instance by ID from regex match. |
|
151 | 151 | pull_request = PullRequest.get(match_dict['pr_id']) |
|
152 | 152 | |
|
153 | 153 | # Only proceed if we got a pull request and if acl repo name from |
|
154 | 154 | # URL equals the target repo name of the pull request. |
|
155 | 155 | if pull_request and (acl_repo_name == |
|
156 | 156 | pull_request.target_repo.repo_name): |
|
157 | 157 | # Get file system path to shadow repository. |
|
158 | 158 | workspace_id = PullRequestModel()._workspace_id(pull_request) |
|
159 | 159 | target_vcs = pull_request.target_repo.scm_instance() |
|
160 | 160 | vcs_repo_name = target_vcs._get_shadow_repository_path( |
|
161 | 161 | workspace_id) |
|
162 | 162 | |
|
163 | 163 | # Store names for later usage. |
|
164 | 164 | self.vcs_repo_name = vcs_repo_name |
|
165 | 165 | self.acl_repo_name = acl_repo_name |
|
166 | 166 | self.is_shadow_repo = True |
|
167 | 167 | |
|
168 | 168 | log.debug('Setting all VCS repository names: %s', { |
|
169 | 169 | 'acl_repo_name': self.acl_repo_name, |
|
170 | 170 | 'url_repo_name': self.url_repo_name, |
|
171 | 171 | 'vcs_repo_name': self.vcs_repo_name, |
|
172 | 172 | }) |
|
173 | 173 | |
|
174 | 174 | @property |
|
175 | 175 | def scm_app(self): |
|
176 | 176 | custom_implementation = self.config['vcs.scm_app_implementation'] |
|
177 | 177 | if custom_implementation == 'http': |
|
178 | 178 | log.info('Using HTTP implementation of scm app.') |
|
179 | 179 | scm_app_impl = scm_app_http |
|
180 | elif custom_implementation == 'pyro4': | |
|
181 | log.info('Using Pyro implementation of scm app.') | |
|
182 | scm_app_impl = scm_app | |
|
183 | 180 | else: |
|
184 | 181 | log.info('Using custom implementation of scm_app: "{}"'.format( |
|
185 | 182 | custom_implementation)) |
|
186 | 183 | scm_app_impl = importlib.import_module(custom_implementation) |
|
187 | 184 | return scm_app_impl |
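
The else branch above implements a small plug-in point: any module named in vcs.scm_app_implementation is assumed to expose the same factory functions as scm_app_http. A hedged sketch with a hypothetical module name:

    import importlib

    custom_implementation = 'mycompany.custom_scm_app'  # hypothetical setting
    scm_app_impl = importlib.import_module(custom_implementation)
    # expected to provide create_hg_wsgi_app / create_git_wsgi_app,
    # with repo_path/repo_name/config as prepared by the middleware
    app = scm_app_impl.create_hg_wsgi_app(repo_path, repo_name, config)
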
|
188 | 185 | |
|
189 | 186 | def _get_by_id(self, repo_name): |
|
190 | 187 | """ |
|
191 | 188 | Gets a special pattern _<ID> from clone url and tries to replace it |
|
192 | 189 | with a repository_name for support of _<ID> non changeable urls |
|
193 | 190 | """ |
|
194 | 191 | |
|
195 | 192 | data = repo_name.split('/') |
|
196 | 193 | if len(data) >= 2: |
|
197 | 194 | from rhodecode.model.repo import RepoModel |
|
198 | 195 | by_id_match = RepoModel().get_repo_by_id(repo_name) |
|
199 | 196 | if by_id_match: |
|
200 | 197 | data[1] = by_id_match.repo_name |
|
201 | 198 | |
|
202 | 199 | return safe_str('/'.join(data)) |
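
For illustration, assuming a hypothetical repository with id 23 named group/repo, and assuming RepoModel.get_repo_by_id resolves the _<ID> marker, the helper keeps clone URLs stable across renames:

    path = vcs_handler._get_by_id('/_23/some/path')
    # -> '/group/repo/some/path'
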
|
203 | 200 | |
|
204 | 201 | def _invalidate_cache(self, repo_name): |
|
205 | 202 | """ |
|
206 | 203 | Sets the cache for this repository for invalidation on next access
|
207 | 204 | |
|
208 | 205 | :param repo_name: full repo name, also a cache key |
|
209 | 206 | """ |
|
210 | 207 | ScmModel().mark_for_invalidation(repo_name) |
|
211 | 208 | |
|
212 | 209 | def is_valid_and_existing_repo(self, repo_name, base_path, scm_type): |
|
213 | 210 | db_repo = Repository.get_by_repo_name(repo_name) |
|
214 | 211 | if not db_repo: |
|
215 | 212 | log.debug('Repository `%s` not found inside the database.', |
|
216 | 213 | repo_name) |
|
217 | 214 | return False |
|
218 | 215 | |
|
219 | 216 | if db_repo.repo_type != scm_type: |
|
220 | 217 | log.warning( |
|
221 | 218 | 'Repository `%s` has incorrect scm_type, expected %s got %s',
|
222 | 219 | repo_name, scm_type, db_repo.repo_type)
|
223 | 220 | return False |
|
224 | 221 | |
|
225 | 222 | return is_valid_repo(repo_name, base_path, explicit_scm=scm_type) |
|
226 | 223 | |
|
227 | 224 | def valid_and_active_user(self, user): |
|
228 | 225 | """ |
|
229 | 226 | Checks if that user is not empty, and if it's actually object it checks |
|
230 | 227 | if he's active. |
|
231 | 228 | |
|
232 | 229 | :param user: user object or None |
|
233 | 230 | :return: boolean |
|
234 | 231 | """ |
|
235 | 232 | if user is None: |
|
236 | 233 | return False |
|
237 | 234 | |
|
238 | 235 | elif user.active: |
|
239 | 236 | return True |
|
240 | 237 | |
|
241 | 238 | return False |
|
242 | 239 | |
|
243 | 240 | def _check_permission(self, action, user, repo_name, ip_addr=None): |
|
244 | 241 | """ |
|
245 | 242 | Checks permissions using action (push/pull) user and repository |
|
246 | 243 | name |
|
247 | 244 | |
|
248 | 245 | :param action: push or pull action |
|
249 | 246 | :param user: user instance |
|
250 | 247 | :param repo_name: repository name |
|
251 | 248 | """ |
|
252 | 249 | # check IP |
|
253 | 250 | inherit = user.inherit_default_permissions |
|
254 | 251 | ip_allowed = AuthUser.check_ip_allowed(user.user_id, ip_addr, |
|
255 | 252 | inherit_from_default=inherit) |
|
256 | 253 | if ip_allowed: |
|
257 | 254 | log.info('Access for IP:%s allowed', ip_addr) |
|
258 | 255 | else: |
|
259 | 256 | return False |
|
260 | 257 | |
|
261 | 258 | if action == 'push': |
|
262 | 259 | if not HasPermissionAnyMiddleware('repository.write', |
|
263 | 260 | 'repository.admin')(user, |
|
264 | 261 | repo_name): |
|
265 | 262 | return False |
|
266 | 263 | |
|
267 | 264 | else: |
|
268 | 265 | # any other action need at least read permission |
|
269 | 266 | if not HasPermissionAnyMiddleware('repository.read', |
|
270 | 267 | 'repository.write', |
|
271 | 268 | 'repository.admin')(user, |
|
272 | 269 | repo_name): |
|
273 | 270 | return False |
|
274 | 271 | |
|
275 | 272 | return True |
|
276 | 273 | |
|
277 | 274 | def _check_ssl(self, environ, start_response): |
|
278 | 275 | """ |
|
279 | 276 | Checks the SSL check flag and returns False if SSL is not present |
|
280 | 277 | and required True otherwise |
|
281 | 278 | """ |
|
282 | 279 | org_proto = environ['wsgi._org_proto'] |
|
283 | 280 | # check if we have SSL required ! if not it's a bad request ! |
|
284 | 281 | require_ssl = str2bool(self.repo_vcs_config.get('web', 'push_ssl')) |
|
285 | 282 | if require_ssl and org_proto == 'http': |
|
286 | 283 | log.debug('proto is %s and SSL is required BAD REQUEST !', |
|
287 | 284 | org_proto) |
|
288 | 285 | return False |
|
289 | 286 | return True |
|
290 | 287 | |
|
291 | 288 | def __call__(self, environ, start_response): |
|
292 | 289 | try: |
|
293 | 290 | return self._handle_request(environ, start_response) |
|
294 | 291 | except Exception: |
|
295 | 292 | log.exception("Exception while handling request") |
|
296 | 293 | appenlight.track_exception(environ) |
|
297 | 294 | return HTTPInternalServerError()(environ, start_response) |
|
298 | 295 | finally: |
|
299 | 296 | meta.Session.remove() |
|
300 | 297 | |
|
301 | 298 | def _handle_request(self, environ, start_response): |
|
302 | 299 | |
|
303 | 300 | if not self._check_ssl(environ, start_response): |
|
304 | 301 | reason = ('SSL required, while RhodeCode was unable ' |
|
305 | 302 | 'to detect this as SSL request') |
|
306 | 303 | log.debug('User not allowed to proceed, %s', reason) |
|
307 | 304 | return HTTPNotAcceptable(reason)(environ, start_response) |
|
308 | 305 | |
|
309 | 306 | if not self.url_repo_name: |
|
310 | 307 | log.warning('Repository name is empty: %s', self.url_repo_name) |
|
311 | 308 | # failed to get repo name, we fail now |
|
312 | 309 | return HTTPNotFound()(environ, start_response) |
|
313 | 310 | log.debug('Extracted repo name is %s', self.url_repo_name) |
|
314 | 311 | |
|
315 | 312 | ip_addr = get_ip_addr(environ) |
|
316 | 313 | username = None |
|
317 | 314 | |
|
318 | 315 | # skip passing error to error controller |
|
319 | 316 | environ['pylons.status_code_redirect'] = True |
|
320 | 317 | |
|
321 | 318 | # ====================================================================== |
|
322 | 319 | # GET ACTION PULL or PUSH |
|
323 | 320 | # ====================================================================== |
|
324 | 321 | action = self._get_action(environ) |
|
325 | 322 | |
|
326 | 323 | # ====================================================================== |
|
327 | 324 | # Check if this is a request to a shadow repository of a pull request. |
|
328 | 325 | # In this case only pull action is allowed. |
|
329 | 326 | # ====================================================================== |
|
330 | 327 | if self.is_shadow_repo and action != 'pull': |
|
331 | 328 | reason = 'Only pull action is allowed for shadow repositories.' |
|
332 | 329 | log.debug('User not allowed to proceed, %s', reason) |
|
333 | 330 | return HTTPNotAcceptable(reason)(environ, start_response) |
|
334 | 331 | |
|
335 | 332 | # ====================================================================== |
|
336 | 333 | # CHECK ANONYMOUS PERMISSION |
|
337 | 334 | # ====================================================================== |
|
338 | 335 | if action in ['pull', 'push']: |
|
339 | 336 | anonymous_user = User.get_default_user() |
|
340 | 337 | username = anonymous_user.username |
|
341 | 338 | if anonymous_user.active: |
|
342 | 339 | # ONLY check permissions if the user is activated |
|
343 | 340 | anonymous_perm = self._check_permission( |
|
344 | 341 | action, anonymous_user, self.acl_repo_name, ip_addr) |
|
345 | 342 | else: |
|
346 | 343 | anonymous_perm = False |
|
347 | 344 | |
|
348 | 345 | if not anonymous_user.active or not anonymous_perm: |
|
349 | 346 | if not anonymous_user.active: |
|
350 | 347 | log.debug('Anonymous access is disabled, running ' |
|
351 | 348 | 'authentication') |
|
352 | 349 | |
|
353 | 350 | if not anonymous_perm: |
|
354 | 351 | log.debug('Not enough credentials to access this ' |
|
355 | 352 | 'repository as anonymous user') |
|
356 | 353 | |
|
357 | 354 | username = None |
|
358 | 355 | # ============================================================== |
|
359 | 356 | # DEFAULT PERM FAILED OR ANONYMOUS ACCESS IS DISABLED SO WE |
|
360 | 357 | # NEED TO AUTHENTICATE AND ASK FOR AUTH USER PERMISSIONS |
|
361 | 358 | # ============================================================== |
|
362 | 359 | |
|
363 | 360 | # try to auth based on environ, container auth methods |
|
364 | 361 | log.debug('Running PRE-AUTH for container based authentication') |
|
365 | 362 | pre_auth = authenticate( |
|
366 | 363 | '', '', environ, VCS_TYPE, registry=self.registry) |
|
367 | 364 | if pre_auth and pre_auth.get('username'): |
|
368 | 365 | username = pre_auth['username'] |
|
369 | 366 | log.debug('PRE-AUTH got %s as username', username) |
|
370 | 367 | |
|
371 | 368 | # If not authenticated by the container, running basic auth |
|
372 | 369 | if not username: |
|
373 | 370 | self.authenticate.realm = get_rhodecode_realm() |
|
374 | 371 | |
|
375 | 372 | try: |
|
376 | 373 | result = self.authenticate(environ) |
|
377 | 374 | except (UserCreationError, NotAllowedToCreateUserError) as e: |
|
378 | 375 | log.error(e) |
|
379 | 376 | reason = safe_str(e) |
|
380 | 377 | return HTTPNotAcceptable(reason)(environ, start_response) |
|
381 | 378 | |
|
382 | 379 | if isinstance(result, str): |
|
383 | 380 | AUTH_TYPE.update(environ, 'basic') |
|
384 | 381 | REMOTE_USER.update(environ, result) |
|
385 | 382 | username = result |
|
386 | 383 | else: |
|
387 | 384 | return result.wsgi_application(environ, start_response) |
|
388 | 385 | |
|
389 | 386 | # ============================================================== |
|
390 | 387 | # CHECK PERMISSIONS FOR THIS REQUEST USING GIVEN USERNAME |
|
391 | 388 | # ============================================================== |
|
392 | 389 | user = User.get_by_username(username) |
|
393 | 390 | if not self.valid_and_active_user(user): |
|
394 | 391 | return HTTPForbidden()(environ, start_response) |
|
395 | 392 | username = user.username |
|
396 | 393 | user.update_lastactivity() |
|
397 | 394 | meta.Session().commit() |
|
398 | 395 | |
|
399 | 396 | # check user attributes for password change flag |
|
400 | 397 | user_obj = user |
|
401 | 398 | if user_obj and user_obj.username != User.DEFAULT_USER and \ |
|
402 | 399 | user_obj.user_data.get('force_password_change'): |
|
403 | 400 | reason = 'password change required' |
|
404 | 401 | log.debug('User not allowed to authenticate, %s', reason) |
|
405 | 402 | return HTTPNotAcceptable(reason)(environ, start_response) |
|
406 | 403 | |
|
407 | 404 | # check permissions for this repository |
|
408 | 405 | perm = self._check_permission( |
|
409 | 406 | action, user, self.acl_repo_name, ip_addr) |
|
410 | 407 | if not perm: |
|
411 | 408 | return HTTPForbidden()(environ, start_response) |
|
412 | 409 | |
|
413 | 410 | # extras are injected into UI object and later available |
|
414 | 411 | # in hooks executed by rhodecode |
|
415 | 412 | check_locking = _should_check_locking(environ.get('QUERY_STRING')) |
|
416 | 413 | extras = vcs_operation_context( |
|
417 | 414 | environ, repo_name=self.acl_repo_name, username=username, |
|
418 | 415 | action=action, scm=self.SCM, check_locking=check_locking, |
|
419 | 416 | is_shadow_repo=self.is_shadow_repo |
|
420 | 417 | ) |
|
421 | 418 | |
|
422 | 419 | # ====================================================================== |
|
423 | 420 | # REQUEST HANDLING |
|
424 | 421 | # ====================================================================== |
|
425 | 422 | repo_path = os.path.join( |
|
426 | 423 | safe_str(self.basepath), safe_str(self.vcs_repo_name)) |
|
427 | 424 | log.debug('Repository path is %s', repo_path) |
|
428 | 425 | |
|
429 | 426 | fix_PATH() |
|
430 | 427 | |
|
431 | 428 | log.info( |
|
432 | 429 | '%s action on %s repo "%s" by "%s" from %s', |
|
433 | 430 | action, self.SCM, safe_str(self.url_repo_name), |
|
434 | 431 | safe_str(username), ip_addr) |
|
435 | 432 | |
|
436 | 433 | return self._generate_vcs_response( |
|
437 | 434 | environ, start_response, repo_path, extras, action) |
|
438 | 435 | |
|
439 | 436 | @initialize_generator |
|
440 | 437 | def _generate_vcs_response( |
|
441 | 438 | self, environ, start_response, repo_path, extras, action): |
|
442 | 439 | """ |
|
443 | 440 | Returns a generator for the response content. |
|
444 | 441 | |
|
445 | 442 | This method is implemented as a generator, so that it can trigger |
|
446 | 443 | the cache validation after all content sent back to the client. It |
|
447 | 444 | also handles the locking exceptions which will be triggered when |
|
448 | 445 | the first chunk is produced by the underlying WSGI application. |
|
449 | 446 | """ |
|
450 | 447 | callback_daemon, extras = self._prepare_callback_daemon(extras) |
|
451 | 448 | config = self._create_config(extras, self.acl_repo_name) |
|
452 | 449 | log.debug('HOOKS extras is %s', extras) |
|
453 | 450 | app = self._create_wsgi_app(repo_path, self.url_repo_name, config) |
|
454 | 451 | |
|
455 | 452 | try: |
|
456 | 453 | with callback_daemon: |
|
457 | 454 | try: |
|
458 | 455 | response = app(environ, start_response) |
|
459 | 456 | finally: |
|
460 | 457 | # This statement works together with the decorator |
|
461 | 458 | # "initialize_generator" above. The decorator ensures that |
|
462 | 459 | # we hit the first yield statement before the generator is |
|
463 | 460 | # returned back to the WSGI server. This is needed to |
|
464 | 461 | # ensure that the call to "app" above triggers the |
|
465 | 462 | # needed callback to "start_response" before the |
|
466 | 463 | # generator is actually used. |
|
467 | 464 | yield "__init__" |
|
468 | 465 | |
|
469 | 466 | for chunk in response: |
|
470 | 467 | yield chunk |
|
471 | 468 | except Exception as exc: |
|
472 | 469 | # TODO: martinb: Exceptions are only raised in case of the Pyro4 |
|
473 | 470 | # backend. Refactor this except block after dropping Pyro4 support. |
|
474 | 471 | # TODO: johbo: Improve "translating" back the exception. |
|
475 | 472 | if getattr(exc, '_vcs_kind', None) == 'repo_locked': |
|
476 | 473 | exc = HTTPLockedRC(*exc.args) |
|
477 | 474 | _code = rhodecode.CONFIG.get('lock_ret_code') |
|
478 | 475 | log.debug('Repository LOCKED ret code %s!', (_code,)) |
|
479 | 476 | elif getattr(exc, '_vcs_kind', None) == 'requirement': |
|
480 | 477 | log.debug( |
|
481 | 478 | 'Repository requires features unknown to this Mercurial') |
|
482 | 479 | exc = HTTPRequirementError(*exc.args) |
|
483 | 480 | else: |
|
484 | 481 | raise |
|
485 | 482 | |
|
486 | 483 | for chunk in exc(environ, start_response): |
|
487 | 484 | yield chunk |
|
488 | 485 | finally: |
|
489 | 486 | # invalidate cache on push |
|
490 | 487 | try: |
|
491 | 488 | if action == 'push': |
|
492 | 489 | self._invalidate_cache(self.url_repo_name) |
|
493 | 490 | finally: |
|
494 | 491 | meta.Session.remove() |
|
495 | 492 | |
|
496 | 493 | def _get_repository_name(self, environ): |
|
497 | 494 | """Get repository name out of the environmnent |
|
498 | 495 | |
|
499 | 496 | :param environ: WSGI environment |
|
500 | 497 | """ |
|
501 | 498 | raise NotImplementedError() |
|
502 | 499 | |
|
503 | 500 | def _get_action(self, environ): |
|
504 | 501 | """Map request commands into a pull or push command. |
|
505 | 502 | |
|
506 | 503 | :param environ: WSGI environment |
|
507 | 504 | """ |
|
508 | 505 | raise NotImplementedError() |
|
509 | 506 | |
|
510 | 507 | def _create_wsgi_app(self, repo_path, repo_name, config): |
|
511 | 508 | """Return the WSGI app that will finally handle the request.""" |
|
512 | 509 | raise NotImplementedError() |
|
513 | 510 | |
|
514 | 511 | def _create_config(self, extras, repo_name): |
|
515 | """Create a Pyro safe config representation.""" | |
|
512 | """Create a safe config representation.""" | |
|
516 | 513 | raise NotImplementedError() |
|
517 | 514 | |
|
518 | 515 | def _prepare_callback_daemon(self, extras): |
|
519 | 516 | return prepare_callback_daemon( |
|
520 | 517 | extras, protocol=vcs_settings.HOOKS_PROTOCOL, |
|
521 | 518 | use_direct_calls=vcs_settings.HOOKS_DIRECT_CALLS) |
|
522 | 519 | |
|
523 | 520 | |
|
524 | 521 | def _should_check_locking(query_string): |
|
525 | 522 | # this is kind of hacky, but due to how mercurial handles client-server |
|
526 | 523 | # communication, the server sees operations on commits, bookmarks, phases and
|
527 | 524 | # obsolescence markers in different transactions, so we don't want to check
|
528 | 525 | # locking on those |
|
529 | 526 | return query_string not in ['cmd=listkeys'] |
@@ -1,63 +1,63 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2012-2017 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | """ |
|
22 | 22 | Equivalent of rhodecode.lib.middleware.scm_app but using remote apps. |
|
23 | 23 | """ |
|
24 | 24 | |
|
25 | 25 | import logging |
|
26 | 26 | |
|
27 | 27 | from rhodecode.lib.middleware.utils import wsgi_app_caller_client |
|
28 | 28 | |
|
29 | 29 | |
|
30 | 30 | log = logging.getLogger(__name__) |
|
31 | 31 | |
|
32 | 32 | |
|
33 | 33 | HG_REMOTE_WSGI = None |
|
34 | 34 | GIT_REMOTE_WSGI = None |
|
35 | 35 | |
|
36 | 36 | |
|
37 | 37 | def create_git_wsgi_app(repo_path, repo_name, config): |
|
38 | 38 | """ |
|
39 | 39 | Return a WSGI app backed by a remote app to handle Git. |
|
40 | 40 | |
|
41 | 41 | config is a dictionary holding the extras. |
|
42 | 42 | """ |
|
43 | 43 | factory = GIT_REMOTE_WSGI |
|
44 | 44 | if not factory: |
|
45 | log.error('Pyro server has not been initialized yet') | |
|
45 | log.error('VCSServer has not been initialized yet') | |
|
46 | 46 | |
|
47 | 47 | return wsgi_app_caller_client.RemoteAppCaller( |
|
48 | 48 | factory, repo_path, repo_name, config) |
|
49 | 49 | |
|
50 | 50 | |
|
51 | 51 | def create_hg_wsgi_app(repo_path, repo_name, config): |
|
52 | 52 | """ |
|
53 | 53 | Return a WSGI app backed by a remote app to handle Mercurial. |
|
54 | 54 | |
|
55 | 55 | config is a list of 3-item tuples representing a ConfigObject (it is the |
|
56 | 56 | serialized version of the config object). |
|
57 | 57 | """ |
|
58 | 58 | factory = HG_REMOTE_WSGI |
|
59 | 59 | if not factory: |
|
60 | log.error('Pyro server has not been initialized yet') | |
|
60 | log.error('VCSServer has not been initialized yet') | |
|
61 | 61 | |
|
62 | 62 | return wsgi_app_caller_client.RemoteAppCaller( |
|
63 | 63 | factory, repo_path, repo_name, config) |
@@ -1,98 +1,90 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2012-2017 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | """ |
|
22 | 22 | Utility to call a WSGI app wrapped in a WSGIAppCaller object. |
|
23 | 23 | """ |
|
24 | 24 | |
|
25 | 25 | import logging |
|
26 | 26 | |
|
27 | from Pyro4.errors import ConnectionClosedError | |
|
28 | ||
|
29 | 27 | |
|
30 | 28 | log = logging.getLogger(__name__) |
|
31 | 29 | |
|
32 | 30 | |
|
33 | 31 | def _get_clean_environ(environ): |
|
34 | 32 | """Return a copy of the WSGI environment without wsgi.* keys. |
|
35 | 33 | |
|
36 | 34 | It also omits any non-string values. |
|
37 | 35 | |
|
38 | 36 | :param environ: WSGI environment to clean |
|
39 | 37 | :type environ: dict |
|
40 | 38 | |
|
41 | 39 | :returns: WSGI environment to pass to WSGIAppCaller.handle. |
|
42 | 40 | :rtype: dict |
|
43 | 41 | """ |
|
44 | 42 | clean_environ = dict( |
|
45 | 43 | (k, v) for k, v in environ.iteritems() |
|
46 | 44 | if type(v) == str and type(k) == str and not k.startswith('wsgi.') |
|
47 | 45 | ) |
|
48 | 46 | |
|
49 | 47 | return clean_environ |
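
A small illustration of the cleaning rules: wsgi.* keys and non-string values are dropped before the environment crosses the wire:

    environ = {
        'PATH_INFO': '/repo',
        'CONTENT_LENGTH': '0',
        'wsgi.url_scheme': 'http',   # dropped: wsgi.* key
        'wsgi.input': object(),      # dropped: wsgi.* key and non-string value
    }
    assert _get_clean_environ(environ) == {
        'PATH_INFO': '/repo', 'CONTENT_LENGTH': '0'}
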
|
50 | 48 | |
|
51 | 49 | |
|
52 | 50 | # pylint: disable=too-few-public-methods |
|
53 | 51 | class RemoteAppCaller(object): |
|
54 | 52 | """Create and calls a remote WSGI app using the given factory. |
|
55 | 53 | |
|
56 | 54 | It first cleans the environment, so as to reduce the data transferred. |
|
57 | 55 | """ |
|
58 | 56 | |
|
59 | 57 | def __init__(self, remote_wsgi, *args, **kwargs): |
|
60 | 58 | """ |
|
61 | 59 | :param remote_wsgi: The remote wsgi object that creates a |
|
62 | 60 | WSGIAppCaller. This object |
|
63 | 61 | has to have a handle method, with the signature: |
|
64 | 62 | handle(environ, start_response, *args, **kwargs) |
|
65 | 63 | :param args: args to be passed to the app creation |
|
66 | 64 | :param kwargs: kwargs to be passed to the app creation |
|
67 | 65 | """ |
|
68 | 66 | self._remote_wsgi = remote_wsgi |
|
69 | 67 | self._args = args |
|
70 | 68 | self._kwargs = kwargs |
|
71 | 69 | |
|
72 | 70 | def __call__(self, environ, start_response): |
|
73 | 71 | """ |
|
74 | 72 | :param environ: WSGI environment with which the app will be run |
|
75 | 73 | :type environ: dict |
|
76 | 74 | :param start_response: callable of WSGI protocol |
|
77 | 75 | :type start_response: callable |
|
78 | 76 | |
|
79 | 77 | :returns: an iterable with the data returned by the app |
|
80 | 78 | :rtype: iterable<str> |
|
81 | 79 | """ |
|
82 | 80 | log.debug("Forwarding WSGI request via proxy %s", self._remote_wsgi) |
|
83 | 81 | input_data = environ['wsgi.input'].read() |
|
84 | 82 | clean_environ = _get_clean_environ(environ) |
|
85 | 83 | |
|
86 | try: | |
|
87 | data, status, headers = self._remote_wsgi.handle( | |
|
88 | clean_environ, input_data, *self._args, **self._kwargs) | |
|
89 | except ConnectionClosedError: | |
|
90 | log.debug('Remote Pyro Server ConnectionClosedError') | |
|
91 | self._remote_wsgi._pyroReconnect(tries=15) | |
|
92 | 84 | data, status, headers = self._remote_wsgi.handle(
|
93 | 85 | clean_environ, input_data, *self._args, **self._kwargs)
|
94 | 86 | |
|
95 | 87 | log.debug("Got result from proxy, returning to WSGI container") |
|
96 | 88 | start_response(status, headers) |
|
97 | 89 | |
|
98 | 90 | return data |
@@ -1,235 +1,233 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2010-2017 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | import gzip |
|
22 | 22 | import shutil |
|
23 | 23 | import logging |
|
24 | 24 | import tempfile |
|
25 | 25 | import urlparse |
|
26 | 26 | |
|
27 | 27 | from webob.exc import HTTPNotFound |
|
28 | 28 | |
|
29 | 29 | import rhodecode |
|
30 | 30 | from rhodecode.lib.middleware.appenlight import wrap_in_appenlight_if_enabled |
|
31 | 31 | from rhodecode.lib.middleware.simplegit import SimpleGit, GIT_PROTO_PAT |
|
32 | 32 | from rhodecode.lib.middleware.simplehg import SimpleHg |
|
33 | 33 | from rhodecode.lib.middleware.simplesvn import SimpleSvn |
|
34 | 34 | from rhodecode.model.settings import VcsSettingsModel |
|
35 | 35 | |
|
36 | 36 | log = logging.getLogger(__name__) |
|
37 | 37 | |
|
38 | 38 | VCS_TYPE_KEY = '_rc_vcs_type' |
|
39 | 39 | VCS_TYPE_SKIP = '_rc_vcs_skip' |
|
40 | 40 | |
|
41 | 41 | |
|
42 | 42 | def is_git(environ): |
|
43 | 43 | """ |
|
44 | 44 | Returns True if requests should be handled by GIT wsgi middleware |
|
45 | 45 | """ |
|
46 | 46 | is_git_path = GIT_PROTO_PAT.match(environ['PATH_INFO']) |
|
47 | 47 | log.debug( |
|
48 | 48 | 'request path: `%s` detected as GIT PROTOCOL %s', environ['PATH_INFO'], |
|
49 | 49 | is_git_path is not None) |
|
50 | 50 | |
|
51 | 51 | return is_git_path |
|
52 | 52 | |
|
53 | 53 | |
|
54 | 54 | def is_hg(environ): |
|
55 | 55 | """ |
|
56 | 56 | Returns True if requests target is mercurial server - header |
|
57 | 57 | ``HTTP_ACCEPT`` of such request would start with ``application/mercurial``. |
|
58 | 58 | """ |
|
59 | 59 | is_hg_path = False |
|
60 | 60 | |
|
61 | 61 | http_accept = environ.get('HTTP_ACCEPT') |
|
62 | 62 | |
|
63 | 63 | if http_accept and http_accept.startswith('application/mercurial'): |
|
64 | 64 | query = urlparse.parse_qs(environ['QUERY_STRING']) |
|
65 | 65 | if 'cmd' in query: |
|
66 | 66 | is_hg_path = True |
|
67 | 67 | |
|
68 | 68 | log.debug( |
|
69 | 69 | 'request path: `%s` detected as HG PROTOCOL %s', environ['PATH_INFO'], |
|
70 | 70 | is_hg_path) |
|
71 | 71 | |
|
72 | 72 | return is_hg_path |
|
73 | 73 | |
|
74 | 74 | |
|
75 | 75 | def is_svn(environ): |
|
76 | 76 | """ |
|
77 | 77 | Returns True if requests target is Subversion server |
|
78 | 78 | """ |
|
79 | 79 | http_dav = environ.get('HTTP_DAV', '') |
|
80 | 80 | magic_path_segment = rhodecode.CONFIG.get( |
|
81 | 81 | 'rhodecode_subversion_magic_path', '/!svn') |
|
82 | 82 | is_svn_path = ( |
|
83 | 83 | 'subversion' in http_dav or |
|
84 | 84 | magic_path_segment in environ['PATH_INFO']) |
|
85 | 85 | log.debug( |
|
86 | 86 | 'request path: `%s` detected as SVN PROTOCOL %s', environ['PATH_INFO'], |
|
87 | 87 | is_svn_path) |
|
88 | 88 | |
|
89 | 89 | return is_svn_path |
|
90 | 90 | |
|
91 | 91 | |
|
92 | 92 | class GunzipMiddleware(object): |
|
93 | 93 | """ |
|
94 | 94 | WSGI middleware that unzips gzip-encoded requests before |
|
95 | 95 | passing on to the underlying application. |
|
96 | 96 | """ |
|
97 | 97 | |
|
98 | 98 | def __init__(self, application): |
|
99 | 99 | self.app = application |
|
100 | 100 | |
|
101 | 101 | def __call__(self, environ, start_response): |
|
102 | 102 | accepts_encoding_header = environ.get('HTTP_CONTENT_ENCODING', b'') |
|
103 | 103 | |
|
104 | 104 | if b'gzip' in accepts_encoding_header: |
|
105 | 105 | log.debug('gzip detected, now running gunzip wrapper') |
|
106 | 106 | wsgi_input = environ['wsgi.input'] |
|
107 | 107 | |
|
108 | 108 | if not hasattr(environ['wsgi.input'], 'seek'): |
|
109 | 109 | # The gzip implementation in the standard library of Python 2.x |
|
110 | 110 | # requires the '.seek()' and '.tell()' methods to be available |
|
111 | 111 | # on the input stream. Read the data into a temporary file to |
|
112 | 112 | # work around this limitation. |
|
113 | 113 | |
|
114 | 114 | wsgi_input = tempfile.SpooledTemporaryFile(64 * 1024 * 1024) |
|
115 | 115 | shutil.copyfileobj(environ['wsgi.input'], wsgi_input) |
|
116 | 116 | wsgi_input.seek(0) |
|
117 | 117 | |
|
118 | 118 | environ['wsgi.input'] = gzip.GzipFile(fileobj=wsgi_input, mode='r') |
|
119 | 119 | # since we "Ungzipped" the content we say now it's no longer gzip |
|
120 | 120 | # content encoding |
|
121 | 121 | del environ['HTTP_CONTENT_ENCODING'] |
|
122 | 122 | |
|
123 | 123 | # the content length changed after decompression, so drop the stale header
|
124 | 124 | if 'CONTENT_LENGTH' in environ: |
|
125 | 125 | del environ['CONTENT_LENGTH'] |
|
126 | 126 | else: |
|
127 | 127 | log.debug('content not gzipped, gzipMiddleware passing ' |
|
128 | 128 | 'request further') |
|
129 | 129 | return self.app(environ, start_response) |
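
A hedged round-trip sketch of the middleware: a gzip-encoded body reaches the wrapped app as plain bytes, with the encoding header and the now stale CONTENT_LENGTH removed:

    import gzip
    from StringIO import StringIO

    buf = StringIO()
    zipped = gzip.GzipFile(fileobj=buf, mode='wb')
    zipped.write('payload')
    zipped.close()
    buf.seek(0)

    def inner_app(environ, start_response):
        assert environ['wsgi.input'].read() == 'payload'
        assert 'HTTP_CONTENT_ENCODING' not in environ
        start_response('200 OK', [])
        return ['done']

    environ = {'HTTP_CONTENT_ENCODING': 'gzip', 'wsgi.input': buf,
               'CONTENT_LENGTH': str(len(buf.getvalue()))}
    assert GunzipMiddleware(inner_app)(environ, lambda *a: None) == ['done']
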
|
130 | 130 | |
|
131 | 131 | |
|
132 | 132 | def is_vcs_call(environ): |
|
133 | 133 | if VCS_TYPE_KEY in environ: |
|
134 | 134 | raw_type = environ[VCS_TYPE_KEY] |
|
135 | 135 | return raw_type and raw_type != VCS_TYPE_SKIP |
|
136 | 136 | return False |
|
137 | 137 | |
|
138 | 138 | |
|
139 | 139 | def detect_vcs_request(environ, backends): |
|
140 | 140 | checks = { |
|
141 | 141 | 'hg': (is_hg, SimpleHg), |
|
142 | 142 | 'git': (is_git, SimpleGit), |
|
143 | 143 | 'svn': (is_svn, SimpleSvn), |
|
144 | 144 | } |
|
145 | 145 | handler = None |
|
146 | 146 | |
|
147 | 147 | if VCS_TYPE_KEY in environ: |
|
148 | 148 | raw_type = environ[VCS_TYPE_KEY] |
|
149 | 149 | if raw_type == VCS_TYPE_SKIP: |
|
150 | 150 | log.debug('got `skip` marker for vcs detection, skipping...') |
|
151 | 151 | return handler |
|
152 | 152 | |
|
153 | 153 | _check, handler = checks.get(raw_type) or [None, None] |
|
154 | 154 | if handler: |
|
155 | 155 | log.debug('got handler:%s from environ', handler) |
|
156 | 156 | |
|
157 | 157 | if not handler: |
|
158 | 158 | log.debug('checking if request is of VCS type in order: %s', backends) |
|
159 | 159 | for vcs_type in backends: |
|
160 | 160 | vcs_check, _handler = checks[vcs_type] |
|
161 | 161 | if vcs_check(environ): |
|
162 | 162 | log.debug('vcs handler found %s', _handler) |
|
163 | 163 | handler = _handler |
|
164 | 164 | break |
|
165 | 165 | |
|
166 | 166 | return handler |
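
An illustrative call: a Mercurial client is recognized purely from its Accept header plus a cmd query argument, checked in the configured backend order:

    environ = {
        'PATH_INFO': '/some/repo',
        'HTTP_ACCEPT': 'application/mercurial-0.1',
        'QUERY_STRING': 'cmd=capabilities',
    }
    handler = detect_vcs_request(environ, backends=['hg', 'git', 'svn'])
    assert handler is SimpleHg
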
|
167 | 167 | |
|
168 | 168 | |
|
169 | 169 | class VCSMiddleware(object): |
|
170 | 170 | |
|
171 | 171 | def __init__(self, app, config, appenlight_client, registry): |
|
172 | 172 | self.application = app |
|
173 | 173 | self.config = config |
|
174 | 174 | self.appenlight_client = appenlight_client |
|
175 | 175 | self.registry = registry |
|
176 | 176 | self.use_gzip = True |
|
177 | 177 | # order in which we check the middlewares, based on vcs.backends config |
|
178 | 178 | self.check_middlewares = config['vcs.backends'] |
|
179 | 179 | |
|
180 | 180 | def vcs_config(self, repo_name=None): |
|
181 | 181 | """ |
|
182 | 182 | returns serialized VcsSettings |
|
183 | 183 | """ |
|
184 | 184 | return VcsSettingsModel(repo=repo_name).get_ui_settings_as_config_obj() |
|
185 | 185 | |
|
186 | 186 | def wrap_in_gzip_if_enabled(self, app, config): |
|
187 | 187 | if self.use_gzip: |
|
188 | 188 | app = GunzipMiddleware(app) |
|
189 | 189 | return app |
|
190 | 190 | |
|
191 | 191 | def _get_handler_app(self, environ): |
|
192 | 192 | app = None |
|
193 | 193 | log.debug('VCSMiddleware: detecting vcs type.') |
|
194 | 194 | handler = detect_vcs_request(environ, self.check_middlewares) |
|
195 | 195 | if handler: |
|
196 | 196 | app = handler(self.application, self.config, self.registry) |
|
197 | 197 | |
|
198 | 198 | return app |
|
199 | 199 | |
|
200 | 200 | def __call__(self, environ, start_response): |
|
201 | 201 | # check if we handle one of interesting protocols, optionally extract |
|
202 | 202 | # specific vcsSettings and allow changes of how things are wrapped |
|
203 | 203 | vcs_handler = self._get_handler_app(environ) |
|
204 | 204 | if vcs_handler: |
|
205 | 205 | # translate the _REPO_ID into real repo NAME for usage |
|
206 | 206 | # in middleware |
|
207 | 207 | environ['PATH_INFO'] = vcs_handler._get_by_id(environ['PATH_INFO']) |
|
208 | 208 | |
|
209 | 209 | # Set acl, url and vcs repo names. |
|
210 | 210 | vcs_handler.set_repo_names(environ) |
|
211 | 211 | |
|
212 | 212 | # check for type, presence in database and on filesystem |
|
213 | 213 | if not vcs_handler.is_valid_and_existing_repo( |
|
214 | 214 | vcs_handler.acl_repo_name, |
|
215 | 215 | vcs_handler.basepath, |
|
216 | 216 | vcs_handler.SCM): |
|
217 | 217 | return HTTPNotFound()(environ, start_response) |
|
218 | 218 | |
|
219 | # TODO: johbo: Needed for the Pyro4 backend and Mercurial only. | |
|
220 | # Remove once we fully switched to the HTTP backend. | |
|
221 | 219 | environ['REPO_NAME'] = vcs_handler.url_repo_name |
|
222 | 220 | |
|
223 | 221 | # register repo config back to the handler |
|
224 | 222 | vcs_handler.repo_vcs_config = self.vcs_config( |
|
225 | 223 | vcs_handler.acl_repo_name) |
|
226 | 224 | |
|
227 | 225 | # Wrap handler in middlewares if they are enabled. |
|
228 | 226 | vcs_handler = self.wrap_in_gzip_if_enabled( |
|
229 | 227 | vcs_handler, self.config) |
|
230 | 228 | vcs_handler, _ = wrap_in_appenlight_if_enabled( |
|
231 | 229 | vcs_handler, self.config, self.appenlight_client) |
|
232 | 230 | |
|
233 | 231 | return vcs_handler(environ, start_response) |
|
234 | 232 | |
|
235 | 233 | return self.application(environ, start_response) |
@@ -1,304 +1,241 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2014-2017 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | """ |
|
22 | 22 | Various Version Control System (vcs) management abstraction layer
|
23 | 23 | for Python. Built with a server-client architecture.
|
24 | 24 | """ |
|
25 | 25 | |
|
26 | 26 | |
|
27 | 27 | VERSION = (0, 5, 0, 'dev') |
|
28 | 28 | |
|
29 | 29 | __version__ = '.'.join((str(each) for each in VERSION[:4])) |
|
30 | 30 | |
|
31 | 31 | __all__ = [ |
|
32 | 32 | 'get_version', 'get_vcs_instance', 'get_backend', |
|
33 | 33 | 'VCSError', 'RepositoryError', 'CommitError' |
|
34 | 34 | ] |
|
35 | 35 | |
|
36 | 36 | import atexit |
|
37 | 37 | import logging |
|
38 | 38 | import subprocess32 |
|
39 | 39 | import time |
|
40 | 40 | import urlparse |
|
41 | 41 | from cStringIO import StringIO |
|
42 | 42 | |
|
43 | import Pyro4 | |
|
44 | from Pyro4.errors import CommunicationError | |
|
45 | 43 | |
|
46 | 44 | from rhodecode.lib.vcs.conf import settings |
|
47 | 45 | from rhodecode.lib.vcs.backends import get_vcs_instance, get_backend |
|
48 | 46 | from rhodecode.lib.vcs.exceptions import ( |
|
49 | 47 | VCSError, RepositoryError, CommitError, VCSCommunicationError) |
|
50 | 48 | |
|
51 | 49 | log = logging.getLogger(__name__) |
|
52 | 50 | |
|
53 | 51 | # The pycurl library directly accesses C API functions and is not patched by |
|
54 | 52 | # gevent. This will potentially lead to deadlocks due to incompatibility to |
|
55 | 53 | # gevent. Therefore we check if gevent is active and import a gevent compatible |
|
56 | 54 | # wrapper in that case. |
|
57 | 55 | try: |
|
58 | 56 | from gevent import monkey |
|
59 | 57 | if monkey.is_module_patched('__builtin__'): |
|
60 | 58 | import geventcurl as pycurl |
|
61 | 59 | log.debug('Using gevent compatible pycurl: %s', pycurl)
|
62 | 60 | else: |
|
63 | 61 | import pycurl |
|
64 | 62 | except ImportError: |
|
65 | 63 | import pycurl |
|
66 | 64 | |
|
67 | 65 | |
|
68 | 66 | def get_version(): |
|
69 | 67 | """ |
|
70 | 68 | Returns shorter version (digit parts only) as string. |
|
71 | 69 | """ |
|
72 | 70 | return '.'.join((str(each) for each in VERSION[:3])) |
|
73 | 71 | |
|
74 | 72 | |
|
75 | def connect_pyro4(server_and_port): | |
|
76 | from rhodecode.lib.vcs import connection, client | |
|
77 | from rhodecode.lib.middleware.utils import scm_app | |
|
78 | ||
|
79 | git_remote = client.RequestScopeProxyFactory( | |
|
80 | settings.pyro_remote(settings.PYRO_GIT, server_and_port)) | |
|
81 | hg_remote = client.RequestScopeProxyFactory( | |
|
82 | settings.pyro_remote(settings.PYRO_HG, server_and_port)) | |
|
83 | svn_remote = client.RequestScopeProxyFactory( | |
|
84 | settings.pyro_remote(settings.PYRO_SVN, server_and_port)) | |
|
85 | ||
|
86 | connection.Git = client.RepoMaker(proxy_factory=git_remote) | |
|
87 | connection.Hg = client.RepoMaker(proxy_factory=hg_remote) | |
|
88 | connection.Svn = client.RepoMaker(proxy_factory=svn_remote) | |
|
89 | ||
|
90 | scm_app.GIT_REMOTE_WSGI = Pyro4.Proxy( | |
|
91 | settings.pyro_remote( | |
|
92 | settings.PYRO_GIT_REMOTE_WSGI, server_and_port)) | |
|
93 | scm_app.HG_REMOTE_WSGI = Pyro4.Proxy( | |
|
94 | settings.pyro_remote( | |
|
95 | settings.PYRO_HG_REMOTE_WSGI, server_and_port)) | |
|
96 | ||
|
97 | @atexit.register | |
|
98 | def free_connection_resources(): | |
|
99 | connection.Git = None | |
|
100 | connection.Hg = None | |
|
101 | connection.Svn = None | |
|
102 | connection.Service = None | |
|
103 | ||
|
104 | ||
|
105 | 73 | def connect_http(server_and_port): |
|
106 | 74 | from rhodecode.lib.vcs import connection, client_http |
|
107 | 75 | from rhodecode.lib.middleware.utils import scm_app |
|
108 | 76 | |
|
109 | 77 | session_factory = client_http.ThreadlocalSessionFactory() |
|
110 | 78 | |
|
111 | 79 | connection.Git = client_http.RepoMaker( |
|
112 | 80 | server_and_port, '/git', 'git', session_factory) |
|
113 | 81 | connection.Hg = client_http.RepoMaker( |
|
114 | 82 | server_and_port, '/hg', 'hg', session_factory) |
|
115 | 83 | connection.Svn = client_http.RepoMaker( |
|
116 | 84 | server_and_port, '/svn', 'svn', session_factory) |
|
117 | 85 | connection.Service = client_http.ServiceConnection( |
|
118 | 86 | server_and_port, '/_service', session_factory) |
|
119 | 87 | |
|
120 | 88 | scm_app.HG_REMOTE_WSGI = client_http.VcsHttpProxy( |
|
121 | 89 | server_and_port, '/proxy/hg') |
|
122 | 90 | scm_app.GIT_REMOTE_WSGI = client_http.VcsHttpProxy( |
|
123 | 91 | server_and_port, '/proxy/git') |
|
124 | 92 | |
|
125 | 93 | @atexit.register |
|
126 | 94 | def free_connection_resources(): |
|
127 | 95 | connection.Git = None |
|
128 | 96 | connection.Hg = None |
|
129 | 97 | connection.Svn = None |
|
130 | 98 | connection.Service = None |
|
131 | 99 | |
|
132 | 100 | |
|
133 | 101 | def connect_vcs(server_and_port, protocol): |
|
134 | 102 | """ |
|
135 | 103 | Initializes the connection to the vcs server. |
|
136 | 104 | |
|
137 | 105 | :param server_and_port: str, e.g. "localhost:9900" |
|
138 | :param protocol: str, "pyro4" or "http" | 

106 | :param protocol: str, "http" | 
|
139 | 107 | """ |
|
140 | if protocol == 'pyro4': | 

141 | connect_pyro4(server_and_port) | |
|
142 | elif protocol == 'http': | |
|
108 | if protocol == 'http': | |
|
143 | 109 | connect_http(server_and_port) |
|
144 | 110 | else: |
|
145 | 111 | raise Exception('Invalid vcs server protocol "{}"'.format(protocol)) |
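With the pyro4 branch removed, 'http' is the only protocol connect_vcs accepts; anything else fails fast. A hedged usage sketch (the address is illustrative):

    from rhodecode.lib.vcs import connect_vcs

    # Wires connection.Git/Hg/Svn, connection.Service and the WSGI
    # proxies to a VCSServer listening on the given address.
    connect_vcs('localhost:9900', protocol='http')

    connect_vcs('localhost:9900', protocol='pyro4')
    # -> Exception: Invalid vcs server protocol "pyro4"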
|
146 | 112 | |
|
147 | 113 | |
|
148 | 114 | # TODO: johbo: This function should be moved into our test suite, there is |
|
149 | 115 | # no reason to support starting the vcsserver in Enterprise itself. |
|
150 | 116 | def start_vcs_server(server_and_port, protocol, log_level=None): |
|
151 | 117 | """ |
|
152 | 118 | Starts the vcs server in a subprocess. |
|
153 | 119 | """ |
|
154 | 120 | log.info('Starting VCSServer as a sub process with %s protocol', protocol) |
|
155 | 121 | if protocol == 'http': |
|
156 | 122 | return _start_http_vcs_server(server_and_port, log_level) |
|
157 | elif protocol == 'pyro4': | |
|
158 | return _start_pyro4_vcs_server(server_and_port, log_level) | |
|
159 | 123 | else: |
|
160 | 124 | raise Exception('Invalid vcs server protocol "{}"'.format(protocol)) |
|
161 | 125 | |
|
162 | 126 | |
|
163 | def _start_pyro4_vcs_server(server_and_port, log_level=None): | |
|
164 | _try_to_shutdown_running_server(server_and_port, protocol='pyro4') | |
|
165 | host, port = server_and_port.rsplit(":", 1) | |
|
166 | host = host.strip('[]') | |
|
167 | args = [ | |
|
168 | 'vcsserver', '--port', port, '--host', host, '--locale', 'en_US.UTF-8', | |
|
169 | '--threadpool', '32'] | |
|
170 | if log_level: | |
|
171 | args += ['--log-level', log_level] | |
|
172 | proc = subprocess32.Popen(args) | |
|
173 | ||
|
174 | def cleanup_server_process(): | |
|
175 | proc.kill() | |
|
176 | atexit.register(cleanup_server_process) | |
|
177 | ||
|
178 | server = create_vcsserver_proxy(server_and_port, protocol='pyro4') | |
|
179 | _wait_until_vcs_server_is_reachable(server) | |
|
180 | ||
|
181 | ||
|
182 | 127 | def _start_http_vcs_server(server_and_port, log_level=None): |
|
183 | 128 | # TODO: mikhail: shutdown if an http server already runs |
|
184 | 129 | |
|
185 | 130 | host, port = server_and_port.rsplit(":", 1) |
|
186 | 131 | args = [ |
|
187 | 132 | 'pserve', 'rhodecode/tests/vcsserver_http.ini', |
|
188 | 133 | 'http_port=%s' % (port, ), 'http_host=%s' % (host, )] |
|
189 | 134 | proc = subprocess32.Popen(args) |
|
190 | 135 | |
|
191 | 136 | def cleanup_server_process(): |
|
192 | 137 | proc.kill() |
|
193 | 138 | atexit.register(cleanup_server_process) |
|
194 | 139 | |
|
195 | 140 | server = create_vcsserver_proxy(server_and_port, protocol='http') |
|
196 | 141 | _wait_until_vcs_server_is_reachable(server) |
|
197 | 142 | |
|
198 | 143 | |
|
199 | 144 | def _wait_until_vcs_server_is_reachable(server, timeout=40): |
|
200 | 145 | begin = time.time() |
|
201 | 146 | while (time.time() - begin) < timeout: |
|
202 | 147 | try: |
|
203 | 148 | server.ping() |
|
204 | 149 | return |
|
205 | except (VCSCommunicationError, Pyro4.errors.CommunicationError, pycurl.error): | 

150 | except (VCSCommunicationError, pycurl.error): | |
|
206 | 151 | log.debug('VCSServer not started yet, retrying to connect.')
|
207 | 152 | time.sleep(0.5) |
|
208 | 153 | raise Exception( |
|
209 | 154 | 'Starting the VCSServer failed or took more than {} ' |
|
210 | 155 | 'seconds.'.format(timeout)) |
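The readiness check above is a plain poll-until-timeout loop around ping(). The same pattern in isolation (generic sketch, not tied to this module):

    import time

    def wait_until_reachable(ping, timeout=40, interval=0.5):
        # Poll `ping` until it stops raising or `timeout` seconds elapse.
        begin = time.time()
        while (time.time() - begin) < timeout:
            try:
                ping()
                return
            except Exception:
                time.sleep(interval)
        raise Exception(
            'Service did not come up within {} seconds.'.format(timeout))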
|
211 | 156 | |
|
212 | 157 | |
|
213 | 158 | def _try_to_shutdown_running_server(server_and_port, protocol): |
|
214 | 159 | server = create_vcsserver_proxy(server_and_port, protocol) |
|
215 | 160 | try: |
|
216 | 161 | server.shutdown() |
|
217 | except (Pyro4.errors.CommunicationError, pycurl.error): | 

162 | except pycurl.error: | |
|
218 | 163 | return |
|
219 | 164 | |
|
220 | 165 | # TODO: Not sure why this is important, but without it the following start |
|
221 | 166 | # of the server fails. |
|
222 | 167 | server = create_vcsserver_proxy(server_and_port, protocol) |
|
223 | 168 | server.ping() |
|
224 | 169 | |
|
225 | 170 | |
|
226 | 171 | def create_vcsserver_proxy(server_and_port, protocol): |
|
227 | if protocol == 'pyro4': | 

228 | return _create_vcsserver_proxy_pyro4(server_and_port) | |
|
229 | elif protocol == 'http': | |
|
172 | if protocol == 'http': | |
|
230 | 173 | return _create_vcsserver_proxy_http(server_and_port) |
|
231 | 174 | else: |
|
232 | 175 | raise Exception('Invalid vcs server protocol "{}"'.format(protocol)) |
|
233 | 176 | |
|
234 | 177 | |
|
235 | def _create_vcsserver_proxy_pyro4(server_and_port): | |
|
236 | server = Pyro4.Proxy( | |
|
237 | settings.pyro_remote(settings.PYRO_VCSSERVER, server_and_port)) | |
|
238 | return server | |
|
239 | ||
|
240 | ||
|
241 | 178 | def _create_vcsserver_proxy_http(server_and_port): |
|
242 | 179 | from rhodecode.lib.vcs import client_http |
|
243 | 180 | |
|
244 | 181 | session = _create_http_rpc_session() |
|
245 | 182 | url = urlparse.urljoin('http://%s' % server_and_port, '/server') |
|
246 | 183 | return client_http.RemoteObject(url, session) |
|
247 | 184 | |
|
248 | 185 | |
|
249 | 186 | class CurlSession(object): |
|
250 | 187 | """ |
|
251 | 188 | Modeled so that it provides a subset of the requests interface. |
|
252 | 189 | |
|
253 | 190 | This has been created so that it provides only a minimal API for our

254 | 191 | needs. The parts it provides are based on the API of the library

255 | 192 | `requests`, which allows us to easily benchmark against it.
|
256 | 193 | |
|
257 | 194 | Please have a look at the class :class:`requests.Session` when you extend |
|
258 | 195 | it. |
|
259 | 196 | """ |
|
260 | 197 | |
|
261 | 198 | def __init__(self): |
|
262 | 199 | curl = pycurl.Curl() |
|
263 | 200 | # TODO: johbo: I did test with 7.19 of libcurl. This version has |
|
264 | 201 | # trouble with 100 - continue being set in the expect header. This |
|
265 | 202 | # can lead to massive performance drops, switching it off here. |
|
266 | 203 | curl.setopt(curl.HTTPHEADER, ["Expect:"]) |
|
267 | 204 | curl.setopt(curl.TCP_NODELAY, True) |
|
268 | 205 | curl.setopt(curl.PROTOCOLS, curl.PROTO_HTTP) |
|
269 | 206 | self._curl = curl |
|
270 | 207 | |
|
271 | 208 | def post(self, url, data, allow_redirects=False): |
|
272 | 209 | response_buffer = StringIO() |
|
273 | 210 | |
|
274 | 211 | curl = self._curl |
|
275 | 212 | curl.setopt(curl.URL, url) |
|
276 | 213 | curl.setopt(curl.POST, True) |
|
277 | 214 | curl.setopt(curl.POSTFIELDS, data) |
|
278 | 215 | curl.setopt(curl.FOLLOWLOCATION, allow_redirects) |
|
279 | 216 | curl.setopt(curl.WRITEDATA, response_buffer) |
|
280 | 217 | curl.perform() |
|
281 | 218 | |
|
282 | 219 | return CurlResponse(response_buffer) |
|
283 | 220 | |
|
284 | 221 | |
|
285 | 222 | class CurlResponse(object): |
|
286 | 223 | """ |
|
287 | 224 | The response of a request, modeled after the requests API. |
|
288 | 225 | |
|
289 | 226 | This class provides a subset of the response interface known from the |
|
290 | 227 | library `requests`. It is intentionally kept similar, so that we can use |
|
291 | 228 | `requests` as a drop in replacement for benchmarking purposes. |
|
292 | 229 | """ |
|
293 | 230 | |
|
294 | 231 | def __init__(self, response_buffer): |
|
295 | 232 | self._response_buffer = response_buffer |
|
296 | 233 | |
|
297 | 234 | @property |
|
298 | 235 | def content(self): |
|
299 | 236 | return self._response_buffer.getvalue() |
|
300 | 237 | |
|
301 | 238 | |
|
302 | 239 | def _create_http_rpc_session(): |
|
303 | 240 | session = CurlSession() |
|
304 | 241 | return session |
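Because CurlSession mirrors the calling convention of `requests`, call sites touch only post() and .content, and the two libraries can be swapped for benchmarking. A hedged usage sketch (the URL is illustrative):

    session = _create_http_rpc_session()   # a CurlSession under the hood
    response = session.post('http://localhost:9900/server', data='payload')
    body = response.content                # raw response body as a str

    # The same two calls work unchanged against requests.Session().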
@@ -1,291 +1,284 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2016-2017 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | """ |
|
22 | 22 | Client for the VCSServer implemented based on HTTP. |
|
23 | ||
|
24 | ||
|
25 | Status | |
|
26 | ------ | |
|
27 | ||
|
28 | This client implementation shall eventually replace the Pyro4 based | |
|
29 | implementation. | |
|
30 | 23 | """ |
|
31 | 24 | |
|
32 | 25 | import copy |
|
33 | 26 | import logging |
|
34 | 27 | import threading |
|
35 | 28 | import urllib2 |
|
36 | 29 | import urlparse |
|
37 | 30 | import uuid |
|
38 | 31 | |
|
39 | 32 | import pycurl |
|
40 | 33 | import msgpack |
|
41 | 34 | import requests |
|
42 | 35 | |
|
43 | 36 | from . import exceptions, CurlSession |
|
44 | 37 | |
|
45 | 38 | |
|
46 | 39 | log = logging.getLogger(__name__) |
|
47 | 40 | |
|
48 | 41 | |
|
49 | 42 | # TODO: mikhail: Keep it in sync with vcsserver's |
|
50 | 43 | # HTTPApplication.ALLOWED_EXCEPTIONS |
|
51 | 44 | EXCEPTIONS_MAP = { |
|
52 | 45 | 'KeyError': KeyError, |
|
53 | 46 | 'URLError': urllib2.URLError, |
|
54 | 47 | } |
|
55 | 48 | |
|
56 | 49 | |
|
57 | 50 | class RepoMaker(object): |
|
58 | 51 | |
|
59 | 52 | def __init__(self, server_and_port, backend_endpoint, backend_type, session_factory): |
|
60 | 53 | self.url = urlparse.urljoin( |
|
61 | 54 | 'http://%s' % server_and_port, backend_endpoint) |
|
62 | 55 | self._session_factory = session_factory |
|
63 | 56 | self.backend_type = backend_type |
|
64 | 57 | |
|
65 | 58 | def __call__(self, path, config, with_wire=None): |
|
66 | 59 | log.debug('RepoMaker call on %s', path) |
|
67 | 60 | return RemoteRepo( |
|
68 | 61 | path, config, self.url, self._session_factory(), |
|
69 | 62 | with_wire=with_wire) |
|
70 | 63 | |
|
71 | 64 | def __getattr__(self, name): |
|
72 | 65 | def f(*args, **kwargs): |
|
73 | 66 | return self._call(name, *args, **kwargs) |
|
74 | 67 | return f |
|
75 | 68 | |
|
76 | 69 | @exceptions.map_vcs_exceptions |
|
77 | 70 | def _call(self, name, *args, **kwargs): |
|
78 | 71 | payload = { |
|
79 | 72 | 'id': str(uuid.uuid4()), |
|
80 | 73 | 'method': name, |
|
81 | 74 | 'backend': self.backend_type, |
|
82 | 75 | 'params': {'args': args, 'kwargs': kwargs} |
|
83 | 76 | } |
|
84 | 77 | return _remote_call( |
|
85 | 78 | self.url, payload, EXCEPTIONS_MAP, self._session_factory()) |
|
86 | 79 | |
|
87 | 80 | |
|
88 | 81 | class ServiceConnection(object): |
|
89 | 82 | def __init__(self, server_and_port, backend_endpoint, session_factory): |
|
90 | 83 | self.url = urlparse.urljoin( |
|
91 | 84 | 'http://%s' % server_and_port, backend_endpoint) |
|
92 | 85 | self._session_factory = session_factory |
|
93 | 86 | |
|
94 | 87 | def __getattr__(self, name): |
|
95 | 88 | def f(*args, **kwargs): |
|
96 | 89 | return self._call(name, *args, **kwargs) |
|
97 | 90 | |
|
98 | 91 | return f |
|
99 | 92 | |
|
100 | 93 | @exceptions.map_vcs_exceptions |
|
101 | 94 | def _call(self, name, *args, **kwargs): |
|
102 | 95 | payload = { |
|
103 | 96 | 'id': str(uuid.uuid4()), |
|
104 | 97 | 'method': name, |
|
105 | 98 | 'params': {'args': args, 'kwargs': kwargs} |
|
106 | 99 | } |
|
107 | 100 | return _remote_call( |
|
108 | 101 | self.url, payload, EXCEPTIONS_MAP, self._session_factory()) |
|
109 | 102 | |
|
110 | 103 | |
|
111 | 104 | class RemoteRepo(object): |
|
112 | 105 | |
|
113 | 106 | def __init__(self, path, config, url, session, with_wire=None): |
|
114 | 107 | self.url = url |
|
115 | 108 | self._session = session |
|
116 | 109 | self._wire = { |
|
117 | 110 | "path": path, |
|
118 | 111 | "config": config, |
|
119 | 112 | "context": self._create_vcs_cache_context(), |
|
120 | 113 | } |
|
121 | 114 | if with_wire: |
|
122 | 115 | self._wire.update(with_wire) |
|
123 | 116 | |
|
124 | 117 | # johbo: Trading complexity for performance. Avoiding the call to |
|
125 | 118 | # log.debug brings a few percent gain even if is is not active. |
|
126 | 119 | if log.isEnabledFor(logging.DEBUG): |
|
127 | 120 | self._call = self._call_with_logging |
|
128 | 121 | |
|
129 | 122 | def __getattr__(self, name): |
|
130 | 123 | def f(*args, **kwargs): |
|
131 | 124 | return self._call(name, *args, **kwargs) |
|
132 | 125 | return f |
|
133 | 126 | |
|
134 | 127 | @exceptions.map_vcs_exceptions |
|
135 | 128 | def _call(self, name, *args, **kwargs): |
|
136 | 129 | # TODO: oliver: This is currently necessary pre-call since the |
|
137 | 130 | # config object is being changed for hooking scenarios |
|
138 | 131 | wire = copy.deepcopy(self._wire) |
|
139 | 132 | wire["config"] = wire["config"].serialize() |
|
140 | 133 | payload = { |
|
141 | 134 | 'id': str(uuid.uuid4()), |
|
142 | 135 | 'method': name, |
|
143 | 136 | 'params': {'wire': wire, 'args': args, 'kwargs': kwargs} |
|
144 | 137 | } |
|
145 | 138 | return _remote_call(self.url, payload, EXCEPTIONS_MAP, self._session) |
|
146 | 139 | |
|
147 | 140 | def _call_with_logging(self, name, *args, **kwargs): |
|
148 | 141 | log.debug('Calling %s@%s', self.url, name) |
|
149 | 142 | return RemoteRepo._call(self, name, *args, **kwargs) |
|
150 | 143 | |
|
151 | 144 | def __getitem__(self, key): |
|
152 | 145 | return self.revision(key) |
|
153 | 146 | |
|
154 | 147 | def _create_vcs_cache_context(self): |
|
155 | 148 | """ |
|
156 | 149 | Creates a unique string which is passed to the VCSServer on every |
|
157 | 150 | remote call. It is used as cache key in the VCSServer. |
|
158 | 151 | """ |
|
159 | 152 | return str(uuid.uuid4()) |
|
160 | 153 | |
|
161 | 154 | def invalidate_vcs_cache(self): |
|
162 | 155 | """ |
|
163 | 156 | This invalidates the context which is sent to the VCSServer on every |
|
164 | 157 | call to a remote method. It forces the VCSServer to create a fresh |
|
165 | 158 | repository instance on the next call to a remote method. |
|
166 | 159 | """ |
|
167 | 160 | self._wire['context'] = self._create_vcs_cache_context() |
|
168 | 161 | |
|
169 | 162 | |
|
170 | 163 | class RemoteObject(object): |
|
171 | 164 | |
|
172 | 165 | def __init__(self, url, session): |
|
173 | 166 | self._url = url |
|
174 | 167 | self._session = session |
|
175 | 168 | |
|
176 | 169 | # johbo: Trading complexity for performance. Avoiding the call to |
|
177 | 170 | # log.debug brings a few percent gain even if is is not active. |
|
178 | 171 | if log.isEnabledFor(logging.DEBUG): |
|
179 | 172 | self._call = self._call_with_logging |
|
180 | 173 | |
|
181 | 174 | def __getattr__(self, name): |
|
182 | 175 | def f(*args, **kwargs): |
|
183 | 176 | return self._call(name, *args, **kwargs) |
|
184 | 177 | return f |
|
185 | 178 | |
|
186 | 179 | @exceptions.map_vcs_exceptions |
|
187 | 180 | def _call(self, name, *args, **kwargs): |
|
188 | 181 | payload = { |
|
189 | 182 | 'id': str(uuid.uuid4()), |
|
190 | 183 | 'method': name, |
|
191 | 184 | 'params': {'args': args, 'kwargs': kwargs} |
|
192 | 185 | } |
|
193 | 186 | return _remote_call(self._url, payload, EXCEPTIONS_MAP, self._session) |
|
194 | 187 | |
|
195 | 188 | def _call_with_logging(self, name, *args, **kwargs): |
|
196 | 189 | log.debug('Calling %s@%s', self._url, name) |
|
197 | 190 | return RemoteObject._call(self, name, *args, **kwargs) |
|
198 | 191 | |
|
199 | 192 | |
|
200 | 193 | def _remote_call(url, payload, exceptions_map, session): |
|
201 | 194 | try: |
|
202 | 195 | response = session.post(url, data=msgpack.packb(payload)) |
|
203 | 196 | except pycurl.error as e: |
|
204 | 197 | raise exceptions.HttpVCSCommunicationError(e) |
|
205 | 198 | |
|
206 | 199 | try: |
|
207 | 200 | response = msgpack.unpackb(response.content) |
|
208 | 201 | except Exception: |
|
209 | 202 | log.exception('Failed to decode response %r', response.content)
|
210 | 203 | raise |
|
211 | 204 | |
|
212 | 205 | error = response.get('error') |
|
213 | 206 | if error: |
|
214 | 207 | type_ = error.get('type', 'Exception') |
|
215 | 208 | exc = exceptions_map.get(type_, Exception) |
|
216 | 209 | exc = exc(error.get('message')) |
|
217 | 210 | try: |
|
218 | 211 | exc._vcs_kind = error['_vcs_kind'] |
|
219 | 212 | except KeyError: |
|
220 | 213 | pass |
|
221 | 214 | |
|
222 | 215 | try: |
|
223 | 216 | exc._vcs_server_traceback = error['traceback'] |
|
224 | 217 | except KeyError: |
|
225 | 218 | pass |
|
226 | 219 | |
|
227 | 220 | raise exc |
|
228 | 221 | return response.get('result') |
|
229 | 222 | |
|
230 | 223 | |
|
231 | 224 | class VcsHttpProxy(object): |
|
232 | 225 | |
|
233 | 226 | CHUNK_SIZE = 16384 |
|
234 | 227 | |
|
235 | 228 | def __init__(self, server_and_port, backend_endpoint): |
|
236 | 229 | adapter = requests.adapters.HTTPAdapter(max_retries=5) |
|
237 | 230 | self.base_url = urlparse.urljoin( |
|
238 | 231 | 'http://%s' % server_and_port, backend_endpoint) |
|
239 | 232 | self.session = requests.Session() |
|
240 | 233 | self.session.mount('http://', adapter) |
|
241 | 234 | |
|
242 | 235 | def handle(self, environment, input_data, *args, **kwargs): |
|
243 | 236 | data = { |
|
244 | 237 | 'environment': environment, |
|
245 | 238 | 'input_data': input_data, |
|
246 | 239 | 'args': args, |
|
247 | 240 | 'kwargs': kwargs |
|
248 | 241 | } |
|
249 | 242 | result = self.session.post( |
|
250 | 243 | self.base_url, msgpack.packb(data), stream=True) |
|
251 | 244 | return self._get_result(result) |
|
252 | 245 | |
|
253 | 246 | def _deserialize_and_raise(self, error): |
|
254 | 247 | exception = Exception(error['message']) |
|
255 | 248 | try: |
|
256 | 249 | exception._vcs_kind = error['_vcs_kind'] |
|
257 | 250 | except KeyError: |
|
258 | 251 | pass |
|
259 | 252 | raise exception |
|
260 | 253 | |
|
261 | 254 | def _iterate(self, result): |
|
262 | 255 | unpacker = msgpack.Unpacker() |
|
263 | 256 | for line in result.iter_content(chunk_size=self.CHUNK_SIZE): |
|
264 | 257 | unpacker.feed(line) |
|
265 | 258 | for chunk in unpacker: |
|
266 | 259 | yield chunk |
|
267 | 260 | |
|
268 | 261 | def _get_result(self, result): |
|
269 | 262 | iterator = self._iterate(result) |
|
270 | 263 | error = iterator.next() |
|
271 | 264 | if error: |
|
272 | 265 | self._deserialize_and_raise(error) |
|
273 | 266 | |
|
274 | 267 | status = iterator.next() |
|
275 | 268 | headers = iterator.next() |
|
276 | 269 | |
|
277 | 270 | return iterator, status, headers |
|
278 | 271 | |
|
279 | 272 | |
|
280 | 273 | class ThreadlocalSessionFactory(object): |
|
281 | 274 | """ |
|
282 | 275 | Creates one CurlSession per thread on demand. |
|
283 | 276 | """ |
|
284 | 277 | |
|
285 | 278 | def __init__(self): |
|
286 | 279 | self._thread_local = threading.local() |
|
287 | 280 | |
|
288 | 281 | def __call__(self): |
|
289 | 282 | if not hasattr(self._thread_local, 'curl_session'): |
|
290 | 283 | self._thread_local.curl_session = CurlSession() |
|
291 | 284 | return self._thread_local.curl_session |
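Each thread thus gets exactly one lazily created CurlSession, so pycurl handles are never shared across threads. A small sketch of the guarantee (assumes pycurl is importable):

    import threading

    factory = ThreadlocalSessionFactory()
    sessions = []

    def grab():
        sessions.append(factory())

    workers = [threading.Thread(target=grab) for _ in range(2)]
    for t in workers:
        t.start()
    for t in workers:
        t.join()
    assert sessions[0] is not sessions[1]  # one session per thread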
@@ -1,87 +1,66 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2014-2017 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | """ |
|
22 | 22 | Internal settings for vcs-lib |
|
23 | 23 | """ |
|
24 | 24 | |
|
25 | 25 | # list of default encoding used in safe_unicode/safe_str methods |
|
26 | 26 | DEFAULT_ENCODINGS = ['utf8'] |
|
27 | 27 | |
|
28 | 28 | # Optional arguments to rev-filter, it has to be a list |
|
29 | 29 | # It can also be ['--branches', '--tags'] |
|
30 | 30 | GIT_REV_FILTER = ['--all'] |
|
31 | 31 | |
|
32 | 32 | # Compatibility version when creating SVN repositories. None means newest. |
|
33 | 33 | # Other available options are: pre-1.4-compatible, pre-1.5-compatible, |
|
34 | 34 | # pre-1.6-compatible, pre-1.8-compatible |
|
35 | 35 | SVN_COMPATIBLE_VERSION = None |
|
36 | 36 | |
|
37 | 37 | ALIASES = ['hg', 'git', 'svn'] |
|
38 | 38 | |
|
39 | 39 | BACKENDS = { |
|
40 | 40 | 'hg': 'rhodecode.lib.vcs.backends.hg.MercurialRepository', |
|
41 | 41 | 'git': 'rhodecode.lib.vcs.backends.git.GitRepository', |
|
42 | 42 | 'svn': 'rhodecode.lib.vcs.backends.svn.SubversionRepository', |
|
43 | 43 | } |
|
44 | 44 | |
|
45 | 45 | # TODO: Remove once controllers/files.py is adjusted |
|
46 | 46 | ARCHIVE_SPECS = { |
|
47 | 47 | 'tbz2': ('application/x-bzip2', '.tar.bz2'), |
|
48 | 48 | 'tgz': ('application/x-gzip', '.tar.gz'), |
|
49 | 49 | 'zip': ('application/zip', '.zip'), |
|
50 | 50 | } |
|
51 | 51 | |
|
52 | 52 | HOOKS_PROTOCOL = None |
|
53 | 53 | HOOKS_DIRECT_CALLS = False |
|
54 | 54 | |
|
55 | PYRO_PORT = 9900 | |
|
56 | ||
|
57 | PYRO_GIT = 'git_remote' | |
|
58 | PYRO_HG = 'hg_remote' | |
|
59 | PYRO_SVN = 'svn_remote' | |
|
60 | PYRO_VCSSERVER = 'vcs_server' | |
|
61 | PYRO_GIT_REMOTE_WSGI = 'git_remote_wsgi' | |
|
62 | PYRO_HG_REMOTE_WSGI = 'hg_remote_wsgi' | |
|
63 | ||
|
64 | PYRO_RECONNECT_TRIES = 15 | |
|
65 | """ | |
|
66 | How many retries to reconnect will be performed if the connection was lost. | |
|
67 | ||
|
68 | Each try takes 2s. Doing 15 means that we will give it up to 30s for a | |
|
69 | connection to be re-established. | |
|
70 | """ | |
|
71 | ||
|
72 | ||
|
73 | def pyro_remote(object_id, server_and_port): | |
|
74 | return "PYRO:%s@%s" % (object_id, server_and_port) | |
|
75 | ||
|
76 | 55 | |
|
77 | 56 | def available_aliases(): |
|
78 | 57 | """ |
|
79 | 58 | Mercurial is required for the system to work, so in case vcs.backends does |
|
80 | 59 | not include it, we make sure it will be available internally |
|
81 | 60 | TODO: anderson: refactor vcs.backends so it won't be necessary, VCS server |
|
82 | 61 | should be responsible for dictating available backends.
|
83 | 62 | """ |
|
84 | 63 | aliases = ALIASES[:] |
|
85 | 64 | if 'hg' not in aliases: |
|
86 | 65 | aliases += ['hg'] |
|
87 | 66 | return aliases |
@@ -1,205 +1,201 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2014-2017 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | """ |
|
22 | 22 | Custom vcs exceptions module. |
|
23 | 23 | """ |
|
24 | 24 | |
|
25 | 25 | import functools |
|
26 | 26 | import urllib2 |
|
27 | 27 | |
|
28 | 28 | |
|
29 | 29 | class VCSCommunicationError(Exception): |
|
30 | 30 | pass |
|
31 | 31 | |
|
32 | 32 | |
|
33 | class PyroVCSCommunicationError(VCSCommunicationError): | |
|
34 | pass | |
|
35 | ||
|
36 | ||
|
37 | 33 | class HttpVCSCommunicationError(VCSCommunicationError): |
|
38 | 34 | pass |
|
39 | 35 | |
|
40 | 36 | |
|
41 | 37 | class VCSError(Exception): |
|
42 | 38 | pass |
|
43 | 39 | |
|
44 | 40 | |
|
45 | 41 | class RepositoryError(VCSError): |
|
46 | 42 | pass |
|
47 | 43 | |
|
48 | 44 | |
|
49 | 45 | class RepositoryRequirementError(RepositoryError): |
|
50 | 46 | pass |
|
51 | 47 | |
|
52 | 48 | |
|
53 | 49 | class VCSBackendNotSupportedError(VCSError): |
|
54 | 50 | """ |
|
55 | 51 | Exception raised when VCSServer does not support requested backend |
|
56 | 52 | """ |
|
57 | 53 | |
|
58 | 54 | |
|
59 | 55 | class EmptyRepositoryError(RepositoryError): |
|
60 | 56 | pass |
|
61 | 57 | |
|
62 | 58 | |
|
63 | 59 | class TagAlreadyExistError(RepositoryError): |
|
64 | 60 | pass |
|
65 | 61 | |
|
66 | 62 | |
|
67 | 63 | class TagDoesNotExistError(RepositoryError): |
|
68 | 64 | pass |
|
69 | 65 | |
|
70 | 66 | |
|
71 | 67 | class BranchAlreadyExistError(RepositoryError): |
|
72 | 68 | pass |
|
73 | 69 | |
|
74 | 70 | |
|
75 | 71 | class BranchDoesNotExistError(RepositoryError): |
|
76 | 72 | pass |
|
77 | 73 | |
|
78 | 74 | |
|
79 | 75 | class CommitError(RepositoryError): |
|
80 | 76 | """ |
|
81 | 77 | Exceptions related to an existing commit |
|
82 | 78 | """ |
|
83 | 79 | |
|
84 | 80 | |
|
85 | 81 | class CommitDoesNotExistError(CommitError): |
|
86 | 82 | pass |
|
87 | 83 | |
|
88 | 84 | |
|
89 | 85 | class CommittingError(RepositoryError): |
|
90 | 86 | """ |
|
91 | 87 | Exceptions happening while creating a new commit |
|
92 | 88 | """ |
|
93 | 89 | |
|
94 | 90 | |
|
95 | 91 | class NothingChangedError(CommittingError): |
|
96 | 92 | pass |
|
97 | 93 | |
|
98 | 94 | |
|
99 | 95 | class NodeError(VCSError): |
|
100 | 96 | pass |
|
101 | 97 | |
|
102 | 98 | |
|
103 | 99 | class RemovedFileNodeError(NodeError): |
|
104 | 100 | pass |
|
105 | 101 | |
|
106 | 102 | |
|
107 | 103 | class NodeAlreadyExistsError(CommittingError): |
|
108 | 104 | pass |
|
109 | 105 | |
|
110 | 106 | |
|
111 | 107 | class NodeAlreadyChangedError(CommittingError): |
|
112 | 108 | pass |
|
113 | 109 | |
|
114 | 110 | |
|
115 | 111 | class NodeDoesNotExistError(CommittingError): |
|
116 | 112 | pass |
|
117 | 113 | |
|
118 | 114 | |
|
119 | 115 | class NodeNotChangedError(CommittingError): |
|
120 | 116 | pass |
|
121 | 117 | |
|
122 | 118 | |
|
123 | 119 | class NodeAlreadyAddedError(CommittingError): |
|
124 | 120 | pass |
|
125 | 121 | |
|
126 | 122 | |
|
127 | 123 | class NodeAlreadyRemovedError(CommittingError): |
|
128 | 124 | pass |
|
129 | 125 | |
|
130 | 126 | |
|
131 | 127 | class SubrepoMergeError(RepositoryError): |
|
132 | 128 | """ |
|
133 | 129 | This happens if we try to merge a repository which contains subrepos and |
|
134 | 130 | the subrepos cannot be merged. The subrepos are not merged itself but |
|
135 | 131 | their references in the root repo are merged. |
|
136 | 132 | """ |
|
137 | 133 | |
|
138 | 134 | |
|
139 | 135 | class ImproperArchiveTypeError(VCSError): |
|
140 | 136 | pass |
|
141 | 137 | |
|
142 | 138 | |
|
143 | 139 | class CommandError(VCSError): |
|
144 | 140 | pass |
|
145 | 141 | |
|
146 | 142 | |
|
147 | 143 | class UnhandledException(VCSError): |
|
148 | 144 | """ |
|
149 | 145 | Signals that something unexpected went wrong. |
|
150 | 146 | |
|
151 | 147 | This usually means we have a programming error on the side of the VCSServer |
|
152 | 148 | and should inspect the logfile of the VCSServer to find more details. |
|
153 | 149 | """ |
|
154 | 150 | |
|
155 | 151 | |
|
156 | 152 | _EXCEPTION_MAP = { |
|
157 | 153 | 'abort': RepositoryError, |
|
158 | 154 | 'archive': ImproperArchiveTypeError, |
|
159 | 155 | 'error': RepositoryError, |
|
160 | 156 | 'lookup': CommitDoesNotExistError, |
|
161 | 157 | 'repo_locked': RepositoryError, |
|
162 | 158 | 'requirement': RepositoryRequirementError, |
|
163 | 159 | 'unhandled': UnhandledException, |
|
164 | 160 | # TODO: johbo: Define our own exception for this and stop abusing |
|
165 | 161 | # urllib's exception class. |
|
166 | 162 | 'url_error': urllib2.URLError, |
|
167 | 163 | 'subrepo_merge_error': SubrepoMergeError, |
|
168 | 164 | } |
|
169 | 165 | |
|
170 | 166 | |
|
171 | 167 | def map_vcs_exceptions(func): |
|
172 | 168 | """ |
|
173 | 169 | Utility to decorate functions so that plain exceptions are translated. |
|
174 | 170 | |
|
175 | 171 | The translation is based on `exc_map` which maps a `str` indicating |
|
176 | 172 | the error type into an exception class representing this error inside |
|
177 | 173 | of the vcs layer. |
|
178 | 174 | """ |
|
179 | 175 | |
|
180 | 176 | @functools.wraps(func) |
|
181 | 177 | def wrapper(*args, **kwargs): |
|
182 | 178 | try: |
|
183 | 179 | return func(*args, **kwargs) |
|
184 | 180 | except Exception as e: |
|
185 | 181 | # The error middleware adds information if it finds |
|
186 | 182 | # __traceback_info__ in a frame object. This way the remote |
|
187 | 183 | # traceback information is made available in error reports. |
|
188 | 184 | remote_tb = getattr(e, '_vcs_server_traceback', None) |
|
189 | 185 | if remote_tb: |
|
190 | 186 | __traceback_info__ = ( |
|
191 | 187 | 'Found VCSServer remote traceback information:\n\n' + |
|
192 | 188 | '\n'.join(remote_tb)) |
|
193 | 189 | |
|
194 | 190 | # Avoid that remote_tb also appears in the frame |
|
195 | 191 | del remote_tb |
|
196 | 192 | |
|
197 | 193 | # Special vcs errors had an attribute "_vcs_kind" which is used |
|
198 | 194 | # to translate them to the proper exception class in the vcs |
|
199 | 195 | # client layer. |
|
200 | 196 | kind = getattr(e, '_vcs_kind', None) |
|
201 | 197 | if kind: |
|
202 | 198 | raise _EXCEPTION_MAP[kind](*e.args) |
|
203 | 199 | else: |
|
204 | 200 | raise |
|
205 | 201 | return wrapper |
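The decorator looks for _vcs_kind on whatever is raised below it and re-raises the class registered in _EXCEPTION_MAP. A hedged usage sketch:

    @map_vcs_exceptions
    def fetch_commit():
        # Simulates an error bubbling up from the VCSServer layer.
        e = Exception('unknown revision')
        e._vcs_kind = 'lookup'
        raise e

    try:
        fetch_commit()
    except CommitDoesNotExistError as e:
        print(e)  # 'lookup' maps to CommitDoesNotExistError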
@@ -1,462 +1,460 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2010-2017 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | import base64 |
|
22 | 22 | |
|
23 | 23 | import mock |
|
24 | 24 | import pytest |
|
25 | 25 | |
|
26 | 26 | from rhodecode.tests.utils import CustomTestApp |
|
27 | 27 | |
|
28 | 28 | from rhodecode.lib.caching_query import FromCache |
|
29 | 29 | from rhodecode.lib.hooks_daemon import DummyHooksCallbackDaemon |
|
30 | 30 | from rhodecode.lib.middleware import simplevcs |
|
31 | 31 | from rhodecode.lib.middleware.https_fixup import HttpsFixup |
|
32 | 32 | from rhodecode.lib.middleware.utils import scm_app_http |
|
33 | 33 | from rhodecode.model.db import User, _hash_key |
|
34 | 34 | from rhodecode.model.meta import Session |
|
35 | 35 | from rhodecode.tests import ( |
|
36 | 36 | HG_REPO, TEST_USER_ADMIN_LOGIN, TEST_USER_ADMIN_PASS) |
|
37 | 37 | from rhodecode.tests.lib.middleware import mock_scm_app |
|
38 | 38 | from rhodecode.tests.utils import set_anonymous_access |
|
39 | 39 | |
|
40 | 40 | |
|
41 | 41 | class StubVCSController(simplevcs.SimpleVCS): |
|
42 | 42 | |
|
43 | 43 | SCM = 'hg' |
|
44 | 44 | stub_response_body = tuple() |
|
45 | 45 | |
|
46 | 46 | def __init__(self, *args, **kwargs): |
|
47 | 47 | super(StubVCSController, self).__init__(*args, **kwargs) |
|
48 | 48 | self._action = 'pull' |
|
49 | 49 | self._name = HG_REPO |
|
50 | 50 | self.set_repo_names(None) |
|
51 | 51 | |
|
52 | 52 | def _get_repository_name(self, environ): |
|
53 | 53 | return self._name |
|
54 | 54 | |
|
55 | 55 | def _get_action(self, environ): |
|
56 | 56 | return self._action |
|
57 | 57 | |
|
58 | 58 | def _create_wsgi_app(self, repo_path, repo_name, config): |
|
59 | 59 | def fake_app(environ, start_response): |
|
60 | 60 | start_response('200 OK', []) |
|
61 | 61 | return self.stub_response_body |
|
62 | 62 | return fake_app |
|
63 | 63 | |
|
64 | 64 | def _create_config(self, extras, repo_name): |
|
65 | 65 | return None |
|
66 | 66 | |
|
67 | 67 | |
|
68 | 68 | @pytest.fixture |
|
69 | 69 | def vcscontroller(pylonsapp, config_stub): |
|
70 | 70 | config_stub.testing_securitypolicy() |
|
71 | 71 | config_stub.include('rhodecode.authentication') |
|
72 | 72 | |
|
73 | 73 | set_anonymous_access(True) |
|
74 | 74 | controller = StubVCSController(pylonsapp, pylonsapp.config, None) |
|
75 | 75 | app = HttpsFixup(controller, pylonsapp.config) |
|
76 | 76 | app = CustomTestApp(app) |
|
77 | 77 | |
|
78 | 78 | _remove_default_user_from_query_cache() |
|
79 | 79 | |
|
80 | 80 | # Sanity checks that things are set up correctly |
|
81 | 81 | app.get('/' + HG_REPO, status=200) |
|
82 | 82 | |
|
83 | 83 | app.controller = controller |
|
84 | 84 | return app |
|
85 | 85 | |
|
86 | 86 | |
|
87 | 87 | def _remove_default_user_from_query_cache(): |
|
88 | 88 | user = User.get_default_user(cache=True) |
|
89 | 89 | query = Session().query(User).filter(User.username == user.username) |
|
90 | 90 | query = query.options(FromCache( |
|
91 | 91 | "sql_cache_short", "get_user_%s" % _hash_key(user.username))) |
|
92 | 92 | query.invalidate() |
|
93 | 93 | Session().expire(user) |
|
94 | 94 | |
|
95 | 95 | |
|
96 | 96 | @pytest.fixture |
|
97 | 97 | def disable_anonymous_user(request, pylonsapp): |
|
98 | 98 | set_anonymous_access(False) |
|
99 | 99 | |
|
100 | 100 | @request.addfinalizer |
|
101 | 101 | def cleanup(): |
|
102 | 102 | set_anonymous_access(True) |
|
103 | 103 | |
|
104 | 104 | |
|
105 | 105 | def test_handles_exceptions_during_permissions_checks( |
|
106 | 106 | vcscontroller, disable_anonymous_user): |
|
107 | 107 | user_and_pass = '%s:%s' % (TEST_USER_ADMIN_LOGIN, TEST_USER_ADMIN_PASS) |
|
108 | 108 | auth_password = base64.encodestring(user_and_pass).strip() |
|
109 | 109 | extra_environ = { |
|
110 | 110 | 'AUTH_TYPE': 'Basic', |
|
111 | 111 | 'HTTP_AUTHORIZATION': 'Basic %s' % auth_password, |
|
112 | 112 | 'REMOTE_USER': TEST_USER_ADMIN_LOGIN, |
|
113 | 113 | } |
|
114 | 114 | |
|
115 | 115 | # Verify that things are hooked up correctly |
|
116 | 116 | vcscontroller.get('/', status=200, extra_environ=extra_environ) |
|
117 | 117 | |
|
118 | 118 | # Simulate trouble during permission checks |
|
119 | 119 | with mock.patch('rhodecode.model.db.User.get_by_username', |
|
120 | 120 | side_effect=Exception) as get_user: |
|
121 | 121 | # Verify that a correct 500 is returned and check that the expected |
|
122 | 122 | # code path was hit. |
|
123 | 123 | vcscontroller.get('/', status=500, extra_environ=extra_environ) |
|
124 | 124 | assert get_user.called |
|
125 | 125 | |
|
126 | 126 | |
|
127 | 127 | def test_returns_forbidden_if_no_anonymous_access( |
|
128 | 128 | vcscontroller, disable_anonymous_user): |
|
129 | 129 | vcscontroller.get('/', status=401) |
|
130 | 130 | |
|
131 | 131 | |
|
132 | 132 | class StubFailVCSController(simplevcs.SimpleVCS): |
|
133 | 133 | def _handle_request(self, environ, start_response): |
|
134 | 134 | raise Exception("BOOM") |
|
135 | 135 | |
|
136 | 136 | |
|
137 | 137 | @pytest.fixture(scope='module') |
|
138 | 138 | def fail_controller(pylonsapp): |
|
139 | 139 | controller = StubFailVCSController(pylonsapp, pylonsapp.config, None) |
|
140 | 140 | controller = HttpsFixup(controller, pylonsapp.config) |
|
141 | 141 | controller = CustomTestApp(controller) |
|
142 | 142 | return controller |
|
143 | 143 | |
|
144 | 144 | |
|
145 | 145 | def test_handles_exceptions_as_internal_server_error(fail_controller): |
|
146 | 146 | fail_controller.get('/', status=500) |
|
147 | 147 | |
|
148 | 148 | |
|
149 | 149 | def test_provides_traceback_for_appenlight(fail_controller): |
|
150 | 150 | response = fail_controller.get( |
|
151 | 151 | '/', status=500, extra_environ={'appenlight.client': 'fake'}) |
|
152 | 152 | assert 'appenlight.__traceback' in response.request.environ |
|
153 | 153 | |
|
154 | 154 | |
|
155 | 155 | def test_provides_utils_scm_app_as_scm_app_by_default(pylonsapp): |
|
156 | 156 | controller = StubVCSController(pylonsapp, pylonsapp.config, None) |
|
157 | 157 | assert controller.scm_app is scm_app_http |
|
158 | 158 | |
|
159 | 159 | |
|
160 | 160 | def test_allows_to_override_scm_app_via_config(pylonsapp): |
|
161 | 161 | config = pylonsapp.config.copy() |
|
162 | 162 | config['vcs.scm_app_implementation'] = ( |
|
163 | 163 | 'rhodecode.tests.lib.middleware.mock_scm_app') |
|
164 | 164 | controller = StubVCSController(pylonsapp, config, None) |
|
165 | 165 | assert controller.scm_app is mock_scm_app |
|
166 | 166 | |
|
167 | 167 | |
|
168 | 168 | @pytest.mark.parametrize('query_string, expected', [ |
|
169 | 169 | ('cmd=stub_command', True), |
|
170 | 170 | ('cmd=listkeys', False), |
|
171 | 171 | ]) |
|
172 | 172 | def test_should_check_locking(query_string, expected): |
|
173 | 173 | result = simplevcs._should_check_locking(query_string) |
|
174 | 174 | assert result == expected |
|
175 | 175 | |
|
176 | 176 | |
|
177 | 177 | class TestShadowRepoRegularExpression(object): |
|
178 | 178 | pr_segment = 'pull-request' |
|
179 | 179 | shadow_segment = 'repository' |
|
180 | 180 | |
|
181 | 181 | @pytest.mark.parametrize('url, expected', [ |
|
182 | 182 | # repo with/without groups |
|
183 | 183 | ('My-Repo/{pr_segment}/1/{shadow_segment}', True), |
|
184 | 184 | ('Group/My-Repo/{pr_segment}/2/{shadow_segment}', True), |
|
185 | 185 | ('Group/Sub-Group/My-Repo/{pr_segment}/3/{shadow_segment}', True), |
|
186 | 186 | ('Group/Sub-Group1/Sub-Group2/My-Repo/{pr_segment}/3/{shadow_segment}', True), |
|
187 | 187 | |
|
188 | 188 | # pull request ID |
|
189 | 189 | ('MyRepo/{pr_segment}/1/{shadow_segment}', True), |
|
190 | 190 | ('MyRepo/{pr_segment}/1234567890/{shadow_segment}', True), |
|
191 | 191 | ('MyRepo/{pr_segment}/-1/{shadow_segment}', False), |
|
192 | 192 | ('MyRepo/{pr_segment}/invalid/{shadow_segment}', False), |
|
193 | 193 | |
|
194 | 194 | # unicode |
|
195 | 195 | (u'Sp€çîál-Repö/{pr_segment}/1/{shadow_segment}', True), |
|
196 | 196 | (u'Sp€çîál-Gröüp/Sp€çîál-Repö/{pr_segment}/1/{shadow_segment}', True), |
|
197 | 197 | |
|
198 | 198 | # trailing/leading slash |
|
199 | 199 | ('/My-Repo/{pr_segment}/1/{shadow_segment}', False), |
|
200 | 200 | ('My-Repo/{pr_segment}/1/{shadow_segment}/', False), |
|
201 | 201 | ('/My-Repo/{pr_segment}/1/{shadow_segment}/', False), |
|
202 | 202 | |
|
203 | 203 | # misc |
|
204 | 204 | ('My-Repo/{pr_segment}/1/{shadow_segment}/extra', False), |
|
205 | 205 | ('My-Repo/{pr_segment}/1/{shadow_segment}extra', False), |
|
206 | 206 | ]) |
|
207 | 207 | def test_shadow_repo_regular_expression(self, url, expected): |
|
208 | 208 | from rhodecode.lib.middleware.simplevcs import SimpleVCS |
|
209 | 209 | url = url.format( |
|
210 | 210 | pr_segment=self.pr_segment, |
|
211 | 211 | shadow_segment=self.shadow_segment) |
|
212 | 212 | match_obj = SimpleVCS.shadow_repo_re.match(url) |
|
213 | 213 | assert (match_obj is not None) == expected |
|
214 | 214 | |
|
215 | 215 | |
|
216 | 216 | @pytest.mark.backends('git', 'hg') |
|
217 | 217 | class TestShadowRepoExposure(object): |
|
218 | 218 | |
|
219 | 219 | def test_pull_on_shadow_repo_propagates_to_wsgi_app(self, pylonsapp): |
|
220 | 220 | """ |
|
221 | 221 | Check that a pull action to a shadow repo is propagated to the |
|
222 | 222 | underlying wsgi app. |
|
223 | 223 | """ |
|
224 | 224 | controller = StubVCSController(pylonsapp, pylonsapp.config, None) |
|
225 | 225 | controller._check_ssl = mock.Mock() |
|
226 | 226 | controller.is_shadow_repo = True |
|
227 | 227 | controller._action = 'pull' |
|
228 | 228 | controller.stub_response_body = 'dummy body value' |
|
229 | 229 | environ_stub = { |
|
230 | 230 | 'HTTP_HOST': 'test.example.com', |
|
231 | 231 | 'REQUEST_METHOD': 'GET', |
|
232 | 232 | 'wsgi.url_scheme': 'http', |
|
233 | 233 | } |
|
234 | 234 | |
|
235 | 235 | response = controller(environ_stub, mock.Mock()) |
|
236 | 236 | response_body = ''.join(response) |
|
237 | 237 | |
|
238 | 238 | # Assert that we got the response from the wsgi app. |
|
239 | 239 | assert response_body == controller.stub_response_body |
|
240 | 240 | |
|
241 | 241 | def test_push_on_shadow_repo_raises(self, pylonsapp): |
|
242 | 242 | """ |
|
243 | 243 | Check that a push action to a shadow repo is aborted. |
|
244 | 244 | """ |
|
245 | 245 | controller = StubVCSController(pylonsapp, pylonsapp.config, None) |
|
246 | 246 | controller._check_ssl = mock.Mock() |
|
247 | 247 | controller.is_shadow_repo = True |
|
248 | 248 | controller._action = 'push' |
|
249 | 249 | controller.stub_response_body = 'dummy body value' |
|
250 | 250 | environ_stub = { |
|
251 | 251 | 'HTTP_HOST': 'test.example.com', |
|
252 | 252 | 'REQUEST_METHOD': 'GET', |
|
253 | 253 | 'wsgi.url_scheme': 'http', |
|
254 | 254 | } |
|
255 | 255 | |
|
256 | 256 | response = controller(environ_stub, mock.Mock()) |
|
257 | 257 | response_body = ''.join(response) |
|
258 | 258 | |
|
259 | 259 | assert response_body != controller.stub_response_body |
|
260 | 260 | # Assert that a 406 error is returned. |
|
261 | 261 | assert '406 Not Acceptable' in response_body |
|
262 | 262 | |
|
263 | 263 | def test_set_repo_names_no_shadow(self, pylonsapp): |
|
264 | 264 | """ |
|
265 | 265 | Check that the set_repo_names method sets all names to the one returned |
|
266 | 266 | by the _get_repository_name method on a request to a non shadow repo. |
|
267 | 267 | """ |
|
268 | 268 | environ_stub = {} |
|
269 | 269 | controller = StubVCSController(pylonsapp, pylonsapp.config, None) |
|
270 | 270 | controller._name = 'RepoGroup/MyRepo' |
|
271 | 271 | controller.set_repo_names(environ_stub) |
|
272 | 272 | assert not controller.is_shadow_repo |
|
273 | 273 | assert (controller.url_repo_name == |
|
274 | 274 | controller.acl_repo_name == |
|
275 | 275 | controller.vcs_repo_name == |
|
276 | 276 | controller._get_repository_name(environ_stub)) |
|
277 | 277 | |
|
278 | 278 | def test_set_repo_names_with_shadow(self, pylonsapp, pr_util): |
|
279 | 279 | """ |
|
280 | 280 | Check that the set_repo_names method sets correct names on a request |
|
281 | 281 | to a shadow repo. |
|
282 | 282 | """ |
|
283 | 283 | from rhodecode.model.pull_request import PullRequestModel |
|
284 | 284 | |
|
285 | 285 | pull_request = pr_util.create_pull_request() |
|
286 | 286 | shadow_url = '{target}/{pr_segment}/{pr_id}/{shadow_segment}'.format( |
|
287 | 287 | target=pull_request.target_repo.repo_name, |
|
288 | 288 | pr_id=pull_request.pull_request_id, |
|
289 | 289 | pr_segment=TestShadowRepoRegularExpression.pr_segment, |
|
290 | 290 | shadow_segment=TestShadowRepoRegularExpression.shadow_segment) |
|
291 | 291 | controller = StubVCSController(pylonsapp, pylonsapp.config, None) |
|
292 | 292 | controller._name = shadow_url |
|
293 | 293 | controller.set_repo_names({}) |
|
294 | 294 | |
|
295 | 295 | # Get file system path to shadow repo for assertions. |
|
296 | 296 | workspace_id = PullRequestModel()._workspace_id(pull_request) |
|
297 | 297 | target_vcs = pull_request.target_repo.scm_instance() |
|
298 | 298 | vcs_repo_name = target_vcs._get_shadow_repository_path( |
|
299 | 299 | workspace_id) |
|
300 | 300 | |
|
301 | 301 | assert controller.vcs_repo_name == vcs_repo_name |
|
302 | 302 | assert controller.url_repo_name == shadow_url |
|
303 | 303 | assert controller.acl_repo_name == pull_request.target_repo.repo_name |
|
304 | 304 | assert controller.is_shadow_repo |
|
305 | 305 | |
|
306 | 306 | def test_set_repo_names_with_shadow_but_missing_pr( |
|
307 | 307 | self, pylonsapp, pr_util): |
|
308 | 308 | """ |
|
309 | 309 | Checks that the set_repo_names method enforces matching target repos |
|
310 | 310 | and pull request IDs. |
|
311 | 311 | """ |
|
312 | 312 | pull_request = pr_util.create_pull_request() |
|
313 | 313 | shadow_url = '{target}/{pr_segment}/{pr_id}/{shadow_segment}'.format( |
|
314 | 314 | target=pull_request.target_repo.repo_name, |
|
315 | 315 | pr_id=999999999, |
|
316 | 316 | pr_segment=TestShadowRepoRegularExpression.pr_segment, |
|
317 | 317 | shadow_segment=TestShadowRepoRegularExpression.shadow_segment) |
|
318 | 318 | controller = StubVCSController(pylonsapp, pylonsapp.config, None) |
|
319 | 319 | controller._name = shadow_url |
|
320 | 320 | controller.set_repo_names({}) |
|
321 | 321 | |
|
322 | 322 | assert not controller.is_shadow_repo |
|
323 | 323 | assert (controller.url_repo_name == |
|
324 | 324 | controller.acl_repo_name == |
|
325 | 325 | controller.vcs_repo_name) |
|
326 | 326 | |
|
327 | 327 | |
|
328 | 328 | @pytest.mark.usefixtures('db') |
|
329 | @mock.patch.multiple( | |
|
330 | 'Pyro4.config', SERVERTYPE='multiplex', POLLTIMEOUT=0.01) | |
|
331 | 329 | class TestGenerateVcsResponse: |
|
332 | 330 | |
|
333 | 331 | def test_ensures_that_start_response_is_called_early_enough(self): |
|
334 | 332 | self.call_controller_with_response_body(iter(['a', 'b'])) |
|
335 | 333 | assert self.start_response.called |
|
336 | 334 | |
|
337 | 335 | def test_invalidates_cache_after_body_is_consumed(self): |
|
338 | 336 | result = self.call_controller_with_response_body(iter(['a', 'b'])) |
|
339 | 337 | assert not self.was_cache_invalidated() |
|
340 | 338 | # Consume the result |
|
341 | 339 | list(result) |
|
342 | 340 | assert self.was_cache_invalidated() |
|
343 | 341 | |
|
344 | 342 | @mock.patch('rhodecode.lib.middleware.simplevcs.HTTPLockedRC') |
|
345 | 343 | def test_handles_locking_exception(self, http_locked_rc): |
|
346 | 344 | result = self.call_controller_with_response_body( |
|
347 | 345 | self.raise_result_iter(vcs_kind='repo_locked')) |
|
348 | 346 | assert not http_locked_rc.called |
|
349 | 347 | # Consume the result |
|
350 | 348 | list(result) |
|
351 | 349 | assert http_locked_rc.called |
|
352 | 350 | |
|
353 | 351 | @mock.patch('rhodecode.lib.middleware.simplevcs.HTTPRequirementError') |
|
354 | 352 | def test_handles_requirement_exception(self, http_requirement): |
|
355 | 353 | result = self.call_controller_with_response_body( |
|
356 | 354 | self.raise_result_iter(vcs_kind='requirement')) |
|
357 | 355 | assert not http_requirement.called |
|
358 | 356 | # Consume the result |
|
359 | 357 | list(result) |
|
360 | 358 | assert http_requirement.called |
|
361 | 359 | |
|
362 | 360 | @mock.patch('rhodecode.lib.middleware.simplevcs.HTTPLockedRC') |
|
363 | 361 | def test_handles_locking_exception_in_app_call(self, http_locked_rc): |
|
364 | 362 | app_factory_patcher = mock.patch.object( |
|
365 | 363 | StubVCSController, '_create_wsgi_app') |
|
366 | 364 | with app_factory_patcher as app_factory: |
|
367 | 365 | app_factory().side_effect = self.vcs_exception() |
|
368 | 366 | result = self.call_controller_with_response_body(['a']) |
|
369 | 367 | list(result) |
|
370 | 368 | assert http_locked_rc.called |
|
371 | 369 | |
|
372 | 370 | def test_raises_unknown_exceptions(self): |
|
373 | 371 | result = self.call_controller_with_response_body( |
|
374 | 372 | self.raise_result_iter(vcs_kind='unknown')) |
|
375 | 373 | with pytest.raises(Exception): |
|
376 | 374 | list(result) |
|
377 | 375 | |
|
378 | 376 | def test_prepare_callback_daemon_is_called(self): |
|
379 | 377 | def side_effect(extras): |
|
380 | 378 | return DummyHooksCallbackDaemon(), extras |
|
381 | 379 | |
|
382 | 380 | prepare_patcher = mock.patch.object( |
|
383 | 381 | StubVCSController, '_prepare_callback_daemon') |
|
384 | 382 | with prepare_patcher as prepare_mock: |
|
385 | 383 | prepare_mock.side_effect = side_effect |
|
386 | 384 | self.call_controller_with_response_body(iter(['a', 'b'])) |
|
387 | 385 | assert prepare_mock.called |
|
388 | 386 | assert prepare_mock.call_count == 1 |
|
389 | 387 | |
|
390 | 388 | def call_controller_with_response_body(self, response_body): |
|
391 | 389 | settings = { |
|
392 | 390 | 'base_path': 'fake_base_path', |
|
393 | 391 | 'vcs.hooks.protocol': 'http', |
|
394 | 392 | 'vcs.hooks.direct_calls': False, |
|
395 | 393 | } |
|
396 | 394 | controller = StubVCSController(None, settings, None) |
|
397 | 395 | controller._invalidate_cache = mock.Mock() |
|
398 | 396 | controller.stub_response_body = response_body |
|
399 | 397 | self.start_response = mock.Mock() |
|
400 | 398 | result = controller._generate_vcs_response( |
|
401 | 399 | environ={}, start_response=self.start_response, |
|
402 | 400 | repo_path='fake_repo_path', |
|
403 | 401 | extras={}, action='push') |
|
404 | 402 | self.controller = controller |
|
405 | 403 | return result |
|
406 | 404 | |
|
407 | 405 | def raise_result_iter(self, vcs_kind='repo_locked'): |
|
408 | 406 | """ |
|
409 | 407 | Simulates an exception due to a vcs raised exception if kind vcs_kind |
|
410 | 408 | """ |
|
411 | 409 | raise self.vcs_exception(vcs_kind=vcs_kind) |
|
412 | 410 | yield "never_reached" |
|
413 | 411 | |
|
414 | 412 | def vcs_exception(self, vcs_kind='repo_locked'): |
|
415 | 413 | locked_exception = Exception('TEST_MESSAGE') |
|
416 | 414 | locked_exception._vcs_kind = vcs_kind |
|
417 | 415 | return locked_exception |
|
418 | 416 | |
|
419 | 417 | def was_cache_invalidated(self): |
|
420 | 418 | return self.controller._invalidate_cache.called |
|
421 | 419 | |
|
422 | 420 | |
|
423 | 421 | class TestInitializeGenerator: |
|
424 | 422 | |
|
425 | 423 | def test_drains_first_element(self): |
|
426 | 424 | gen = self.factory(['__init__', 1, 2]) |
|
427 | 425 | result = list(gen) |
|
428 | 426 | assert result == [1, 2] |
|
429 | 427 | |
|
430 | 428 | @pytest.mark.parametrize('values', [ |
|
431 | 429 | [], |
|
432 | 430 | [1, 2], |
|
433 | 431 | ]) |
|
434 | 432 | def test_raises_value_error(self, values): |
|
435 | 433 | with pytest.raises(ValueError): |
|
436 | 434 | self.factory(values) |
|
437 | 435 | |
|
438 | 436 | @simplevcs.initialize_generator |
|
439 | 437 | def factory(self, iterable): |
|
440 | 438 | for elem in iterable: |
|
441 | 439 | yield elem |
|
442 | 440 | |
|
443 | 441 | |
|
444 | 442 | class TestPrepareHooksDaemon(object): |
|
445 | 443 | def test_calls_imported_prepare_callback_daemon(self, app_settings): |
|
446 | 444 | expected_extras = {'extra1': 'value1'} |
|
447 | 445 | daemon = DummyHooksCallbackDaemon() |
|
448 | 446 | |
|
449 | 447 | controller = StubVCSController(None, app_settings, None) |
|
450 | 448 | prepare_patcher = mock.patch.object( |
|
451 | 449 | simplevcs, 'prepare_callback_daemon', |
|
452 | 450 | return_value=(daemon, expected_extras)) |
|
453 | 451 | with prepare_patcher as prepare_mock: |
|
454 | 452 | callback_daemon, extras = controller._prepare_callback_daemon( |
|
455 | 453 | expected_extras.copy()) |
|
456 | 454 | prepare_mock.assert_called_once_with( |
|
457 | 455 | expected_extras, |
|
458 | 456 | protocol=app_settings['vcs.hooks.protocol'], |
|
459 | 457 | use_direct_calls=app_settings['vcs.hooks.direct_calls']) |
|
460 | 458 | |
|
461 | 459 | assert callback_daemon == daemon |
|
462 | 460 | assert extras == expected_extras
@@ -1,61 +1,59 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2010-2017 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | import mock |
|
22 | 22 | import pytest |
|
23 | 23 | |
|
24 | 24 | |
|
25 | 25 | @pytest.mark.usefixtures('autologin_user', 'app') |
|
26 | 26 | def test_vcs_available_returns_summary_page(app, backend): |
|
27 | 27 | url = '/{repo_name}'.format(repo_name=backend.repo.repo_name) |
|
28 | 28 | response = app.get(url) |
|
29 | 29 | assert response.status_code == 200 |
|
30 | 30 | assert 'Summary' in response.body |
|
31 | 31 | |
|
32 | 32 | |
|
33 | 33 | @pytest.mark.usefixtures('autologin_user', 'app') |
|
34 | 34 | def test_vcs_unavailable_returns_vcs_error_page(app, backend, app_settings): |
|
35 | 35 | from rhodecode.lib.vcs.exceptions import VCSCommunicationError |
|
36 | 36 | from rhodecode.lib.middleware.error_handling import ( |
|
37 | 37 | PylonsErrorHandlingMiddleware) |
|
38 | 38 | |
|
39 | 39 | # Depending on the used VCSServer protocol we have to patch a different |
|
40 | 40 | # RemoteRepo class to raise an exception. For the test it doesn't matter |
|
41 | # if http or pyro4 is used, it just requires the exception to be raised. | 

41 | # if http is used, it just requires the exception to be raised. | |
|
42 | 42 | vcs_protocol = app_settings['vcs.server.protocol'] |
|
43 | 43 | if vcs_protocol == 'http': |
|
44 | 44 | from rhodecode.lib.vcs.client_http import RemoteRepo |
|
45 | elif vcs_protocol == 'pyro4': | |
|
46 | from rhodecode.lib.vcs.client import RemoteRepo | |
|
47 | 45 | else: |
|
48 | 46 | pytest.fail('Unknown VCS server protocol: "{}"'.format(vcs_protocol)) |
|
49 | 47 | |
|
50 | 48 | url = '/{repo_name}'.format(repo_name=backend.repo.repo_name) |
|
51 | 49 | |
|
52 | 50 | # Patch remote repo to raise an exception instead of making a RPC. |
|
53 | 51 | with mock.patch.object(RemoteRepo, '__getattr__') as remote_mock: |
|
54 | 52 | remote_mock.side_effect = VCSCommunicationError() |
|
55 | 53 | # Patch pylons error handling middleware to not re-raise exceptions. |
|
56 | 54 | with mock.patch.object(PylonsErrorHandlingMiddleware, 'reraise') as r: |
|
57 | 55 | r.return_value = False |
|
58 | 56 | response = app.get(url, expect_errors=True) |
|
59 | 57 | |
|
60 | 58 | assert response.status_code == 502 |
|
61 | 59 | assert 'Could not connect to VCS Server' in response.body |
@@ -1,130 +1,83 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2016-2017 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | import mock | |
|
22 | import Pyro4 | |
|
23 | 21 | import pytest |
|
24 | 22 | |
|
25 | 23 | from rhodecode.tests.utils import CustomTestApp |
|
26 | 24 | from rhodecode.lib.middleware.utils import scm_app_http, scm_app |
|
27 | 25 | from rhodecode.lib.vcs.conf import settings |
|
28 | 26 | |
|
29 | 27 | |
|
30 | 28 | def vcs_http_app(vcsserver_http_echo_app): |
|
31 | 29 | """ |
|
32 | 30 | VcsHttpProxy wrapped in WebTest. |
|
33 | 31 | """ |
|
34 | 32 | git_url = vcsserver_http_echo_app.http_url + 'stream/git/' |
|
35 | 33 | vcs_http_proxy = scm_app_http.VcsHttpProxy( |
|
36 | 34 | git_url, 'stub_path', 'stub_name', None) |
|
37 | 35 | app = CustomTestApp(vcs_http_proxy) |
|
38 | 36 | return app |
|
39 | 37 | |
|
40 | 38 | |
|
41 | 39 | @pytest.fixture(scope='module') |
|
42 | 40 | def vcsserver_http_echo_app(request, vcsserver_factory): |
|
43 | 41 | """ |
|
44 | 42 | A running VCSServer with the EchoApp activated via HTTP. |
|
45 | 43 | """ |
|
46 | 44 | vcsserver = vcsserver_factory( |
|
47 | 45 | request=request, |
|
48 | 46 | use_http=True, |
|
49 | 47 | overrides=[{'app:main': {'dev.use_echo_app': 'true'}}]) |
|
50 | 48 | return vcsserver |
|
51 | 49 | |
|
52 | 50 | |
|
53 | 51 | @pytest.fixture(scope='session') |
|
54 | 52 | def data(): |
|
55 | 53 | one_kb = "x" * 1024 |
|
56 | 54 | return one_kb * 1024 * 10 |
|
57 | 55 | |
|
58 | 56 | |
|
59 | 57 | def test_reuse_app_no_data(repeat, vcsserver_http_echo_app): |
|
60 | 58 | app = vcs_http_app(vcsserver_http_echo_app) |
|
61 | 59 | for x in xrange(repeat / 10): |
|
62 | 60 | response = app.post('/') |
|
63 | 61 | assert response.status_code == 200 |
|
64 | 62 | |
|
65 | 63 | |
|
66 | 64 | def test_reuse_app_with_data(data, repeat, vcsserver_http_echo_app): |
|
67 | 65 | app = vcs_http_app(vcsserver_http_echo_app) |
|
68 | 66 | for x in xrange(repeat / 10): |
|
69 | 67 | response = app.post('/', params=data) |
|
70 | 68 | assert response.status_code == 200 |
|
71 | 69 | |
|
72 | 70 | |
|
73 | 71 | def test_create_app_per_request_no_data(repeat, vcsserver_http_echo_app): |
|
74 | 72 | for x in xrange(repeat / 10): |
|
75 | 73 | app = vcs_http_app(vcsserver_http_echo_app) |
|
76 | 74 | response = app.post('/') |
|
77 | 75 | assert response.status_code == 200 |
|
78 | 76 | |
|
79 | 77 | |
|
80 | 78 | def test_create_app_per_request_with_data( |
|
81 | 79 | data, repeat, vcsserver_http_echo_app): |
|
82 | 80 | for x in xrange(repeat / 10): |
|
83 | 81 | app = vcs_http_app(vcsserver_http_echo_app) |
|
84 | 82 | response = app.post('/', params=data) |
|
85 | 83 | assert response.status_code == 200 |
|
86 | ||
|
87 | ||
|
88 | @pytest.fixture(scope='module') | |
|
89 | def vcsserver_pyro_echo_app(request, vcsserver_factory): | |
|
90 | """ | |
|
91 | A running VCSServer with the EchoApp activated via Pyro4. | |
|
92 | """ | |
|
93 | vcsserver = vcsserver_factory( | |
|
94 | request=request, | |
|
95 | use_http=False, | |
|
96 | overrides=[{'DEFAULT': {'dev.use_echo_app': 'true'}}]) | |
|
97 | return vcsserver | |
|
98 | ||
|
99 | ||
|
100 | def vcs_pyro4_app(vcsserver_pyro_echo_app): | |
|
101 | """ | |
|
102 | Pyro4 based Vcs proxy wrapped in WebTest | |
|
103 | """ | |
|
104 | stub_config = { | |
|
105 | 'git_update_server_info': 'stub', | |
|
106 | } | |
|
107 | server_and_port = vcsserver_pyro_echo_app.server_and_port | |
|
108 | GIT_REMOTE_WSGI = Pyro4.Proxy( | |
|
109 | settings.pyro_remote( | |
|
110 | settings.PYRO_GIT_REMOTE_WSGI, server_and_port)) | |
|
111 | with mock.patch('rhodecode.lib.middleware.utils.scm_app.GIT_REMOTE_WSGI', | |
|
112 | GIT_REMOTE_WSGI): | |
|
113 | pyro4_app = scm_app.create_git_wsgi_app( | |
|
114 | 'stub_path', 'stub_name', stub_config) | |
|
115 | app = CustomTestApp(pyro4_app) | |
|
116 | return app | |
|
117 | ||
|
118 | ||
|
119 | def test_pyro4_no_data(repeat, pylonsapp, vcsserver_pyro_echo_app): | |
|
120 | for x in xrange(repeat / 10): | |
|
121 | app = vcs_pyro4_app(vcsserver_pyro_echo_app) | |
|
122 | response = app.post('/') | |
|
123 | assert response.status_code == 200 | |
|
124 | ||
|
125 | ||
|
126 | def test_pyro4_with_data(repeat, pylonsapp, vcsserver_pyro_echo_app, data): | |
|
127 | for x in xrange(repeat / 10): | |
|
128 | app = vcs_pyro4_app(vcsserver_pyro_echo_app) | |
|
129 | response = app.post('/', params=data) | |
|
130 | assert response.status_code == 200 |
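
The tests above all drive the same round trip: POST a payload to VCSServer's
development echo application and expect HTTP 200 back, either reusing one
proxy app or building a fresh one per request. A self-contained sketch of that
round trip, with a local WSGI echo app and webtest.TestApp standing in for the
real VCSServer fixture and RhodeCode's CustomTestApp:

    from webtest import TestApp

    def echo_app(environ, start_response):
        # Echo the request body back, mimicking 'dev.use_echo_app = true'.
        length = int(environ.get('CONTENT_LENGTH') or 0)
        body = environ['wsgi.input'].read(length)
        start_response('200 OK', [('Content-Type', 'application/octet-stream')])
        return [body]

    app = TestApp(echo_app)
    payload = 'x' * 1024  # one kilobyte, a small cousin of the data() fixture
    for _ in xrange(10):  # reuse a single app across requests, as above
        response = app.post('/', params=payload)
        assert response.status_code == 200
        assert response.body == payload
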
@@ -1,388 +1,319 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2010-2017 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | import json |
|
22 | 22 | import logging |
|
23 | 23 | from StringIO import StringIO |
|
24 | 24 | |
|
25 | 25 | import mock |
|
26 | 26 | import pytest |
|
27 | 27 | |
|
28 | 28 | from rhodecode.lib import hooks_daemon |
|
29 | 29 | from rhodecode.tests.utils import assert_message_in_log |
|
30 | 30 | |
|
31 | 31 | |
|
32 | 32 | class TestDummyHooksCallbackDaemon(object): |
|
33 | 33 | def test_hooks_module_path_set_properly(self): |
|
34 | 34 | daemon = hooks_daemon.DummyHooksCallbackDaemon() |
|
35 | 35 | assert daemon.hooks_module == 'rhodecode.lib.hooks_daemon' |
|
36 | 36 | |
|
37 | 37 | def test_logs_entering_the_hook(self): |
|
38 | 38 | daemon = hooks_daemon.DummyHooksCallbackDaemon() |
|
39 | 39 | with mock.patch.object(hooks_daemon.log, 'debug') as log_mock: |
|
40 | 40 | with daemon as return_value: |
|
41 | 41 | log_mock.assert_called_once_with( |
|
42 | 42 | 'Running dummy hooks callback daemon') |
|
43 | 43 | assert return_value == daemon |
|
44 | 44 | |
|
45 | 45 | def test_logs_exiting_the_hook(self): |
|
46 | 46 | daemon = hooks_daemon.DummyHooksCallbackDaemon() |
|
47 | 47 | with mock.patch.object(hooks_daemon.log, 'debug') as log_mock: |
|
48 | 48 | with daemon: |
|
49 | 49 | pass |
|
50 | 50 | log_mock.assert_called_with('Exiting dummy hooks callback daemon') |
|
51 | 51 | |
|
52 | 52 | |
|
53 | 53 | class TestHooks(object): |
|
54 | 54 | def test_hooks_can_be_used_as_a_context_processor(self): |
|
55 | 55 | hooks = hooks_daemon.Hooks() |
|
56 | 56 | with hooks as return_value: |
|
57 | 57 | pass |
|
58 | 58 | assert hooks == return_value |
|
59 | 59 | |
|
60 | 60 | |
|
61 | 61 | class TestHooksHttpHandler(object): |
|
62 | 62 | def test_read_request_parses_method_name_and_arguments(self): |
|
63 | 63 | data = { |
|
64 | 64 | 'method': 'test', |
|
65 | 65 | 'extras': { |
|
66 | 66 | 'param1': 1, |
|
67 | 67 | 'param2': 'a' |
|
68 | 68 | } |
|
69 | 69 | } |
|
70 | 70 | request = self._generate_post_request(data) |
|
71 | 71 | hooks_patcher = mock.patch.object( |
|
72 | 72 | hooks_daemon.Hooks, data['method'], create=True, return_value=1) |
|
73 | 73 | |
|
74 | 74 | with hooks_patcher as hooks_mock: |
|
75 | 75 | MockServer(hooks_daemon.HooksHttpHandler, request) |
|
76 | 76 | |
|
77 | 77 | hooks_mock.assert_called_once_with(data['extras']) |
|
78 | 78 | |
|
79 | 79 | def test_hooks_serialized_result_is_returned(self): |
|
80 | 80 | request = self._generate_post_request({}) |
|
81 | 81 | rpc_method = 'test' |
|
82 | 82 | hook_result = { |
|
83 | 83 | 'first': 'one', |
|
84 | 84 | 'second': 2 |
|
85 | 85 | } |
|
86 | 86 | read_patcher = mock.patch.object( |
|
87 | 87 | hooks_daemon.HooksHttpHandler, '_read_request', |
|
88 | 88 | return_value=(rpc_method, {})) |
|
89 | 89 | hooks_patcher = mock.patch.object( |
|
90 | 90 | hooks_daemon.Hooks, rpc_method, create=True, |
|
91 | 91 | return_value=hook_result) |
|
92 | 92 | |
|
93 | 93 | with read_patcher, hooks_patcher: |
|
94 | 94 | server = MockServer(hooks_daemon.HooksHttpHandler, request) |
|
95 | 95 | |
|
96 | 96 | expected_result = json.dumps(hook_result) |
|
97 | 97 | assert server.request.output_stream.buflist[-1] == expected_result |
|
98 | 98 | |
|
99 | 99 | def test_exception_is_returned_in_response(self): |
|
100 | 100 | request = self._generate_post_request({}) |
|
101 | 101 | rpc_method = 'test' |
|
102 | 102 | read_patcher = mock.patch.object( |
|
103 | 103 | hooks_daemon.HooksHttpHandler, '_read_request', |
|
104 | 104 | return_value=(rpc_method, {})) |
|
105 | 105 | hooks_patcher = mock.patch.object( |
|
106 | 106 | hooks_daemon.Hooks, rpc_method, create=True, |
|
107 | 107 | side_effect=Exception('Test exception')) |
|
108 | 108 | |
|
109 | 109 | with read_patcher, hooks_patcher: |
|
110 | 110 | server = MockServer(hooks_daemon.HooksHttpHandler, request) |
|
111 | 111 | |
|
112 | 112 | expected_result = json.dumps({ |
|
113 | 113 | 'exception': 'Exception', |
|
114 | 114 | 'exception_args': ('Test exception', ) |
|
115 | 115 | }) |
|
116 | 116 | assert server.request.output_stream.buflist[-1] == expected_result |
|
117 | 117 | |
|
118 | 118 | def test_log_message_writes_to_debug_log(self, caplog): |
|
119 | 119 | ip_port = ('0.0.0.0', 8888) |
|
120 | 120 | handler = hooks_daemon.HooksHttpHandler( |
|
121 | 121 | MockRequest('POST /'), ip_port, mock.Mock()) |
|
122 | 122 | fake_date = '1/Nov/2015 00:00:00' |
|
123 | 123 | date_patcher = mock.patch.object( |
|
124 | 124 | handler, 'log_date_time_string', return_value=fake_date) |
|
125 | 125 | with date_patcher, caplog.at_level(logging.DEBUG): |
|
126 | 126 | handler.log_message('Some message %d, %s', 123, 'string') |
|
127 | 127 | |
|
128 | 128 | expected_message = '{} - - [{}] Some message 123, string'.format( |
|
129 | 129 | ip_port[0], fake_date) |
|
130 | 130 | assert_message_in_log( |
|
131 | 131 | caplog.records, expected_message, |
|
132 | 132 | levelno=logging.DEBUG, module='hooks_daemon') |
|
133 | 133 | |
|
134 | 134 | def _generate_post_request(self, data): |
|
135 | 135 | payload = json.dumps(data) |
|
136 | 136 | return 'POST / HTTP/1.0\nContent-Length: {}\n\n{}'.format( |
|
137 | 137 | len(payload), payload) |
|
138 | 138 | |
|
139 | 139 | |
|
140 | 140 | class ThreadedHookCallbackDaemon(object): |
|
141 | 141 | def test_constructor_calls_prepare(self): |
|
142 | 142 | prepare_daemon_patcher = mock.patch.object( |
|
143 | 143 | hooks_daemon.ThreadedHookCallbackDaemon, '_prepare') |
|
144 | 144 | with prepare_daemon_patcher as prepare_daemon_mock: |
|
145 | 145 | hooks_daemon.ThreadedHookCallbackDaemon() |
|
146 | 146 | prepare_daemon_mock.assert_called_once_with() |
|
147 | 147 | |
|
148 | 148 | def test_run_is_called_on_context_start(self): |
|
149 | 149 | patchers = mock.patch.multiple( |
|
150 | 150 | hooks_daemon.ThreadedHookCallbackDaemon, |
|
151 | 151 | _run=mock.DEFAULT, _prepare=mock.DEFAULT, __exit__=mock.DEFAULT) |
|
152 | 152 | |
|
153 | 153 | with patchers as mocks: |
|
154 | 154 | daemon = hooks_daemon.ThreadedHookCallbackDaemon() |
|
155 | 155 | with daemon as daemon_context: |
|
156 | 156 | pass |
|
157 | 157 | mocks['_run'].assert_called_once_with() |
|
158 | 158 | assert daemon_context == daemon |
|
159 | 159 | |
|
160 | 160 | def test_stop_is_called_on_context_exit(self): |
|
161 | 161 | patchers = mock.patch.multiple( |
|
162 | 162 | hooks_daemon.ThreadedHookCallbackDaemon, |
|
163 | 163 | _run=mock.DEFAULT, _prepare=mock.DEFAULT, _stop=mock.DEFAULT) |
|
164 | 164 | |
|
165 | 165 | with patchers as mocks: |
|
166 | 166 | daemon = hooks_daemon.ThreadedHookCallbackDaemon() |
|
167 | 167 | with daemon as daemon_context: |
|
168 | 168 | assert mocks['_stop'].call_count == 0 |
|
169 | 169 | |
|
170 | 170 | mocks['_stop'].assert_called_once_with() |
|
171 | 171 | assert daemon_context == daemon |
|
172 | 172 | |
|
173 | 173 | |
|
174 | class TestPyro4HooksCallbackDaemon(object): | |
|
175 | def test_prepare_inits_pyro4_and_registers_hooks(self, caplog): | |
|
176 | pyro4_daemon = mock.Mock() | |
|
177 | ||
|
178 | with self._pyro4_patcher(pyro4_daemon), caplog.at_level(logging.DEBUG): | |
|
179 | daemon = hooks_daemon.Pyro4HooksCallbackDaemon() | |
|
180 | ||
|
181 | assert daemon._daemon == pyro4_daemon | |
|
182 | ||
|
183 | assert pyro4_daemon.register.call_count == 1 | |
|
184 | args, kwargs = pyro4_daemon.register.call_args | |
|
185 | assert len(args) == 1 | |
|
186 | assert isinstance(args[0], hooks_daemon.Hooks) | |
|
187 | ||
|
188 | assert_message_in_log( | |
|
189 | caplog.records, | |
|
190 | 'Preparing callback daemon and registering hook object', | |
|
191 | levelno=logging.DEBUG, module='hooks_daemon') | |
|
192 | ||
|
193 | def test_run_creates_a_thread(self): | |
|
194 | thread = mock.Mock() | |
|
195 | pyro4_daemon = mock.Mock() | |
|
196 | ||
|
197 | with self._pyro4_patcher(pyro4_daemon): | |
|
198 | daemon = hooks_daemon.Pyro4HooksCallbackDaemon() | |
|
199 | ||
|
200 | with self._thread_patcher(thread) as thread_mock: | |
|
201 | daemon._run() | |
|
202 | ||
|
203 | assert thread_mock.call_count == 1 | |
|
204 | args, kwargs = thread_mock.call_args | |
|
205 | assert args == () | |
|
206 | assert kwargs['target'] == pyro4_daemon.requestLoop | |
|
207 | assert kwargs['kwargs']['loopCondition']() is True | |
|
208 | ||
|
209 | def test_stop_cleans_up_the_connection(self, caplog): | |
|
210 | thread = mock.Mock() | |
|
211 | pyro4_daemon = mock.Mock() | |
|
212 | ||
|
213 | with self._pyro4_patcher(pyro4_daemon): | |
|
214 | daemon = hooks_daemon.Pyro4HooksCallbackDaemon() | |
|
215 | ||
|
216 | with self._thread_patcher(thread), caplog.at_level(logging.DEBUG): | |
|
217 | with daemon: | |
|
218 | assert daemon._daemon == pyro4_daemon | |
|
219 | assert daemon._callback_thread == thread | |
|
220 | ||
|
221 | assert daemon._daemon is None | |
|
222 | assert daemon._callback_thread is None | |
|
223 | pyro4_daemon.close.assert_called_with() | |
|
224 | thread.join.assert_called_once_with() | |
|
225 | ||
|
226 | assert_message_in_log( | |
|
227 | caplog.records, 'Waiting for background thread to finish.', | |
|
228 | levelno=logging.DEBUG, module='hooks_daemon') | |
|
229 | ||
|
230 | def _pyro4_patcher(self, daemon): | |
|
231 | return mock.patch.object( | |
|
232 | hooks_daemon.Pyro4, 'Daemon', return_value=daemon) | |
|
233 | ||
|
234 | def _thread_patcher(self, thread): | |
|
235 | return mock.patch.object( | |
|
236 | hooks_daemon.threading, 'Thread', return_value=thread) | |
|
237 | ||
|
238 | ||
|
239 | 174 | class TestHttpHooksCallbackDaemon(object): |
|
240 | 175 | def test_prepare_inits_daemon_variable(self, tcp_server, caplog): |
|
241 | 176 | with self._tcp_patcher(tcp_server), caplog.at_level(logging.DEBUG): |
|
242 | 177 | daemon = hooks_daemon.HttpHooksCallbackDaemon() |
|
243 | 178 | assert daemon._daemon == tcp_server |
|
244 | 179 | |
|
245 | 180 | assert_message_in_log( |
|
246 | 181 | caplog.records, |
|
247 | 182 | 'Preparing callback daemon and registering hook object', |
|
248 | 183 | levelno=logging.DEBUG, module='hooks_daemon') |
|
249 | 184 | |
|
250 | 185 | def test_prepare_inits_hooks_uri_and_logs_it( |
|
251 | 186 | self, tcp_server, caplog): |
|
252 | 187 | with self._tcp_patcher(tcp_server), caplog.at_level(logging.DEBUG): |
|
253 | 188 | daemon = hooks_daemon.HttpHooksCallbackDaemon() |
|
254 | 189 | |
|
255 | 190 | _, port = tcp_server.server_address |
|
256 | 191 | expected_uri = '{}:{}'.format(daemon.IP_ADDRESS, port) |
|
257 | 192 | assert daemon.hooks_uri == expected_uri |
|
258 | 193 | |
|
259 | 194 | assert_message_in_log( |
|
260 | 195 | caplog.records, 'Hooks uri is: {}'.format(expected_uri), |
|
261 | 196 | levelno=logging.DEBUG, module='hooks_daemon') |
|
262 | 197 | |
|
263 | 198 | def test_run_creates_a_thread(self, tcp_server): |
|
264 | 199 | thread = mock.Mock() |
|
265 | 200 | |
|
266 | 201 | with self._tcp_patcher(tcp_server): |
|
267 | 202 | daemon = hooks_daemon.HttpHooksCallbackDaemon() |
|
268 | 203 | |
|
269 | 204 | with self._thread_patcher(thread) as thread_mock: |
|
270 | 205 | daemon._run() |
|
271 | 206 | |
|
272 | 207 | thread_mock.assert_called_once_with( |
|
273 | 208 | target=tcp_server.serve_forever, |
|
274 | 209 | kwargs={'poll_interval': daemon.POLL_INTERVAL}) |
|
275 | 210 | assert thread.daemon is True |
|
276 | 211 | thread.start.assert_called_once_with() |
|
277 | 212 | |
|
278 | 213 | def test_run_logs(self, tcp_server, caplog): |
|
279 | 214 | |
|
280 | 215 | with self._tcp_patcher(tcp_server): |
|
281 | 216 | daemon = hooks_daemon.HttpHooksCallbackDaemon() |
|
282 | 217 | |
|
283 | 218 | with self._thread_patcher(mock.Mock()), caplog.at_level(logging.DEBUG): |
|
284 | 219 | daemon._run() |
|
285 | 220 | |
|
286 | 221 | assert_message_in_log( |
|
287 | 222 | caplog.records, |
|
288 | 223 | 'Running event loop of callback daemon in background thread', |
|
289 | 224 | levelno=logging.DEBUG, module='hooks_daemon') |
|
290 | 225 | |
|
291 | 226 | def test_stop_cleans_up_the_connection(self, tcp_server, caplog): |
|
292 | 227 | thread = mock.Mock() |
|
293 | 228 | |
|
294 | 229 | with self._tcp_patcher(tcp_server): |
|
295 | 230 | daemon = hooks_daemon.HttpHooksCallbackDaemon() |
|
296 | 231 | |
|
297 | 232 | with self._thread_patcher(thread), caplog.at_level(logging.DEBUG): |
|
298 | 233 | with daemon: |
|
299 | 234 | assert daemon._daemon == tcp_server |
|
300 | 235 | assert daemon._callback_thread == thread |
|
301 | 236 | |
|
302 | 237 | assert daemon._daemon is None |
|
303 | 238 | assert daemon._callback_thread is None |
|
304 | 239 | tcp_server.shutdown.assert_called_with() |
|
305 | 240 | thread.join.assert_called_once_with() |
|
306 | 241 | |
|
307 | 242 | assert_message_in_log( |
|
308 | 243 | caplog.records, 'Waiting for background thread to finish.', |
|
309 | 244 | levelno=logging.DEBUG, module='hooks_daemon') |
|
310 | 245 | |
|
311 | 246 | def _tcp_patcher(self, tcp_server): |
|
312 | 247 | return mock.patch.object( |
|
313 | 248 | hooks_daemon, 'TCPServer', return_value=tcp_server) |
|
314 | 249 | |
|
315 | 250 | def _thread_patcher(self, thread): |
|
316 | 251 | return mock.patch.object( |
|
317 | 252 | hooks_daemon.threading, 'Thread', return_value=thread) |
|
318 | 253 | |
|
319 | 254 | |
|
320 | 255 | class TestPrepareHooksDaemon(object): |
|
321 | @pytest.mark.parametrize('protocol', ('http', 'pyro4')) |

256 | @pytest.mark.parametrize('protocol', ('http',)) |
|
322 | 257 | def test_returns_dummy_hooks_callback_daemon_when_using_direct_calls( |
|
323 | 258 | self, protocol): |
|
324 | 259 | expected_extras = {'extra1': 'value1'} |
|
325 | 260 | callback, extras = hooks_daemon.prepare_callback_daemon( |
|
326 | 261 | expected_extras.copy(), protocol=protocol, use_direct_calls=True) |
|
327 | 262 | assert isinstance(callback, hooks_daemon.DummyHooksCallbackDaemon) |
|
328 | 263 | expected_extras['hooks_module'] = 'rhodecode.lib.hooks_daemon' |
|
329 | 264 | assert extras == expected_extras |
|
330 | 265 | |
|
331 | 266 | @pytest.mark.parametrize('protocol, expected_class', ( |
|
332 | ('pyro4', hooks_daemon.Pyro4HooksCallbackDaemon), |

333 | ('http', hooks_daemon.HttpHooksCallbackDaemon) |
|
267 | ('http', hooks_daemon.HttpHooksCallbackDaemon), | |
|
334 | 268 | )) |
|
335 | 269 | def test_returns_real_hooks_callback_daemon_when_protocol_is_specified( |
|
336 | 270 | self, protocol, expected_class): |
|
337 | 271 | expected_extras = { |
|
338 | 272 | 'extra1': 'value1', |
|
339 | 273 | 'hooks_protocol': protocol.lower() |
|
340 | 274 | } |
|
341 | 275 | callback, extras = hooks_daemon.prepare_callback_daemon( |
|
342 | 276 | expected_extras.copy(), protocol=protocol, use_direct_calls=False) |
|
343 | 277 | assert isinstance(callback, expected_class) |
|
344 | 278 | hooks_uri = extras.pop('hooks_uri') |
|
345 | 279 | assert extras == expected_extras |
|
346 | if protocol.lower() == 'pyro4': | |
|
347 | assert hooks_uri.startswith('PYRO') | |
|
348 | 280 | |
|
349 | 281 | @pytest.mark.parametrize('protocol', ( |
|
350 | 282 | 'invalid', |
|
351 | 'Pyro4', | |
|
352 | 283 | 'Http', |
|
353 | 284 | 'HTTP', |
|
354 | 285 | )) |
|
355 | 286 | def test_raises_on_invalid_protocol(self, protocol): |
|
356 | 287 | expected_extras = { |
|
357 | 288 | 'extra1': 'value1', |
|
358 | 289 | 'hooks_protocol': protocol.lower() |
|
359 | 290 | } |
|
360 | 291 | with pytest.raises(Exception): |
|
361 | 292 | callback, extras = hooks_daemon.prepare_callback_daemon( |
|
362 | 293 | expected_extras.copy(), |
|
363 | 294 | protocol=protocol, |
|
364 | 295 | use_direct_calls=False) |
|
365 | 296 | |
|
366 | 297 | |
|
367 | 298 | class MockRequest(object): |
|
368 | 299 | def __init__(self, request): |
|
369 | 300 | self.request = request |
|
370 | 301 | self.input_stream = StringIO(b'{}'.format(self.request)) |
|
371 | 302 | self.output_stream = StringIO() |
|
372 | 303 | |
|
373 | 304 | def makefile(self, mode, *args, **kwargs): |
|
374 | 305 | return self.output_stream if mode == 'wb' else self.input_stream |
|
375 | 306 | |
|
376 | 307 | |
|
377 | 308 | class MockServer(object): |
|
378 | 309 | def __init__(self, Handler, request): |
|
379 | 310 | ip_port = ('0.0.0.0', 8888) |
|
380 | 311 | self.request = MockRequest(request) |
|
381 | 312 | self.handler = Handler(self.request, ip_port, self) |
|
382 | 313 | |
|
383 | 314 | |
|
384 | 315 | @pytest.fixture |
|
385 | 316 | def tcp_server(): |
|
386 | 317 | server = mock.Mock() |
|
387 | 318 | server.server_address = ('127.0.0.1', 8881) |
|
388 | 319 | return server |
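
The handler tests above pin down a small JSON-over-HTTP protocol: the request
body is a JSON object naming the hook method and its extras, and the response
body is the JSON-serialized hook result, or a serialized exception on failure.
A sketch of both directions; generate_hook_request mirrors
_generate_post_request above, while decode_hook_response and the 'repo_size'
method name are illustrative assumptions, not RhodeCode APIs:

    import json

    def generate_hook_request(method, extras):
        # Raw HTTP/1.0 POST whose JSON body names the hook method.
        payload = json.dumps({'method': method, 'extras': extras})
        return 'POST / HTTP/1.0\nContent-Length: {}\n\n{}'.format(
            len(payload), payload)

    def decode_hook_response(raw_body):
        # Unpack the JSON reply, re-raising serialized exceptions as seen
        # in test_exception_is_returned_in_response.
        result = json.loads(raw_body)
        if isinstance(result, dict) and 'exception' in result:
            raise Exception(*result.get('exception_args', ()))
        return result

    request = generate_hook_request('repo_size', {'repository': 'stub_repo'})
    assert request.startswith('POST / HTTP/1.0')
    assert decode_hook_response('{"first": "one", "second": 2}') == {
        'first': 'one', 'second': 2}
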
@@ -1,852 +1,851 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2010-2017 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | import mock |
|
22 | 22 | import pytest |
|
23 | 23 | import textwrap |
|
24 | 24 | |
|
25 | 25 | import rhodecode |
|
26 | 26 | from rhodecode.lib.utils2 import safe_unicode |
|
27 | 27 | from rhodecode.lib.vcs.backends import get_backend |
|
28 | 28 | from rhodecode.lib.vcs.backends.base import ( |
|
29 | 29 | MergeResponse, MergeFailureReason, Reference) |
|
30 | 30 | from rhodecode.lib.vcs.exceptions import RepositoryError |
|
31 | 31 | from rhodecode.lib.vcs.nodes import FileNode |
|
32 | 32 | from rhodecode.model.comment import CommentsModel |
|
33 | 33 | from rhodecode.model.db import PullRequest, Session |
|
34 | 34 | from rhodecode.model.pull_request import PullRequestModel |
|
35 | 35 | from rhodecode.model.user import UserModel |
|
36 | 36 | from rhodecode.tests import TEST_USER_ADMIN_LOGIN |
|
37 | 37 | |
|
38 | 38 | |
|
39 | 39 | pytestmark = [ |
|
40 | 40 | pytest.mark.backends("git", "hg"), |
|
41 | 41 | ] |
|
42 | 42 | |
|
43 | 43 | |
|
44 | 44 | class TestPullRequestModel: |
|
45 | 45 | |
|
46 | 46 | @pytest.fixture |
|
47 | 47 | def pull_request(self, request, backend, pr_util): |
|
48 | 48 | """ |
|
49 | 49 | A pull request combined with multiple patches.
|
50 | 50 | """ |
|
51 | 51 | BackendClass = get_backend(backend.alias) |
|
52 | 52 | self.merge_patcher = mock.patch.object(BackendClass, 'merge') |
|
53 | 53 | self.workspace_remove_patcher = mock.patch.object( |
|
54 | 54 | BackendClass, 'cleanup_merge_workspace') |
|
55 | 55 | |
|
56 | 56 | self.workspace_remove_mock = self.workspace_remove_patcher.start() |
|
57 | 57 | self.merge_mock = self.merge_patcher.start() |
|
58 | 58 | self.comment_patcher = mock.patch( |
|
59 | 59 | 'rhodecode.model.changeset_status.ChangesetStatusModel.set_status') |
|
60 | 60 | self.comment_patcher.start() |
|
61 | 61 | self.notification_patcher = mock.patch( |
|
62 | 62 | 'rhodecode.model.notification.NotificationModel.create') |
|
63 | 63 | self.notification_patcher.start() |
|
64 | 64 | self.helper_patcher = mock.patch( |
|
65 | 65 | 'rhodecode.lib.helpers.url') |
|
66 | 66 | self.helper_patcher.start() |
|
67 | 67 | |
|
68 | 68 | self.hook_patcher = mock.patch.object(PullRequestModel, |
|
69 | 69 | '_trigger_pull_request_hook') |
|
70 | 70 | self.hook_mock = self.hook_patcher.start() |
|
71 | 71 | |
|
72 | 72 | self.invalidation_patcher = mock.patch( |
|
73 | 73 | 'rhodecode.model.pull_request.ScmModel.mark_for_invalidation') |
|
74 | 74 | self.invalidation_mock = self.invalidation_patcher.start() |
|
75 | 75 | |
|
76 | 76 | self.pull_request = pr_util.create_pull_request( |
|
77 | 77 | mergeable=True, name_suffix=u'ąć') |
|
78 | 78 | self.source_commit = self.pull_request.source_ref_parts.commit_id |
|
79 | 79 | self.target_commit = self.pull_request.target_ref_parts.commit_id |
|
80 | 80 | self.workspace_id = 'pr-%s' % self.pull_request.pull_request_id |
|
81 | 81 | |
|
82 | 82 | @request.addfinalizer |
|
83 | 83 | def cleanup_pull_request(): |
|
84 | 84 | calls = [mock.call( |
|
85 | 85 | self.pull_request, self.pull_request.author, 'create')] |
|
86 | 86 | self.hook_mock.assert_has_calls(calls) |
|
87 | 87 | |
|
88 | 88 | self.workspace_remove_patcher.stop() |
|
89 | 89 | self.merge_patcher.stop() |
|
90 | 90 | self.comment_patcher.stop() |
|
91 | 91 | self.notification_patcher.stop() |
|
92 | 92 | self.helper_patcher.stop() |
|
93 | 93 | self.hook_patcher.stop() |
|
94 | 94 | self.invalidation_patcher.stop() |
|
95 | 95 | |
|
96 | 96 | return self.pull_request |
|
97 | 97 | |
|
98 | 98 | def test_get_all(self, pull_request): |
|
99 | 99 | prs = PullRequestModel().get_all(pull_request.target_repo) |
|
100 | 100 | assert isinstance(prs, list) |
|
101 | 101 | assert len(prs) == 1 |
|
102 | 102 | |
|
103 | 103 | def test_count_all(self, pull_request): |
|
104 | 104 | pr_count = PullRequestModel().count_all(pull_request.target_repo) |
|
105 | 105 | assert pr_count == 1 |
|
106 | 106 | |
|
107 | 107 | def test_get_awaiting_review(self, pull_request): |
|
108 | 108 | prs = PullRequestModel().get_awaiting_review(pull_request.target_repo) |
|
109 | 109 | assert isinstance(prs, list) |
|
110 | 110 | assert len(prs) == 1 |
|
111 | 111 | |
|
112 | 112 | def test_count_awaiting_review(self, pull_request): |
|
113 | 113 | pr_count = PullRequestModel().count_awaiting_review( |
|
114 | 114 | pull_request.target_repo) |
|
115 | 115 | assert pr_count == 1 |
|
116 | 116 | |
|
117 | 117 | def test_get_awaiting_my_review(self, pull_request): |
|
118 | 118 | PullRequestModel().update_reviewers( |
|
119 | 119 | pull_request, [(pull_request.author, ['author'])]) |
|
120 | 120 | prs = PullRequestModel().get_awaiting_my_review( |
|
121 | 121 | pull_request.target_repo, user_id=pull_request.author.user_id) |
|
122 | 122 | assert isinstance(prs, list) |
|
123 | 123 | assert len(prs) == 1 |
|
124 | 124 | |
|
125 | 125 | def test_count_awaiting_my_review(self, pull_request): |
|
126 | 126 | PullRequestModel().update_reviewers( |
|
127 | 127 | pull_request, [(pull_request.author, ['author'])]) |
|
128 | 128 | pr_count = PullRequestModel().count_awaiting_my_review( |
|
129 | 129 | pull_request.target_repo, user_id=pull_request.author.user_id) |
|
130 | 130 | assert pr_count == 1 |
|
131 | 131 | |
|
132 | 132 | def test_delete_calls_cleanup_merge(self, pull_request): |
|
133 | 133 | PullRequestModel().delete(pull_request) |
|
134 | 134 | |
|
135 | 135 | self.workspace_remove_mock.assert_called_once_with( |
|
136 | 136 | self.workspace_id) |
|
137 | 137 | |
|
138 | 138 | def test_close_calls_cleanup_and_hook(self, pull_request): |
|
139 | 139 | PullRequestModel().close_pull_request( |
|
140 | 140 | pull_request, pull_request.author) |
|
141 | 141 | |
|
142 | 142 | self.workspace_remove_mock.assert_called_once_with( |
|
143 | 143 | self.workspace_id) |
|
144 | 144 | self.hook_mock.assert_called_with( |
|
145 | 145 | self.pull_request, self.pull_request.author, 'close') |
|
146 | 146 | |
|
147 | 147 | def test_merge_status(self, pull_request): |
|
148 | 148 | self.merge_mock.return_value = MergeResponse( |
|
149 | 149 | True, False, None, MergeFailureReason.NONE) |
|
150 | 150 | |
|
151 | 151 | assert pull_request._last_merge_source_rev is None |
|
152 | 152 | assert pull_request._last_merge_target_rev is None |
|
153 | 153 | assert pull_request._last_merge_status is None |
|
154 | 154 | |
|
155 | 155 | status, msg = PullRequestModel().merge_status(pull_request) |
|
156 | 156 | assert status is True |
|
157 | 157 | assert msg.eval() == 'This pull request can be automatically merged.' |
|
158 | 158 | self.merge_mock.assert_called_once_with( |
|
159 | 159 | pull_request.target_ref_parts, |
|
160 | 160 | pull_request.source_repo.scm_instance(), |
|
161 | 161 | pull_request.source_ref_parts, self.workspace_id, dry_run=True, |
|
162 | 162 | use_rebase=False) |
|
163 | 163 | |
|
164 | 164 | assert pull_request._last_merge_source_rev == self.source_commit |
|
165 | 165 | assert pull_request._last_merge_target_rev == self.target_commit |
|
166 | 166 | assert pull_request._last_merge_status is MergeFailureReason.NONE |
|
167 | 167 | |
|
168 | 168 | self.merge_mock.reset_mock() |
|
169 | 169 | status, msg = PullRequestModel().merge_status(pull_request) |
|
170 | 170 | assert status is True |
|
171 | 171 | assert msg.eval() == 'This pull request can be automatically merged.' |
|
172 | 172 | assert self.merge_mock.called is False |
|
173 | 173 | |
|
174 | 174 | def test_merge_status_known_failure(self, pull_request): |
|
175 | 175 | self.merge_mock.return_value = MergeResponse( |
|
176 | 176 | False, False, None, MergeFailureReason.MERGE_FAILED) |
|
177 | 177 | |
|
178 | 178 | assert pull_request._last_merge_source_rev is None |
|
179 | 179 | assert pull_request._last_merge_target_rev is None |
|
180 | 180 | assert pull_request._last_merge_status is None |
|
181 | 181 | |
|
182 | 182 | status, msg = PullRequestModel().merge_status(pull_request) |
|
183 | 183 | assert status is False |
|
184 | 184 | assert ( |
|
185 | 185 | msg.eval() == |
|
186 | 186 | 'This pull request cannot be merged because of merge conflicts.') |
|
187 | 187 | self.merge_mock.assert_called_once_with( |
|
188 | 188 | pull_request.target_ref_parts, |
|
189 | 189 | pull_request.source_repo.scm_instance(), |
|
190 | 190 | pull_request.source_ref_parts, self.workspace_id, dry_run=True, |
|
191 | 191 | use_rebase=False) |
|
192 | 192 | |
|
193 | 193 | assert pull_request._last_merge_source_rev == self.source_commit |
|
194 | 194 | assert pull_request._last_merge_target_rev == self.target_commit |
|
195 | 195 | assert ( |
|
196 | 196 | pull_request._last_merge_status is MergeFailureReason.MERGE_FAILED) |
|
197 | 197 | |
|
198 | 198 | self.merge_mock.reset_mock() |
|
199 | 199 | status, msg = PullRequestModel().merge_status(pull_request) |
|
200 | 200 | assert status is False |
|
201 | 201 | assert ( |
|
202 | 202 | msg.eval() == |
|
203 | 203 | 'This pull request cannot be merged because of merge conflicts.') |
|
204 | 204 | assert self.merge_mock.called is False |
|
205 | 205 | |
|
206 | 206 | def test_merge_status_unknown_failure(self, pull_request): |
|
207 | 207 | self.merge_mock.return_value = MergeResponse( |
|
208 | 208 | False, False, None, MergeFailureReason.UNKNOWN) |
|
209 | 209 | |
|
210 | 210 | assert pull_request._last_merge_source_rev is None |
|
211 | 211 | assert pull_request._last_merge_target_rev is None |
|
212 | 212 | assert pull_request._last_merge_status is None |
|
213 | 213 | |
|
214 | 214 | status, msg = PullRequestModel().merge_status(pull_request) |
|
215 | 215 | assert status is False |
|
216 | 216 | assert msg.eval() == ( |
|
217 | 217 | 'This pull request cannot be merged because of an unhandled' |
|
218 | 218 | ' exception.') |
|
219 | 219 | self.merge_mock.assert_called_once_with( |
|
220 | 220 | pull_request.target_ref_parts, |
|
221 | 221 | pull_request.source_repo.scm_instance(), |
|
222 | 222 | pull_request.source_ref_parts, self.workspace_id, dry_run=True, |
|
223 | 223 | use_rebase=False) |
|
224 | 224 | |
|
225 | 225 | assert pull_request._last_merge_source_rev is None |
|
226 | 226 | assert pull_request._last_merge_target_rev is None |
|
227 | 227 | assert pull_request._last_merge_status is None |
|
228 | 228 | |
|
229 | 229 | self.merge_mock.reset_mock() |
|
230 | 230 | status, msg = PullRequestModel().merge_status(pull_request) |
|
231 | 231 | assert status is False |
|
232 | 232 | assert msg.eval() == ( |
|
233 | 233 | 'This pull request cannot be merged because of an unhandled' |
|
234 | 234 | ' exception.') |
|
235 | 235 | assert self.merge_mock.called is True |
|
236 | 236 | |
|
237 | 237 | def test_merge_status_when_target_is_locked(self, pull_request): |
|
238 | 238 | pull_request.target_repo.locked = [1, u'12345.50', 'lock_web'] |
|
239 | 239 | status, msg = PullRequestModel().merge_status(pull_request) |
|
240 | 240 | assert status is False |
|
241 | 241 | assert msg.eval() == ( |
|
242 | 242 | 'This pull request cannot be merged because the target repository' |
|
243 | 243 | ' is locked.') |
|
244 | 244 | |
|
245 | 245 | def test_merge_status_requirements_check_target(self, pull_request): |
|
246 | 246 | |
|
247 | 247 | def has_largefiles(self, repo): |
|
248 | 248 | return repo == pull_request.source_repo |
|
249 | 249 | |
|
250 | 250 | patcher = mock.patch.object( |
|
251 | 251 | PullRequestModel, '_has_largefiles', has_largefiles) |
|
252 | 252 | with patcher: |
|
253 | 253 | status, msg = PullRequestModel().merge_status(pull_request) |
|
254 | 254 | |
|
255 | 255 | assert status is False |
|
256 | 256 | assert msg == 'Target repository large files support is disabled.' |
|
257 | 257 | |
|
258 | 258 | def test_merge_status_requirements_check_source(self, pull_request): |
|
259 | 259 | |
|
260 | 260 | def has_largefiles(self, repo): |
|
261 | 261 | return repo == pull_request.target_repo |
|
262 | 262 | |
|
263 | 263 | patcher = mock.patch.object( |
|
264 | 264 | PullRequestModel, '_has_largefiles', has_largefiles) |
|
265 | 265 | with patcher: |
|
266 | 266 | status, msg = PullRequestModel().merge_status(pull_request) |
|
267 | 267 | |
|
268 | 268 | assert status is False |
|
269 | 269 | assert msg == 'Source repository large files support is disabled.' |
|
270 | 270 | |
|
271 | 271 | def test_merge(self, pull_request, merge_extras): |
|
272 | 272 | user = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN) |
|
273 | 273 | merge_ref = Reference( |
|
274 | 274 | 'type', 'name', '6126b7bfcc82ad2d3deaee22af926b082ce54cc6') |
|
275 | 275 | self.merge_mock.return_value = MergeResponse( |
|
276 | 276 | True, True, merge_ref, MergeFailureReason.NONE) |
|
277 | 277 | |
|
278 | 278 | merge_extras['repository'] = pull_request.target_repo.repo_name |
|
279 | 279 | PullRequestModel().merge( |
|
280 | 280 | pull_request, pull_request.author, extras=merge_extras) |
|
281 | 281 | |
|
282 | 282 | message = ( |
|
283 | 283 | u'Merge pull request #{pr_id} from {source_repo} {source_ref_name}' |
|
284 | 284 | u'\n\n {pr_title}'.format( |
|
285 | 285 | pr_id=pull_request.pull_request_id, |
|
286 | 286 | source_repo=safe_unicode( |
|
287 | 287 | pull_request.source_repo.scm_instance().name), |
|
288 | 288 | source_ref_name=pull_request.source_ref_parts.name, |
|
289 | 289 | pr_title=safe_unicode(pull_request.title) |
|
290 | 290 | ) |
|
291 | 291 | ) |
|
292 | 292 | self.merge_mock.assert_called_once_with( |
|
293 | 293 | pull_request.target_ref_parts, |
|
294 | 294 | pull_request.source_repo.scm_instance(), |
|
295 | 295 | pull_request.source_ref_parts, self.workspace_id, |
|
296 | 296 | user_name=user.username, user_email=user.email, message=message, |
|
297 | 297 | use_rebase=False |
|
298 | 298 | ) |
|
299 | 299 | self.invalidation_mock.assert_called_once_with( |
|
300 | 300 | pull_request.target_repo.repo_name) |
|
301 | 301 | |
|
302 | 302 | self.hook_mock.assert_called_with( |
|
303 | 303 | self.pull_request, self.pull_request.author, 'merge') |
|
304 | 304 | |
|
305 | 305 | pull_request = PullRequest.get(pull_request.pull_request_id) |
|
306 | 306 | assert ( |
|
307 | 307 | pull_request.merge_rev == |
|
308 | 308 | '6126b7bfcc82ad2d3deaee22af926b082ce54cc6') |
|
309 | 309 | |
|
310 | 310 | def test_merge_failed(self, pull_request, merge_extras): |
|
311 | 311 | user = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN) |
|
312 | 312 | merge_ref = Reference( |
|
313 | 313 | 'type', 'name', '6126b7bfcc82ad2d3deaee22af926b082ce54cc6') |
|
314 | 314 | self.merge_mock.return_value = MergeResponse( |
|
315 | 315 | False, False, merge_ref, MergeFailureReason.MERGE_FAILED) |
|
316 | 316 | |
|
317 | 317 | merge_extras['repository'] = pull_request.target_repo.repo_name |
|
318 | 318 | PullRequestModel().merge( |
|
319 | 319 | pull_request, pull_request.author, extras=merge_extras) |
|
320 | 320 | |
|
321 | 321 | message = ( |
|
322 | 322 | u'Merge pull request #{pr_id} from {source_repo} {source_ref_name}' |
|
323 | 323 | u'\n\n {pr_title}'.format( |
|
324 | 324 | pr_id=pull_request.pull_request_id, |
|
325 | 325 | source_repo=safe_unicode( |
|
326 | 326 | pull_request.source_repo.scm_instance().name), |
|
327 | 327 | source_ref_name=pull_request.source_ref_parts.name, |
|
328 | 328 | pr_title=safe_unicode(pull_request.title) |
|
329 | 329 | ) |
|
330 | 330 | ) |
|
331 | 331 | self.merge_mock.assert_called_once_with( |
|
332 | 332 | pull_request.target_ref_parts, |
|
333 | 333 | pull_request.source_repo.scm_instance(), |
|
334 | 334 | pull_request.source_ref_parts, self.workspace_id, |
|
335 | 335 | user_name=user.username, user_email=user.email, message=message, |
|
336 | 336 | use_rebase=False |
|
337 | 337 | ) |
|
338 | 338 | |
|
339 | 339 | pull_request = PullRequest.get(pull_request.pull_request_id) |
|
340 | 340 | assert self.invalidation_mock.called is False |
|
341 | 341 | assert pull_request.merge_rev is None |
|
342 | 342 | |
|
343 | 343 | def test_get_commit_ids(self, pull_request): |
|
344 | 344 | # The PR has not been merged yet, so expect an exception
|
345 | 345 | with pytest.raises(ValueError): |
|
346 | 346 | PullRequestModel()._get_commit_ids(pull_request) |
|
347 | 347 | |
|
348 | 348 | # Merge revision is in the revisions list |
|
349 | 349 | pull_request.merge_rev = pull_request.revisions[0] |
|
350 | 350 | commit_ids = PullRequestModel()._get_commit_ids(pull_request) |
|
351 | 351 | assert commit_ids == pull_request.revisions |
|
352 | 352 | |
|
353 | 353 | # Merge revision is not in the revisions list |
|
354 | 354 | pull_request.merge_rev = 'f000' * 10 |
|
355 | 355 | commit_ids = PullRequestModel()._get_commit_ids(pull_request) |
|
356 | 356 | assert commit_ids == pull_request.revisions + [pull_request.merge_rev] |
|
357 | 357 | |
|
358 | 358 | def test_get_diff_from_pr_version(self, pull_request): |
|
359 | 359 | source_repo = pull_request.source_repo |
|
360 | 360 | source_ref_id = pull_request.source_ref_parts.commit_id |
|
361 | 361 | target_ref_id = pull_request.target_ref_parts.commit_id |
|
362 | 362 | diff = PullRequestModel()._get_diff_from_pr_or_version( |
|
363 | 363 | source_repo, source_ref_id, target_ref_id, context=6) |
|
364 | 364 | assert 'file_1' in diff.raw |
|
365 | 365 | |
|
366 | 366 | def test_generate_title_returns_unicode(self): |
|
367 | 367 | title = PullRequestModel().generate_pullrequest_title( |
|
368 | 368 | source='source-dummy', |
|
369 | 369 | source_ref='source-ref-dummy', |
|
370 | 370 | target='target-dummy', |
|
371 | 371 | ) |
|
372 | 372 | assert type(title) == unicode |
|
373 | 373 | |
|
374 | 374 | |
|
375 | 375 | class TestIntegrationMerge(object): |
|
376 | 376 | @pytest.mark.parametrize('extra_config', ( |
|
377 | 377 | {'vcs.hooks.protocol': 'http', 'vcs.hooks.direct_calls': False}, |
|
378 | {'vcs.hooks.protocol': 'Pyro4', 'vcs.hooks.direct_calls': False}, | |
|
379 | 378 | )) |
|
380 | 379 | def test_merge_triggers_push_hooks( |
|
381 | 380 | self, pr_util, user_admin, capture_rcextensions, merge_extras, |
|
382 | 381 | extra_config): |
|
383 | 382 | pull_request = pr_util.create_pull_request( |
|
384 | 383 | approved=True, mergeable=True) |
|
385 | 384 | # TODO: johbo: Needed for sqlite, try to find an automatic way for it |
|
386 | 385 | merge_extras['repository'] = pull_request.target_repo.repo_name |
|
387 | 386 | Session().commit() |
|
388 | 387 | |
|
389 | 388 | with mock.patch.dict(rhodecode.CONFIG, extra_config, clear=False): |
|
390 | 389 | merge_state = PullRequestModel().merge( |
|
391 | 390 | pull_request, user_admin, extras=merge_extras) |
|
392 | 391 | |
|
393 | 392 | assert merge_state.executed |
|
394 | 393 | assert 'pre_push' in capture_rcextensions |
|
395 | 394 | assert 'post_push' in capture_rcextensions |
|
396 | 395 | |
|
397 | 396 | def test_merge_can_be_rejected_by_pre_push_hook( |
|
398 | 397 | self, pr_util, user_admin, capture_rcextensions, merge_extras): |
|
399 | 398 | pull_request = pr_util.create_pull_request( |
|
400 | 399 | approved=True, mergeable=True) |
|
401 | 400 | # TODO: johbo: Needed for sqlite, try to find an automatic way for it |
|
402 | 401 | merge_extras['repository'] = pull_request.target_repo.repo_name |
|
403 | 402 | Session().commit() |
|
404 | 403 | |
|
405 | 404 | with mock.patch('rhodecode.EXTENSIONS.PRE_PUSH_HOOK') as pre_pull: |
|
406 | 405 | pre_pull.side_effect = RepositoryError("Disallow push!") |
|
407 | 406 | merge_status = PullRequestModel().merge( |
|
408 | 407 | pull_request, user_admin, extras=merge_extras) |
|
409 | 408 | |
|
410 | 409 | assert not merge_status.executed |
|
411 | 410 | assert 'pre_push' not in capture_rcextensions |
|
412 | 411 | assert 'post_push' not in capture_rcextensions |
|
413 | 412 | |
|
414 | 413 | def test_merge_fails_if_target_is_locked( |
|
415 | 414 | self, pr_util, user_regular, merge_extras): |
|
416 | 415 | pull_request = pr_util.create_pull_request( |
|
417 | 416 | approved=True, mergeable=True) |
|
418 | 417 | locked_by = [user_regular.user_id + 1, 12345.50, 'lock_web'] |
|
419 | 418 | pull_request.target_repo.locked = locked_by |
|
420 | 419 | # TODO: johbo: Check if this can work based on the database, currently |
|
421 | 420 | # all data is pre-computed, that's why just updating the DB is not |
|
422 | 421 | # enough. |
|
423 | 422 | merge_extras['locked_by'] = locked_by |
|
424 | 423 | merge_extras['repository'] = pull_request.target_repo.repo_name |
|
425 | 424 | # TODO: johbo: Needed for sqlite, try to find an automatic way for it |
|
426 | 425 | Session().commit() |
|
427 | 426 | merge_status = PullRequestModel().merge( |
|
428 | 427 | pull_request, user_regular, extras=merge_extras) |
|
429 | 428 | assert not merge_status.executed |
|
430 | 429 | |
|
431 | 430 | |
|
432 | 431 | @pytest.mark.parametrize('use_outdated, inlines_count, outdated_count', [ |
|
433 | 432 | (False, 1, 0), |
|
434 | 433 | (True, 0, 1), |
|
435 | 434 | ]) |
|
436 | 435 | def test_outdated_comments( |
|
437 | 436 | pr_util, use_outdated, inlines_count, outdated_count): |
|
438 | 437 | pull_request = pr_util.create_pull_request() |
|
439 | 438 | pr_util.create_inline_comment(file_path='not_in_updated_diff') |
|
440 | 439 | |
|
441 | 440 | with outdated_comments_patcher(use_outdated) as outdated_comment_mock: |
|
442 | 441 | pr_util.add_one_commit() |
|
443 | 442 | assert_inline_comments( |
|
444 | 443 | pull_request, visible=inlines_count, outdated=outdated_count) |
|
445 | 444 | outdated_comment_mock.assert_called_with(pull_request) |
|
446 | 445 | |
|
447 | 446 | |
|
448 | 447 | @pytest.fixture |
|
449 | 448 | def merge_extras(user_regular): |
|
450 | 449 | """ |
|
451 | 450 | Context for the vcs operation when running a merge. |
|
452 | 451 | """ |
|
453 | 452 | extras = { |
|
454 | 453 | 'ip': '127.0.0.1', |
|
455 | 454 | 'username': user_regular.username, |
|
456 | 455 | 'action': 'push', |
|
457 | 456 | 'repository': 'fake_target_repo_name', |
|
458 | 457 | 'scm': 'git', |
|
459 | 458 | 'config': 'fake_config_ini_path', |
|
460 | 459 | 'make_lock': None, |
|
461 | 460 | 'locked_by': [None, None, None], |
|
462 | 461 | 'server_url': 'http://test.example.com:5000', |
|
463 | 462 | 'hooks': ['push', 'pull'], |
|
464 | 463 | 'is_shadow_repo': False, |
|
465 | 464 | } |
|
466 | 465 | return extras |
|
467 | 466 | |
|
468 | 467 | |
|
469 | 468 | class TestUpdateCommentHandling(object): |
|
470 | 469 | |
|
471 | 470 | @pytest.fixture(autouse=True, scope='class') |
|
472 | 471 | def enable_outdated_comments(self, request, pylonsapp): |
|
473 | 472 | config_patch = mock.patch.dict( |
|
474 | 473 | 'rhodecode.CONFIG', {'rhodecode_use_outdated_comments': True}) |
|
475 | 474 | config_patch.start() |
|
476 | 475 | |
|
477 | 476 | @request.addfinalizer |
|
478 | 477 | def cleanup(): |
|
479 | 478 | config_patch.stop() |
|
480 | 479 | |
|
481 | 480 | def test_comment_stays_unflagged_on_unchanged_diff(self, pr_util): |
|
482 | 481 | commits = [ |
|
483 | 482 | {'message': 'a'}, |
|
484 | 483 | {'message': 'b', 'added': [FileNode('file_b', 'test_content\n')]}, |
|
485 | 484 | {'message': 'c', 'added': [FileNode('file_c', 'test_content\n')]}, |
|
486 | 485 | ] |
|
487 | 486 | pull_request = pr_util.create_pull_request( |
|
488 | 487 | commits=commits, target_head='a', source_head='b', revisions=['b']) |
|
489 | 488 | pr_util.create_inline_comment(file_path='file_b') |
|
490 | 489 | pr_util.add_one_commit(head='c') |
|
491 | 490 | |
|
492 | 491 | assert_inline_comments(pull_request, visible=1, outdated=0) |
|
493 | 492 | |
|
494 | 493 | def test_comment_stays_unflagged_on_change_above(self, pr_util): |
|
495 | 494 | original_content = ''.join( |
|
496 | 495 | ['line {}\n'.format(x) for x in range(1, 11)]) |
|
497 | 496 | updated_content = 'new_line_at_top\n' + original_content |
|
498 | 497 | commits = [ |
|
499 | 498 | {'message': 'a'}, |
|
500 | 499 | {'message': 'b', 'added': [FileNode('file_b', original_content)]}, |
|
501 | 500 | {'message': 'c', 'changed': [FileNode('file_b', updated_content)]}, |
|
502 | 501 | ] |
|
503 | 502 | pull_request = pr_util.create_pull_request( |
|
504 | 503 | commits=commits, target_head='a', source_head='b', revisions=['b']) |
|
505 | 504 | |
|
506 | 505 | with outdated_comments_patcher(): |
|
507 | 506 | comment = pr_util.create_inline_comment( |
|
508 | 507 | line_no=u'n8', file_path='file_b') |
|
509 | 508 | pr_util.add_one_commit(head='c') |
|
510 | 509 | |
|
511 | 510 | assert_inline_comments(pull_request, visible=1, outdated=0) |
|
512 | 511 | assert comment.line_no == u'n9' |
|
513 | 512 | |
|
514 | 513 | def test_comment_stays_unflagged_on_change_below(self, pr_util): |
|
515 | 514 | original_content = ''.join(['line {}\n'.format(x) for x in range(10)]) |
|
516 | 515 | updated_content = original_content + 'new_line_at_end\n' |
|
517 | 516 | commits = [ |
|
518 | 517 | {'message': 'a'}, |
|
519 | 518 | {'message': 'b', 'added': [FileNode('file_b', original_content)]}, |
|
520 | 519 | {'message': 'c', 'changed': [FileNode('file_b', updated_content)]}, |
|
521 | 520 | ] |
|
522 | 521 | pull_request = pr_util.create_pull_request( |
|
523 | 522 | commits=commits, target_head='a', source_head='b', revisions=['b']) |
|
524 | 523 | pr_util.create_inline_comment(file_path='file_b') |
|
525 | 524 | pr_util.add_one_commit(head='c') |
|
526 | 525 | |
|
527 | 526 | assert_inline_comments(pull_request, visible=1, outdated=0) |
|
528 | 527 | |
|
529 | 528 | @pytest.mark.parametrize('line_no', ['n4', 'o4', 'n10', 'o9']) |
|
530 | 529 | def test_comment_flagged_on_change_around_context(self, pr_util, line_no): |
|
531 | 530 | base_lines = ['line {}\n'.format(x) for x in range(1, 13)] |
|
532 | 531 | change_lines = list(base_lines) |
|
533 | 532 | change_lines.insert(6, 'line 6a added\n') |
|
534 | 533 | |
|
535 | 534 | # Changes on the last line of sight |
|
536 | 535 | update_lines = list(change_lines) |
|
537 | 536 | update_lines[0] = 'line 1 changed\n' |
|
538 | 537 | update_lines[-1] = 'line 12 changed\n' |
|
539 | 538 | |
|
540 | 539 | def file_b(lines): |
|
541 | 540 | return FileNode('file_b', ''.join(lines)) |
|
542 | 541 | |
|
543 | 542 | commits = [ |
|
544 | 543 | {'message': 'a', 'added': [file_b(base_lines)]}, |
|
545 | 544 | {'message': 'b', 'changed': [file_b(change_lines)]}, |
|
546 | 545 | {'message': 'c', 'changed': [file_b(update_lines)]}, |
|
547 | 546 | ] |
|
548 | 547 | |
|
549 | 548 | pull_request = pr_util.create_pull_request( |
|
550 | 549 | commits=commits, target_head='a', source_head='b', revisions=['b']) |
|
551 | 550 | pr_util.create_inline_comment(line_no=line_no, file_path='file_b') |
|
552 | 551 | |
|
553 | 552 | with outdated_comments_patcher(): |
|
554 | 553 | pr_util.add_one_commit(head='c') |
|
555 | 554 | assert_inline_comments(pull_request, visible=0, outdated=1) |
|
556 | 555 | |
|
557 | 556 | @pytest.mark.parametrize("change, content", [ |
|
558 | 557 | ('changed', 'changed\n'), |
|
559 | 558 | ('removed', ''), |
|
560 | 559 | ], ids=['changed', 'removed']) |
|
561 | 560 | def test_comment_flagged_on_change(self, pr_util, change, content): |
|
562 | 561 | commits = [ |
|
563 | 562 | {'message': 'a'}, |
|
564 | 563 | {'message': 'b', 'added': [FileNode('file_b', 'test_content\n')]}, |
|
565 | 564 | {'message': 'c', change: [FileNode('file_b', content)]}, |
|
566 | 565 | ] |
|
567 | 566 | pull_request = pr_util.create_pull_request( |
|
568 | 567 | commits=commits, target_head='a', source_head='b', revisions=['b']) |
|
569 | 568 | pr_util.create_inline_comment(file_path='file_b') |
|
570 | 569 | |
|
571 | 570 | with outdated_comments_patcher(): |
|
572 | 571 | pr_util.add_one_commit(head='c') |
|
573 | 572 | assert_inline_comments(pull_request, visible=0, outdated=1) |
|
574 | 573 | |
|
575 | 574 | |
|
576 | 575 | class TestUpdateChangedFiles(object): |
|
577 | 576 | |
|
578 | 577 | def test_no_changes_on_unchanged_diff(self, pr_util): |
|
579 | 578 | commits = [ |
|
580 | 579 | {'message': 'a'}, |
|
581 | 580 | {'message': 'b', |
|
582 | 581 | 'added': [FileNode('file_b', 'test_content b\n')]}, |
|
583 | 582 | {'message': 'c', |
|
584 | 583 | 'added': [FileNode('file_c', 'test_content c\n')]}, |
|
585 | 584 | ] |
|
586 | 585 | # open a PR from a to b, adding file_b |
|
587 | 586 | pull_request = pr_util.create_pull_request( |
|
588 | 587 | commits=commits, target_head='a', source_head='b', revisions=['b'], |
|
589 | 588 | name_suffix='per-file-review') |
|
590 | 589 | |
|
591 | 590 | # modify PR adding new file file_c |
|
592 | 591 | pr_util.add_one_commit(head='c') |
|
593 | 592 | |
|
594 | 593 | assert_pr_file_changes( |
|
595 | 594 | pull_request, |
|
596 | 595 | added=['file_c'], |
|
597 | 596 | modified=[], |
|
598 | 597 | removed=[]) |
|
599 | 598 | |
|
600 | 599 | def test_modify_and_undo_modification_diff(self, pr_util): |
|
601 | 600 | commits = [ |
|
602 | 601 | {'message': 'a'}, |
|
603 | 602 | {'message': 'b', |
|
604 | 603 | 'added': [FileNode('file_b', 'test_content b\n')]}, |
|
605 | 604 | {'message': 'c', |
|
606 | 605 | 'changed': [FileNode('file_b', 'test_content b modified\n')]}, |
|
607 | 606 | {'message': 'd', |
|
608 | 607 | 'changed': [FileNode('file_b', 'test_content b\n')]}, |
|
609 | 608 | ] |
|
610 | 609 | # open a PR from a to b, adding file_b |
|
611 | 610 | pull_request = pr_util.create_pull_request( |
|
612 | 611 | commits=commits, target_head='a', source_head='b', revisions=['b'], |
|
613 | 612 | name_suffix='per-file-review') |
|
614 | 613 | |
|
615 | 614 | # modify PR modifying file file_b |
|
616 | 615 | pr_util.add_one_commit(head='c') |
|
617 | 616 | |
|
618 | 617 | assert_pr_file_changes( |
|
619 | 618 | pull_request, |
|
620 | 619 | added=[], |
|
621 | 620 | modified=['file_b'], |
|
622 | 621 | removed=[]) |
|
623 | 622 | |
|
624 | 623 | # move the head again to d, which rolls back the change,
|
625 | 624 | # meaning we should indicate no changes |
|
626 | 625 | pr_util.add_one_commit(head='d') |
|
627 | 626 | |
|
628 | 627 | assert_pr_file_changes( |
|
629 | 628 | pull_request, |
|
630 | 629 | added=[], |
|
631 | 630 | modified=[], |
|
632 | 631 | removed=[]) |
|
633 | 632 | |
|
634 | 633 | def test_updated_all_files_in_pr(self, pr_util): |
|
635 | 634 | commits = [ |
|
636 | 635 | {'message': 'a'}, |
|
637 | 636 | {'message': 'b', 'added': [ |
|
638 | 637 | FileNode('file_a', 'test_content a\n'), |
|
639 | 638 | FileNode('file_b', 'test_content b\n'), |
|
640 | 639 | FileNode('file_c', 'test_content c\n')]}, |
|
641 | 640 | {'message': 'c', 'changed': [ |
|
642 | 641 | FileNode('file_a', 'test_content a changed\n'), |
|
643 | 642 | FileNode('file_b', 'test_content b changed\n'), |
|
644 | 643 | FileNode('file_c', 'test_content c changed\n')]}, |
|
645 | 644 | ] |
|
646 | 645 | # open a PR from a to b, changing 3 files |
|
647 | 646 | pull_request = pr_util.create_pull_request( |
|
648 | 647 | commits=commits, target_head='a', source_head='b', revisions=['b'], |
|
649 | 648 | name_suffix='per-file-review') |
|
650 | 649 | |
|
651 | 650 | pr_util.add_one_commit(head='c') |
|
652 | 651 | |
|
653 | 652 | assert_pr_file_changes( |
|
654 | 653 | pull_request, |
|
655 | 654 | added=[], |
|
656 | 655 | modified=['file_a', 'file_b', 'file_c'], |
|
657 | 656 | removed=[]) |
|
658 | 657 | |
|
659 | 658 | def test_updated_and_removed_all_files_in_pr(self, pr_util): |
|
660 | 659 | commits = [ |
|
661 | 660 | {'message': 'a'}, |
|
662 | 661 | {'message': 'b', 'added': [ |
|
663 | 662 | FileNode('file_a', 'test_content a\n'), |
|
664 | 663 | FileNode('file_b', 'test_content b\n'), |
|
665 | 664 | FileNode('file_c', 'test_content c\n')]}, |
|
666 | 665 | {'message': 'c', 'removed': [ |
|
667 | 666 | FileNode('file_a', 'test_content a changed\n'), |
|
668 | 667 | FileNode('file_b', 'test_content b changed\n'), |
|
669 | 668 | FileNode('file_c', 'test_content c changed\n')]}, |
|
670 | 669 | ] |
|
671 | 670 | # open a PR from a to b, removing 3 files |
|
672 | 671 | pull_request = pr_util.create_pull_request( |
|
673 | 672 | commits=commits, target_head='a', source_head='b', revisions=['b'], |
|
674 | 673 | name_suffix='per-file-review') |
|
675 | 674 | |
|
676 | 675 | pr_util.add_one_commit(head='c') |
|
677 | 676 | |
|
678 | 677 | assert_pr_file_changes( |
|
679 | 678 | pull_request, |
|
680 | 679 | added=[], |
|
681 | 680 | modified=[], |
|
682 | 681 | removed=['file_a', 'file_b', 'file_c']) |
|
683 | 682 | |
|
684 | 683 | |
|
685 | 684 | def test_update_writes_snapshot_into_pull_request_version(pr_util): |
|
686 | 685 | model = PullRequestModel() |
|
687 | 686 | pull_request = pr_util.create_pull_request() |
|
688 | 687 | pr_util.update_source_repository() |
|
689 | 688 | |
|
690 | 689 | model.update_commits(pull_request) |
|
691 | 690 | |
|
692 | 691 | # Expect that it has a version entry now |
|
693 | 692 | assert len(model.get_versions(pull_request)) == 1 |
|
694 | 693 | |
|
695 | 694 | |
|
696 | 695 | def test_update_skips_new_version_if_unchanged(pr_util): |
|
697 | 696 | pull_request = pr_util.create_pull_request() |
|
698 | 697 | model = PullRequestModel() |
|
699 | 698 | model.update_commits(pull_request) |
|
700 | 699 | |
|
701 | 700 | # Expect that it still has no versions |
|
702 | 701 | assert len(model.get_versions(pull_request)) == 0 |
|
703 | 702 | |
|
704 | 703 | |
|
705 | 704 | def test_update_assigns_comments_to_the_new_version(pr_util): |
|
706 | 705 | model = PullRequestModel() |
|
707 | 706 | pull_request = pr_util.create_pull_request() |
|
708 | 707 | comment = pr_util.create_comment() |
|
709 | 708 | pr_util.update_source_repository() |
|
710 | 709 | |
|
711 | 710 | model.update_commits(pull_request) |
|
712 | 711 | |
|
713 | 712 | # Expect that the comment is linked to the pr version now |
|
714 | 713 | assert comment.pull_request_version == model.get_versions(pull_request)[0] |
|
715 | 714 | |
|
716 | 715 | |
|
717 | 716 | def test_update_adds_a_comment_to_the_pull_request_about_the_change(pr_util): |
|
718 | 717 | model = PullRequestModel() |
|
719 | 718 | pull_request = pr_util.create_pull_request() |
|
720 | 719 | pr_util.update_source_repository() |
|
721 | 720 | pr_util.update_source_repository() |
|
722 | 721 | |
|
723 | 722 | model.update_commits(pull_request) |
|
724 | 723 | |
|
725 | 724 | # Expect to find a new comment about the change |
|
726 | 725 | expected_message = textwrap.dedent( |
|
727 | 726 | """\ |
|
728 | 727 | Pull request updated. Auto status change to |under_review| |
|
729 | 728 | |
|
730 | 729 | .. role:: added |
|
731 | 730 | .. role:: removed |
|
732 | 731 | .. parsed-literal:: |
|
733 | 732 | |
|
734 | 733 | Changed commits: |
|
735 | 734 | * :added:`1 added` |
|
736 | 735 | * :removed:`0 removed` |
|
737 | 736 | |
|
738 | 737 | Changed files: |
|
739 | 738 | * `A file_2 <#a_c--92ed3b5f07b4>`_ |
|
740 | 739 | |
|
741 | 740 | .. |under_review| replace:: *"Under Review"*""" |
|
742 | 741 | ) |
|
743 | 742 | pull_request_comments = sorted( |
|
744 | 743 | pull_request.comments, key=lambda c: c.modified_at) |
|
745 | 744 | update_comment = pull_request_comments[-1] |
|
746 | 745 | assert update_comment.text == expected_message |
|
747 | 746 | |
|
748 | 747 | |
|
749 | 748 | def test_create_version_from_snapshot_updates_attributes(pr_util): |
|
750 | 749 | pull_request = pr_util.create_pull_request() |
|
751 | 750 | |
|
752 | 751 | # Avoiding default values |
|
753 | 752 | pull_request.status = PullRequest.STATUS_CLOSED |
|
754 | 753 | pull_request._last_merge_source_rev = "0" * 40 |
|
755 | 754 | pull_request._last_merge_target_rev = "1" * 40 |
|
756 | 755 | pull_request._last_merge_status = 1 |
|
757 | 756 | pull_request.merge_rev = "2" * 40 |
|
758 | 757 | |
|
759 | 758 | # Remember automatic values |
|
760 | 759 | created_on = pull_request.created_on |
|
761 | 760 | updated_on = pull_request.updated_on |
|
762 | 761 | |
|
763 | 762 | # Create a new version of the pull request |
|
764 | 763 | version = PullRequestModel()._create_version_from_snapshot(pull_request) |
|
765 | 764 | |
|
766 | 765 | # Check attributes |
|
767 | 766 | assert version.title == pr_util.create_parameters['title'] |
|
768 | 767 | assert version.description == pr_util.create_parameters['description'] |
|
769 | 768 | assert version.status == PullRequest.STATUS_CLOSED |
|
770 | 769 | |
|
771 | 770 | # versions get updated created_on |
|
772 | 771 | assert version.created_on != created_on |
|
773 | 772 | |
|
774 | 773 | assert version.updated_on == updated_on |
|
775 | 774 | assert version.user_id == pull_request.user_id |
|
776 | 775 | assert version.revisions == pr_util.create_parameters['revisions'] |
|
777 | 776 | assert version.source_repo == pr_util.source_repository |
|
778 | 777 | assert version.source_ref == pr_util.create_parameters['source_ref'] |
|
779 | 778 | assert version.target_repo == pr_util.target_repository |
|
780 | 779 | assert version.target_ref == pr_util.create_parameters['target_ref'] |
|
781 | 780 | assert version._last_merge_source_rev == pull_request._last_merge_source_rev |
|
782 | 781 | assert version._last_merge_target_rev == pull_request._last_merge_target_rev |
|
783 | 782 | assert version._last_merge_status == pull_request._last_merge_status |
|
784 | 783 | assert version.merge_rev == pull_request.merge_rev |
|
785 | 784 | assert version.pull_request == pull_request |
|
786 | 785 | |
|
787 | 786 | |
|
788 | 787 | def test_link_comments_to_version_only_updates_unlinked_comments(pr_util): |
|
789 | 788 | version1 = pr_util.create_version_of_pull_request() |
|
790 | 789 | comment_linked = pr_util.create_comment(linked_to=version1) |
|
791 | 790 | comment_unlinked = pr_util.create_comment() |
|
792 | 791 | version2 = pr_util.create_version_of_pull_request() |
|
793 | 792 | |
|
794 | 793 | PullRequestModel()._link_comments_to_version(version2) |
|
795 | 794 | |
|
796 | 795 | # Expect that only the new comment is linked to version2 |
|
797 | 796 | assert ( |
|
798 | 797 | comment_unlinked.pull_request_version_id == |
|
799 | 798 | version2.pull_request_version_id) |
|
800 | 799 | assert ( |
|
801 | 800 | comment_linked.pull_request_version_id == |
|
802 | 801 | version1.pull_request_version_id) |
|
803 | 802 | assert ( |
|
804 | 803 | comment_unlinked.pull_request_version_id != |
|
805 | 804 | comment_linked.pull_request_version_id) |
|
806 | 805 | |
|
807 | 806 | |
|
808 | 807 | def test_calculate_commits(): |
|
809 | 808 | old_ids = [1, 2, 3] |
|
810 | 809 | new_ids = [1, 3, 4, 5] |
|
811 | 810 | change = PullRequestModel()._calculate_commit_id_changes(old_ids, new_ids) |
|
812 | 811 | assert change.added == [4, 5] |
|
813 | 812 | assert change.common == [1, 3] |
|
814 | 813 | assert change.removed == [2] |
|
815 | 814 | assert change.total == [1, 3, 4, 5] |
|
816 | 815 | |
|
817 | 816 | |
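The test above pins down the contract of `_calculate_commit_id_changes`: order-preserving set arithmetic over the old and new commit id lists. A minimal sketch under that contract (the `ChangeTuple` name and the standalone function are assumptions for illustration, not the actual RhodeCode implementation):

    import collections

    ChangeTuple = collections.namedtuple(
        'ChangeTuple', ['added', 'common', 'removed', 'total'])

    def calculate_commit_id_changes(old_ids, new_ids):
        # membership tests via sets, but keep the original list ordering
        old, new = set(old_ids), set(new_ids)
        return ChangeTuple(
            added=[c for c in new_ids if c not in old],
            common=[c for c in old_ids if c in new],
            removed=[c for c in old_ids if c not in new],
            total=list(new_ids))

    # matches the expectations in test_calculate_commits above
    assert calculate_commit_id_changes([1, 2, 3], [1, 3, 4, 5]) == ChangeTuple(
        added=[4, 5], common=[1, 3], removed=[2], total=[1, 3, 4, 5])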
|
818 | 817 | def assert_inline_comments(pull_request, visible=None, outdated=None): |
|
819 | 818 | if visible is not None: |
|
820 | 819 | inline_comments = CommentsModel().get_inline_comments( |
|
821 | 820 | pull_request.target_repo.repo_id, pull_request=pull_request) |
|
822 | 821 | inline_cnt = CommentsModel().get_inline_comments_count( |
|
823 | 822 | inline_comments) |
|
824 | 823 | assert inline_cnt == visible |
|
825 | 824 | if outdated is not None: |
|
826 | 825 | outdated_comments = CommentsModel().get_outdated_comments( |
|
827 | 826 | pull_request.target_repo.repo_id, pull_request) |
|
828 | 827 | assert len(outdated_comments) == outdated |
|
829 | 828 | |
|
830 | 829 | |
|
831 | 830 | def assert_pr_file_changes( |
|
832 | 831 | pull_request, added=None, modified=None, removed=None): |
|
833 | 832 | pr_versions = PullRequestModel().get_versions(pull_request) |
|
834 | 833 | # always use the first version, i.e. the original PR, to calculate changes |
|
835 | 834 | pull_request_version = pr_versions[0] |
|
836 | 835 | old_diff_data, new_diff_data = PullRequestModel()._generate_update_diffs( |
|
837 | 836 | pull_request, pull_request_version) |
|
838 | 837 | file_changes = PullRequestModel()._calculate_file_changes( |
|
839 | 838 | old_diff_data, new_diff_data) |
|
840 | 839 | |
|
841 | 840 | assert added == file_changes.added, \ |
|
842 | 841 | 'expected added:%s vs value:%s' % (added, file_changes.added) |
|
843 | 842 | assert modified == file_changes.modified, \ |
|
844 | 843 | 'expected modified:%s vs value:%s' % (modified, file_changes.modified) |
|
845 | 844 | assert removed == file_changes.removed, \ |
|
846 | 845 | 'expected removed:%s vs value:%s' % (removed, file_changes.removed) |
|
847 | 846 | |
|
848 | 847 | |
|
849 | 848 | def outdated_comments_patcher(use_outdated=True): |
|
850 | 849 | return mock.patch.object( |
|
851 | 850 | CommentsModel, 'use_outdated_comments', |
|
852 | 851 | return_value=use_outdated) |
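`outdated_comments_patcher` returns a regular `mock.patch` object, so it can be used as a context manager (or started and stopped explicitly) to force `CommentsModel.use_outdated_comments` on or off for a single test. A hedged usage sketch:

    def test_sketch_outdated_comments_disabled(pr_util):
        pull_request = pr_util.create_pull_request()
        pr_util.update_source_repository()
        with outdated_comments_patcher(use_outdated=False):
            # inside the block use_outdated_comments(...) returns False,
            # regardless of the configured default
            PullRequestModel().update_commits(pull_request)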
@@ -1,1825 +1,1805 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2010-2017 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | import collections |
|
22 | 22 | import datetime |
|
23 | 23 | import hashlib |
|
24 | 24 | import os |
|
25 | 25 | import re |
|
26 | 26 | import pprint |
|
27 | 27 | import shutil |
|
28 | 28 | import socket |
|
29 | 29 | import subprocess32 |
|
30 | 30 | import time |
|
31 | 31 | import uuid |
|
32 | 32 | import dateutil.tz |
|
33 | 33 | |
|
34 | 34 | import mock |
|
35 | 35 | import pyramid.testing |
|
36 | 36 | import pytest |
|
37 | 37 | import colander |
|
38 | 38 | import requests |
|
39 | 39 | |
|
40 | 40 | import rhodecode |
|
41 | 41 | from rhodecode.lib.utils2 import AttributeDict |
|
42 | 42 | from rhodecode.model.changeset_status import ChangesetStatusModel |
|
43 | 43 | from rhodecode.model.comment import CommentsModel |
|
44 | 44 | from rhodecode.model.db import ( |
|
45 | 45 | PullRequest, Repository, RhodeCodeSetting, ChangesetStatus, RepoGroup, |
|
46 | 46 | UserGroup, RepoRhodeCodeUi, RepoRhodeCodeSetting, RhodeCodeUi) |
|
47 | 47 | from rhodecode.model.meta import Session |
|
48 | 48 | from rhodecode.model.pull_request import PullRequestModel |
|
49 | 49 | from rhodecode.model.repo import RepoModel |
|
50 | 50 | from rhodecode.model.repo_group import RepoGroupModel |
|
51 | 51 | from rhodecode.model.user import UserModel |
|
52 | 52 | from rhodecode.model.settings import VcsSettingsModel |
|
53 | 53 | from rhodecode.model.user_group import UserGroupModel |
|
54 | 54 | from rhodecode.model.integration import IntegrationModel |
|
55 | 55 | from rhodecode.integrations import integration_type_registry |
|
56 | 56 | from rhodecode.integrations.types.base import IntegrationTypeBase |
|
57 | 57 | from rhodecode.lib.utils import repo2db_mapper |
|
58 | 58 | from rhodecode.lib.vcs import create_vcsserver_proxy |
|
59 | 59 | from rhodecode.lib.vcs.backends import get_backend |
|
60 | 60 | from rhodecode.lib.vcs.nodes import FileNode |
|
61 | 61 | from rhodecode.tests import ( |
|
62 | 62 | login_user_session, get_new_dir, utils, TESTS_TMP_PATH, |
|
63 | 63 | TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR2_LOGIN, |
|
64 | 64 | TEST_USER_REGULAR_PASS) |
|
65 | 65 | from rhodecode.tests.utils import CustomTestApp |
|
66 | 66 | from rhodecode.tests.fixture import Fixture |
|
67 | 67 | |
|
68 | 68 | |
|
69 | 69 | def _split_comma(value): |
|
70 | 70 | return value.split(',') |
|
71 | 71 | |
|
72 | 72 | |
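`_split_comma` is the `type` callable for the list-valued options registered below, so one comma-separated argument becomes a Python list (the defaults are already lists and bypass it). For example:

    # "--backends git,hg" on the command line ends up as:
    assert _split_comma('git,hg') == ['git', 'hg']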
|
73 | 73 | def pytest_addoption(parser): |
|
74 | 74 | parser.addoption( |
|
75 | 75 | '--keep-tmp-path', action='store_true', |
|
76 | 76 | help="Keep the test temporary directories") |
|
77 | 77 | parser.addoption( |
|
78 | 78 | '--backends', action='store', type=_split_comma, |
|
79 | 79 | default=['git', 'hg', 'svn'], |
|
80 | 80 | help="Select which backends to test for backend specific tests.") |
|
81 | 81 | parser.addoption( |
|
82 | 82 | '--dbs', action='store', type=_split_comma, |
|
83 | 83 | default=['sqlite'], |
|
84 | 84 | help="Select which database to test for database specific tests. " |
|
85 | 85 | "Possible options are sqlite,postgres,mysql") |
|
86 | 86 | parser.addoption( |
|
87 | 87 | '--appenlight', '--ae', action='store_true', |
|
88 | 88 | help="Track statistics in appenlight.") |
|
89 | 89 | parser.addoption( |
|
90 | 90 | '--appenlight-api-key', '--ae-key', |
|
91 | 91 | help="API key for Appenlight.") |
|
92 | 92 | parser.addoption( |
|
93 | 93 | '--appenlight-url', '--ae-url', |
|
94 | 94 | default="https://ae.rhodecode.com", |
|
95 | 95 | help="Appenlight service URL, defaults to https://ae.rhodecode.com") |
|
96 | 96 | parser.addoption( |
|
97 | 97 | '--sqlite-connection-string', action='store', |
|
98 | 98 | default='', help="Connection string for the dbs tests with SQLite") |
|
99 | 99 | parser.addoption( |
|
100 | 100 | '--postgres-connection-string', action='store', |
|
101 | 101 | default='', help="Connection string for the dbs tests with Postgres") |
|
102 | 102 | parser.addoption( |
|
103 | 103 | '--mysql-connection-string', action='store', |
|
104 | 104 | default='', help="Connection string for the dbs tests with MySQL") |
|
105 | 105 | parser.addoption( |
|
106 | 106 | '--repeat', type=int, default=100, |
|
107 | 107 | help="Number of repetitions in performance tests.") |
|
108 | 108 | |
|
109 | 109 | |
|
110 | 110 | def pytest_configure(config): |
|
111 | 111 | # Apply the kombu patch early on, needed for test discovery on Python 2.7.11 |
|
112 | 112 | from rhodecode.config import patches |
|
113 | 113 | patches.kombu_1_5_1_python_2_7_11() |
|
114 | 114 | |
|
115 | 115 | |
|
116 | 116 | def pytest_collection_modifyitems(session, config, items): |
|
117 | 117 | # filter out items marked with __test__ = False (nose's "nottest"), used for the transition from nose to pytest |
|
118 | 118 | remaining = [ |
|
119 | 119 | i for i in items if getattr(i.obj, '__test__', True)] |
|
120 | 120 | items[:] = remaining |
|
121 | 121 | |
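This mirrors nose's `nottest` convention: anything that was collected but whose object carries `__test__ = False` is dropped again before the run. A hedged example of opting a helper out:

    def test_looking_helper():
        """Collected by name, but filtered out by the hook above."""
    test_looking_helper.__test__ = False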
|
122 | 122 | |
|
123 | 123 | def pytest_generate_tests(metafunc): |
|
124 | 124 | # Support test generation based on --backend parameter |
|
125 | 125 | if 'backend_alias' in metafunc.fixturenames: |
|
126 | 126 | backends = get_backends_from_metafunc(metafunc) |
|
127 | 127 | scope = None |
|
128 | 128 | if not backends: |
|
129 | 129 | pytest.skip("Not enabled for any of selected backends") |
|
130 | 130 | metafunc.parametrize('backend_alias', backends, scope=scope) |
|
131 | 131 | elif hasattr(metafunc.function, 'backends'): |
|
132 | 132 | backends = get_backends_from_metafunc(metafunc) |
|
133 | 133 | if not backends: |
|
134 | 134 | pytest.skip("Not enabled for any of selected backends") |
|
135 | 135 | |
|
136 | 136 | |
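Together with `get_backends_from_metafunc` below, this means a test restricts itself to specific backends via the `backends` marker, and the final parameter set is the intersection of the marker with `--backends`. A hedged sketch:

    import pytest

    @pytest.mark.backends('git', 'hg')
    def test_sketch_dvcs_only(backend):
        # runs once per backend that is both in the marker and in
        # --backends; with --backends=svn it is skipped entirely
        assert backend.alias in ('git', 'hg')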
|
137 | 137 | def get_backends_from_metafunc(metafunc): |
|
138 | 138 | requested_backends = set(metafunc.config.getoption('--backends')) |
|
139 | 139 | if hasattr(metafunc.function, 'backends'): |
|
140 | 140 | # Supported backends by this test function, created from |
|
141 | 141 | # pytest.mark.backends |
|
142 | 142 | backends = metafunc.function.backends.args |
|
143 | 143 | elif hasattr(metafunc.cls, 'backend_alias'): |
|
144 | 144 | # Support class attribute "backend_alias", this is mainly |
|
145 | 145 | # for legacy reasons for tests not yet using pytest.mark.backends |
|
146 | 146 | backends = [metafunc.cls.backend_alias] |
|
147 | 147 | else: |
|
148 | 148 | backends = metafunc.config.getoption('--backends') |
|
149 | 149 | return requested_backends.intersection(backends) |
|
150 | 150 | |
|
151 | 151 | |
|
152 | 152 | @pytest.fixture(scope='session', autouse=True) |
|
153 | 153 | def activate_example_rcextensions(request): |
|
154 | 154 | """ |
|
155 | 155 | Patch in an example rcextensions module which verifies passed in kwargs. |
|
156 | 156 | """ |
|
157 | 157 | from rhodecode.tests.other import example_rcextensions |
|
158 | 158 | |
|
159 | 159 | old_extensions = rhodecode.EXTENSIONS |
|
160 | 160 | rhodecode.EXTENSIONS = example_rcextensions |
|
161 | 161 | |
|
162 | 162 | @request.addfinalizer |
|
163 | 163 | def cleanup(): |
|
164 | 164 | rhodecode.EXTENSIONS = old_extensions |
|
165 | 165 | |
|
166 | 166 | |
|
167 | 167 | @pytest.fixture |
|
168 | 168 | def capture_rcextensions(): |
|
169 | 169 | """ |
|
170 | 170 | Returns the recorded calls to entry points in rcextensions. |
|
171 | 171 | """ |
|
172 | 172 | calls = rhodecode.EXTENSIONS.calls |
|
173 | 173 | calls.clear() |
|
174 | 174 | # Note: at this point it is still the empty dict, but it will |

175 | 175 | # be filled during the test run, and since we return a reference |

176 | 176 | # this is enough to make it work. |
|
177 | 177 | return calls |
|
178 | 178 | |
|
179 | 179 | |
|
180 | 180 | @pytest.fixture(scope='session') |
|
181 | 181 | def http_environ_session(): |
|
182 | 182 | """ |
|
183 | 183 | Allows using "http_environ" in session scope. |
|
184 | 184 | """ |
|
185 | 185 | return http_environ( |
|
186 | 186 | http_host_stub=http_host_stub()) |
|
187 | 187 | |
|
188 | 188 | |
|
189 | 189 | @pytest.fixture |
|
190 | 190 | def http_host_stub(): |
|
191 | 191 | """ |
|
192 | 192 | Value of HTTP_HOST in the test run. |
|
193 | 193 | """ |
|
194 | 194 | return 'test.example.com:80' |
|
195 | 195 | |
|
196 | 196 | |
|
197 | 197 | @pytest.fixture |
|
198 | 198 | def http_environ(http_host_stub): |
|
199 | 199 | """ |
|
200 | 200 | HTTP extra environ keys. |
|
201 | 201 | |
|
202 | 202 | Used by the test application as well as for setting up the pylons |
|
203 | 203 | environment. In the case of the fixture "app" it should be possible |
|
204 | 204 | to override this for a specific test case. |
|
205 | 205 | """ |
|
206 | 206 | return { |
|
207 | 207 | 'SERVER_NAME': http_host_stub.split(':')[0], |
|
208 | 208 | 'SERVER_PORT': http_host_stub.split(':')[1], |
|
209 | 209 | 'HTTP_HOST': http_host_stub, |
|
210 | 210 | } |
|
211 | 211 | |
|
212 | 212 | |
|
213 | 213 | @pytest.fixture(scope='function') |
|
214 | 214 | def app(request, pylonsapp, http_environ): |
|
215 | 215 | |
|
216 | 216 | |
|
217 | 217 | app = CustomTestApp( |
|
218 | 218 | pylonsapp, |
|
219 | 219 | extra_environ=http_environ) |
|
220 | 220 | if request.cls: |
|
221 | 221 | request.cls.app = app |
|
222 | 222 | return app |
|
223 | 223 | |
|
224 | 224 | |
|
225 | 225 | @pytest.fixture(scope='session') |
|
226 | 226 | def app_settings(pylonsapp, pylons_config): |
|
227 | 227 | """ |
|
228 | 228 | Settings dictionary used to create the app. |
|
229 | 229 | |
|
230 | 230 | Parses the ini file and passes the result through the sanitize and apply |
|
231 | 231 | defaults mechanism in `rhodecode.config.middleware`. |
|
232 | 232 | """ |
|
233 | 233 | from paste.deploy.loadwsgi import loadcontext, APP |
|
234 | 234 | from rhodecode.config.middleware import ( |
|
235 | 235 | sanitize_settings_and_apply_defaults) |
|
236 | 236 | context = loadcontext(APP, 'config:' + pylons_config) |
|
237 | 237 | settings = sanitize_settings_and_apply_defaults(context.config()) |
|
238 | 238 | return settings |
|
239 | 239 | |
|
240 | 240 | |
|
241 | 241 | @pytest.fixture(scope='session') |
|
242 | 242 | def db(app_settings): |
|
243 | 243 | """ |
|
244 | 244 | Initializes the database connection. |
|
245 | 245 | |
|
246 | 246 | It uses the same settings which are used to create the ``pylonsapp`` or |
|
247 | 247 | ``app`` fixtures. |
|
248 | 248 | """ |
|
249 | 249 | from rhodecode.config.utils import initialize_database |
|
250 | 250 | initialize_database(app_settings) |
|
251 | 251 | |
|
252 | 252 | |
|
253 | 253 | LoginData = collections.namedtuple('LoginData', ('csrf_token', 'user')) |
|
254 | 254 | |
|
255 | 255 | |
|
256 | 256 | def _autologin_user(app, *args): |
|
257 | 257 | session = login_user_session(app, *args) |
|
258 | 258 | csrf_token = rhodecode.lib.auth.get_csrf_token(session) |
|
259 | 259 | return LoginData(csrf_token, session['rhodecode_user']) |
|
260 | 260 | |
|
261 | 261 | |
|
262 | 262 | @pytest.fixture |
|
263 | 263 | def autologin_user(app): |
|
264 | 264 | """ |
|
265 | 265 | Utility fixture which makes sure that the admin user is logged in |
|
266 | 266 | """ |
|
267 | 267 | return _autologin_user(app) |
|
268 | 268 | |
|
269 | 269 | |
|
270 | 270 | @pytest.fixture |
|
271 | 271 | def autologin_regular_user(app): |
|
272 | 272 | """ |
|
273 | 273 | Utility fixture which makes sure that the regular user is logged in |
|
274 | 274 | """ |
|
275 | 275 | return _autologin_user( |
|
276 | 276 | app, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS) |
|
277 | 277 | |
|
278 | 278 | |
|
279 | 279 | @pytest.fixture(scope='function') |
|
280 | 280 | def csrf_token(request, autologin_user): |
|
281 | 281 | return autologin_user.csrf_token |
|
282 | 282 | |
|
283 | 283 | |
|
284 | 284 | @pytest.fixture(scope='function') |
|
285 | 285 | def xhr_header(request): |
|
286 | 286 | return {'HTTP_X_REQUESTED_WITH': 'XMLHttpRequest'} |
|
287 | 287 | |
|
288 | 288 | |
|
289 | 289 | @pytest.fixture |
|
290 | 290 | def real_crypto_backend(monkeypatch): |
|
291 | 291 | """ |
|
292 | 292 | Switch the production crypto backend on for this test. |
|
293 | 293 | |
|
294 | 294 | During the test run the crypto backend is replaced with a faster |
|
295 | 295 | implementation based on the MD5 algorithm. |
|
296 | 296 | """ |
|
297 | 297 | monkeypatch.setattr(rhodecode, 'is_test', False) |
|
298 | 298 | |
|
299 | 299 | |
|
300 | 300 | @pytest.fixture(scope='class') |
|
301 | 301 | def index_location(request, pylonsapp): |
|
302 | 302 | index_location = pylonsapp.config['app_conf']['search.location'] |
|
303 | 303 | if request.cls: |
|
304 | 304 | request.cls.index_location = index_location |
|
305 | 305 | return index_location |
|
306 | 306 | |
|
307 | 307 | |
|
308 | 308 | @pytest.fixture(scope='session', autouse=True) |
|
309 | 309 | def tests_tmp_path(request): |
|
310 | 310 | """ |
|
311 | 311 | Create temporary directory to be used during the test session. |
|
312 | 312 | """ |
|
313 | 313 | if not os.path.exists(TESTS_TMP_PATH): |
|
314 | 314 | os.makedirs(TESTS_TMP_PATH) |
|
315 | 315 | |
|
316 | 316 | if not request.config.getoption('--keep-tmp-path'): |
|
317 | 317 | @request.addfinalizer |
|
318 | 318 | def remove_tmp_path(): |
|
319 | 319 | shutil.rmtree(TESTS_TMP_PATH) |
|
320 | 320 | |
|
321 | 321 | return TESTS_TMP_PATH |
|
322 | 322 | |
|
323 | 323 | |
|
324 | @pytest.fixture(scope='session', autouse=True) | |
|
325 | def patch_pyro_request_scope_proxy_factory(request): | |
|
326 | """ | |
|
327 | Patch the pyro proxy factory to always use the same dummy request object | |
|
328 | when under test. This will return the same pyro proxy on every call. | |
|
329 | """ | |
|
330 | dummy_request = pyramid.testing.DummyRequest() | |
|
331 | ||
|
332 | def mocked_call(self, request=None): | |
|
333 | return self.getProxy(request=dummy_request) | |
|
334 | ||
|
335 | patcher = mock.patch( | |
|
336 | 'rhodecode.lib.vcs.client.RequestScopeProxyFactory.__call__', | |
|
337 | new=mocked_call) | |
|
338 | patcher.start() | |
|
339 | ||
|
340 | @request.addfinalizer | |
|
341 | def undo_patching(): | |
|
342 | patcher.stop() | |
|
343 | ||
|
344 | ||
|
345 | 324 | @pytest.fixture |
|
346 | 325 | def test_repo_group(request): |
|
347 | 326 | """ |
|
348 | 327 | Create a temporary repository group, and destroy it after |
|
349 | 328 | usage automatically |
|
350 | 329 | """ |
|
351 | 330 | fixture = Fixture() |
|
352 | 331 | repogroupid = 'test_repo_group_%s' % int(time.time()) |
|
353 | 332 | repo_group = fixture.create_repo_group(repogroupid) |
|
354 | 333 | |
|
355 | 334 | def _cleanup(): |
|
356 | 335 | fixture.destroy_repo_group(repogroupid) |
|
357 | 336 | |
|
358 | 337 | request.addfinalizer(_cleanup) |
|
359 | 338 | return repo_group |
|
360 | 339 | |
|
361 | 340 | |
|
362 | 341 | @pytest.fixture |
|
363 | 342 | def test_user_group(request): |
|
364 | 343 | """ |
|
365 | 344 | Create a temporary user group, and destroy it after |
|
366 | 345 | usage automatically |
|
367 | 346 | """ |
|
368 | 347 | fixture = Fixture() |
|
369 | 348 | usergroupid = 'test_user_group_%s' % int(time.time()) |
|
370 | 349 | user_group = fixture.create_user_group(usergroupid) |
|
371 | 350 | |
|
372 | 351 | def _cleanup(): |
|
373 | 352 | fixture.destroy_user_group(user_group) |
|
374 | 353 | |
|
375 | 354 | request.addfinalizer(_cleanup) |
|
376 | 355 | return user_group |
|
377 | 356 | |
|
378 | 357 | |
|
379 | 358 | @pytest.fixture(scope='session') |
|
380 | 359 | def test_repo(request): |
|
381 | 360 | container = TestRepoContainer() |
|
382 | 361 | request.addfinalizer(container._cleanup) |
|
383 | 362 | return container |
|
384 | 363 | |
|
385 | 364 | |
|
386 | 365 | class TestRepoContainer(object): |
|
387 | 366 | """ |
|
388 | 367 | Container for test repositories which are used read only. |
|
389 | 368 | |
|
390 | 369 | Repositories will be created on demand and re-used during the lifetime |
|
391 | 370 | of this object. |
|
392 | 371 | |
|
393 | 372 | Usage to get the svn test repository "minimal":: |
|
394 | 373 | |
|
395 | 374 | test_repo = TestContainer() |
|
396 | 375 | repo = test_repo('minimal', 'svn') |
|
397 | 376 | |
|
398 | 377 | """ |
|
399 | 378 | |
|
400 | 379 | dump_extractors = { |
|
401 | 380 | 'git': utils.extract_git_repo_from_dump, |
|
402 | 381 | 'hg': utils.extract_hg_repo_from_dump, |
|
403 | 382 | 'svn': utils.extract_svn_repo_from_dump, |
|
404 | 383 | } |
|
405 | 384 | |
|
406 | 385 | def __init__(self): |
|
407 | 386 | self._cleanup_repos = [] |
|
408 | 387 | self._fixture = Fixture() |
|
409 | 388 | self._repos = {} |
|
410 | 389 | |
|
411 | 390 | def __call__(self, dump_name, backend_alias): |
|
412 | 391 | key = (dump_name, backend_alias) |
|
413 | 392 | if key not in self._repos: |
|
414 | 393 | repo = self._create_repo(dump_name, backend_alias) |
|
415 | 394 | self._repos[key] = repo.repo_id |
|
416 | 395 | return Repository.get(self._repos[key]) |
|
417 | 396 | |
|
418 | 397 | def _create_repo(self, dump_name, backend_alias): |
|
419 | 398 | repo_name = '%s-%s' % (backend_alias, dump_name) |
|
420 | 399 | backend_class = get_backend(backend_alias) |
|
421 | 400 | dump_extractor = self.dump_extractors[backend_alias] |
|
422 | 401 | repo_path = dump_extractor(dump_name, repo_name) |
|
423 | 402 | vcs_repo = backend_class(repo_path) |
|
424 | 403 | repo2db_mapper({repo_name: vcs_repo}) |
|
425 | 404 | repo = RepoModel().get_by_repo_name(repo_name) |
|
426 | 405 | self._cleanup_repos.append(repo_name) |
|
427 | 406 | return repo |
|
428 | 407 | |
|
429 | 408 | def _cleanup(self): |
|
430 | 409 | for repo_name in reversed(self._cleanup_repos): |
|
431 | 410 | self._fixture.destroy_repo(repo_name) |
|
432 | 411 | |
|
433 | 412 | |
|
434 | 413 | @pytest.fixture |
|
435 | 414 | def backend(request, backend_alias, pylonsapp, test_repo): |
|
436 | 415 | """ |
|
437 | 416 | Parametrized fixture which represents a single backend implementation. |
|
438 | 417 | |
|
439 | 418 | It respects the option `--backends` to focus the test run on specific |
|
440 | 419 | backend implementations. |
|
441 | 420 | |
|
442 | 421 | It also supports `pytest.mark.xfail_backends` to mark tests as failing |
|
443 | 422 | for specific backends. This is intended as a utility for incremental |
|
444 | 423 | development of a new backend implementation. |
|
445 | 424 | """ |
|
446 | 425 | if backend_alias not in request.config.getoption('--backends'): |
|
447 | 426 | pytest.skip("Backend %s not selected." % (backend_alias, )) |
|
448 | 427 | |
|
449 | 428 | utils.check_xfail_backends(request.node, backend_alias) |
|
450 | 429 | utils.check_skip_backends(request.node, backend_alias) |
|
451 | 430 | |
|
452 | 431 | repo_name = 'vcs_test_%s' % (backend_alias, ) |
|
453 | 432 | backend = Backend( |
|
454 | 433 | alias=backend_alias, |
|
455 | 434 | repo_name=repo_name, |
|
456 | 435 | test_name=request.node.name, |
|
457 | 436 | test_repo_container=test_repo) |
|
458 | 437 | request.addfinalizer(backend.cleanup) |
|
459 | 438 | return backend |
|
460 | 439 | |
|
461 | 440 | |
|
462 | 441 | @pytest.fixture |
|
463 | 442 | def backend_git(request, pylonsapp, test_repo): |
|
464 | 443 | return backend(request, 'git', pylonsapp, test_repo) |
|
465 | 444 | |
|
466 | 445 | |
|
467 | 446 | @pytest.fixture |
|
468 | 447 | def backend_hg(request, pylonsapp, test_repo): |
|
469 | 448 | return backend(request, 'hg', pylonsapp, test_repo) |
|
470 | 449 | |
|
471 | 450 | |
|
472 | 451 | @pytest.fixture |
|
473 | 452 | def backend_svn(request, pylonsapp, test_repo): |
|
474 | 453 | return backend(request, 'svn', pylonsapp, test_repo) |
|
475 | 454 | |
|
476 | 455 | |
|
477 | 456 | @pytest.fixture |
|
478 | 457 | def backend_random(backend_git): |
|
479 | 458 | """ |
|
480 | 459 | Use this to express that your tests need "a backend". |
|
481 | 460 | |
|
482 | 461 | A few of our tests need a backend, so that we can run the code. This |
|
483 | 462 | fixture is intended to be used for such cases. It will pick one of the |
|
484 | 463 | backends and run the tests. |
|
485 | 464 | |
|
486 | 465 | The fixture `backend` would run the test multiple times for each |
|
487 | 466 | available backend which is a pure waste of time if the test is |
|
488 | 467 | independent of the backend type. |
|
489 | 468 | """ |
|
490 | 469 | # TODO: johbo: Change this to pick a random backend |
|
491 | 470 | return backend_git |
|
492 | 471 | |
|
493 | 472 | |
|
494 | 473 | @pytest.fixture |
|
495 | 474 | def backend_stub(backend_git): |
|
496 | 475 | """ |
|
497 | 476 | Use this to express that your tests need a backend stub |
|
498 | 477 | |
|
499 | 478 | TODO: mikhail: Implement a real stub logic instead of returning |
|
500 | 479 | a git backend |
|
501 | 480 | """ |
|
502 | 481 | return backend_git |
|
503 | 482 | |
|
504 | 483 | |
|
505 | 484 | @pytest.fixture |
|
506 | 485 | def repo_stub(backend_stub): |
|
507 | 486 | """ |
|
508 | 487 | Use this to express that your tests need a repository stub |
|
509 | 488 | """ |
|
510 | 489 | return backend_stub.create_repo() |
|
511 | 490 | |
|
512 | 491 | |
|
513 | 492 | class Backend(object): |
|
514 | 493 | """ |
|
515 | 494 | Represents the test configuration for one supported backend |
|
516 | 495 | |
|
517 | 496 | Provides easy access to different test repositories based on |
|
518 | 497 | `__getitem__`. Such repositories will only be created once per test |
|
519 | 498 | session. |
|
520 | 499 | """ |
|
521 | 500 | |
|
522 | 501 | invalid_repo_name = re.compile(r'[^0-9a-zA-Z]+') |
|
523 | 502 | _master_repo = None |
|
524 | 503 | _commit_ids = {} |
|
525 | 504 | |
|
526 | 505 | def __init__(self, alias, repo_name, test_name, test_repo_container): |
|
527 | 506 | self.alias = alias |
|
528 | 507 | self.repo_name = repo_name |
|
529 | 508 | self._cleanup_repos = [] |
|
530 | 509 | self._test_name = test_name |
|
531 | 510 | self._test_repo_container = test_repo_container |
|
532 | 511 | # TODO: johbo: Used as an interim delegate. Not yet sure if Backend or |
|
533 | 512 | # Fixture will survive in the end. |
|
534 | 513 | self._fixture = Fixture() |
|
535 | 514 | |
|
536 | 515 | def __getitem__(self, key): |
|
537 | 516 | return self._test_repo_container(key, self.alias) |
|
538 | 517 | |
|
539 | 518 | @property |
|
540 | 519 | def repo(self): |
|
541 | 520 | """ |
|
542 | 521 | Returns the "current" repository. This is the vcs_test repo or the |
|
543 | 522 | last repo which has been created with `create_repo`. |
|
544 | 523 | """ |
|
545 | 524 | from rhodecode.model.db import Repository |
|
546 | 525 | return Repository.get_by_repo_name(self.repo_name) |
|
547 | 526 | |
|
548 | 527 | @property |
|
549 | 528 | def default_branch_name(self): |
|
550 | 529 | VcsRepository = get_backend(self.alias) |
|
551 | 530 | return VcsRepository.DEFAULT_BRANCH_NAME |
|
552 | 531 | |
|
553 | 532 | @property |
|
554 | 533 | def default_head_id(self): |
|
555 | 534 | """ |
|
556 | 535 | Returns the default head id of the underlying backend. |
|
557 | 536 | |
|
558 | 537 | This will be the default branch name in case the backend does have a |
|
559 | 538 | default branch. In the other cases it will point to a valid head |
|
560 | 539 | which can serve as the base to create a new commit on top of it. |
|
561 | 540 | """ |
|
562 | 541 | vcsrepo = self.repo.scm_instance() |
|
563 | 542 | head_id = ( |
|
564 | 543 | vcsrepo.DEFAULT_BRANCH_NAME or |
|
565 | 544 | vcsrepo.commit_ids[-1]) |
|
566 | 545 | return head_id |
|
567 | 546 | |
|
568 | 547 | @property |
|
569 | 548 | def commit_ids(self): |
|
570 | 549 | """ |
|
571 | 550 | Returns the list of commits for the last created repository |
|
572 | 551 | """ |
|
573 | 552 | return self._commit_ids |
|
574 | 553 | |
|
575 | 554 | def create_master_repo(self, commits): |
|
576 | 555 | """ |
|
577 | 556 | Create a repository and remember it as a template. |
|
578 | 557 | |
|
579 | 558 | This allows to easily create derived repositories to construct |
|
580 | 559 | more complex scenarios for diff, compare and pull requests. |
|
581 | 560 | |
|
582 | 561 | Returns a commit map which maps from commit message to raw_id. |
|
583 | 562 | """ |
|
584 | 563 | self._master_repo = self.create_repo(commits=commits) |
|
585 | 564 | return self._commit_ids |
|
586 | 565 | |
|
587 | 566 | def create_repo( |
|
588 | 567 | self, commits=None, number_of_commits=0, heads=None, |
|
589 | 568 | name_suffix=u'', **kwargs): |
|
590 | 569 | """ |
|
591 | 570 | Create a repository and record it for later cleanup. |
|
592 | 571 | |
|
593 | 572 | :param commits: Optional. A sequence of dict instances. |
|
594 | 573 | Will add a commit per entry to the new repository. |
|
595 | 574 | :param number_of_commits: Optional. If set to a number, this number of |
|
596 | 575 | commits will be added to the new repository. |
|
597 | 576 | :param heads: Optional. Can be set to a sequence of commit |
|
598 | 577 | names which shall be pulled in from the master repository. |
|
599 | 578 | |
|
600 | 579 | """ |
|
601 | 580 | self.repo_name = self._next_repo_name() + name_suffix |
|
602 | 581 | repo = self._fixture.create_repo( |
|
603 | 582 | self.repo_name, repo_type=self.alias, **kwargs) |
|
604 | 583 | self._cleanup_repos.append(repo.repo_name) |
|
605 | 584 | |
|
606 | 585 | commits = commits or [ |
|
607 | 586 | {'message': 'Commit %s of %s' % (x, self.repo_name)} |
|
608 | 587 | for x in xrange(number_of_commits)] |
|
609 | 588 | self._add_commits_to_repo(repo.scm_instance(), commits) |
|
610 | 589 | if heads: |
|
611 | 590 | self.pull_heads(repo, heads) |
|
612 | 591 | |
|
613 | 592 | return repo |
|
614 | 593 | |
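The `commits` schema is the same one the pull request tests above use: a list of dicts with an optional `message` plus `added` / `changed` / `removed` sequences of `FileNode` objects. A hedged usage sketch:

    def test_sketch_repo_with_history(backend):
        backend.create_repo(commits=[
            {'message': 'init'},
            {'message': 'add file', 'added': [
                FileNode('file_1', 'some content\n')]},
            {'message': 'change file', 'changed': [
                FileNode('file_1', 'updated content\n')]},
        ])
        # commit_ids maps commit messages to raw ids
        assert set(backend.commit_ids) == {'init', 'add file', 'change file'}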
|
615 | 594 | def pull_heads(self, repo, heads): |
|
616 | 595 | """ |
|
617 | 596 | Make sure that repo contains all commits mentioned in `heads` |
|
618 | 597 | """ |
|
619 | 598 | vcsmaster = self._master_repo.scm_instance() |
|
620 | 599 | vcsrepo = repo.scm_instance() |
|
621 | 600 | vcsrepo.config.clear_section('hooks') |
|
622 | 601 | commit_ids = [self._commit_ids[h] for h in heads] |
|
623 | 602 | vcsrepo.pull(vcsmaster.path, commit_ids=commit_ids) |
|
624 | 603 | |
|
625 | 604 | def create_fork(self): |
|
626 | 605 | repo_to_fork = self.repo_name |
|
627 | 606 | self.repo_name = self._next_repo_name() |
|
628 | 607 | repo = self._fixture.create_fork(repo_to_fork, self.repo_name) |
|
629 | 608 | self._cleanup_repos.append(self.repo_name) |
|
630 | 609 | return repo |
|
631 | 610 | |
|
632 | 611 | def new_repo_name(self, suffix=u''): |
|
633 | 612 | self.repo_name = self._next_repo_name() + suffix |
|
634 | 613 | self._cleanup_repos.append(self.repo_name) |
|
635 | 614 | return self.repo_name |
|
636 | 615 | |
|
637 | 616 | def _next_repo_name(self): |
|
638 | 617 | return u"%s_%s" % ( |
|
639 | 618 | self.invalid_repo_name.sub(u'_', self._test_name), |
|
640 | 619 | len(self._cleanup_repos)) |
|
641 | 620 | |
|
642 | 621 | def ensure_file(self, filename, content='Test content\n'): |
|
643 | 622 | assert self._cleanup_repos, "Avoid writing into vcs_test repos" |
|
644 | 623 | commits = [ |
|
645 | 624 | {'added': [ |
|
646 | 625 | FileNode(filename, content=content), |
|
647 | 626 | ]}, |
|
648 | 627 | ] |
|
649 | 628 | self._add_commits_to_repo(self.repo.scm_instance(), commits) |
|
650 | 629 | |
|
651 | 630 | def enable_downloads(self): |
|
652 | 631 | repo = self.repo |
|
653 | 632 | repo.enable_downloads = True |
|
654 | 633 | Session().add(repo) |
|
655 | 634 | Session().commit() |
|
656 | 635 | |
|
657 | 636 | def cleanup(self): |
|
658 | 637 | for repo_name in reversed(self._cleanup_repos): |
|
659 | 638 | self._fixture.destroy_repo(repo_name) |
|
660 | 639 | |
|
661 | 640 | def _add_commits_to_repo(self, repo, commits): |
|
662 | 641 | commit_ids = _add_commits_to_repo(repo, commits) |
|
663 | 642 | if not commit_ids: |
|
664 | 643 | return |
|
665 | 644 | self._commit_ids = commit_ids |
|
666 | 645 | |
|
667 | 646 | # Creating refs for Git to allow fetching them from remote repository |
|
668 | 647 | if self.alias == 'git': |
|
669 | 648 | refs = {} |
|
670 | 649 | for message in self._commit_ids: |
|
671 | 650 | # TODO: mikhail: do more special chars replacements |
|
672 | 651 | ref_name = 'refs/test-refs/{}'.format( |
|
673 | 652 | message.replace(' ', '')) |
|
674 | 653 | refs[ref_name] = self._commit_ids[message] |
|
675 | 654 | self._create_refs(repo, refs) |
|
676 | 655 | |
|
677 | 656 | def _create_refs(self, repo, refs): |
|
678 | 657 | for ref_name in refs: |
|
679 | 658 | repo.set_refs(ref_name, refs[ref_name]) |
|
680 | 659 | |
|
681 | 660 | |
|
682 | 661 | @pytest.fixture |
|
683 | 662 | def vcsbackend(request, backend_alias, tests_tmp_path, pylonsapp, test_repo): |
|
684 | 663 | """ |
|
685 | 664 | Parametrized fixture which represents a single vcs backend implementation. |
|
686 | 665 | |
|
687 | 666 | See the fixture `backend` for more details. This one implements the same |
|
688 | 667 | concept, but on vcs level. So it does not provide model instances etc. |
|
689 | 668 | |
|
690 | 669 | Parameters are generated dynamically, see :func:`pytest_generate_tests` |
|
691 | 670 | for how this works. |
|
692 | 671 | """ |
|
693 | 672 | if backend_alias not in request.config.getoption('--backends'): |
|
694 | 673 | pytest.skip("Backend %s not selected." % (backend_alias, )) |
|
695 | 674 | |
|
696 | 675 | utils.check_xfail_backends(request.node, backend_alias) |
|
697 | 676 | utils.check_skip_backends(request.node, backend_alias) |
|
698 | 677 | |
|
699 | 678 | repo_name = 'vcs_test_%s' % (backend_alias, ) |
|
700 | 679 | repo_path = os.path.join(tests_tmp_path, repo_name) |
|
701 | 680 | backend = VcsBackend( |
|
702 | 681 | alias=backend_alias, |
|
703 | 682 | repo_path=repo_path, |
|
704 | 683 | test_name=request.node.name, |
|
705 | 684 | test_repo_container=test_repo) |
|
706 | 685 | request.addfinalizer(backend.cleanup) |
|
707 | 686 | return backend |
|
708 | 687 | |
|
709 | 688 | |
|
710 | 689 | @pytest.fixture |
|
711 | 690 | def vcsbackend_git(request, tests_tmp_path, pylonsapp, test_repo): |
|
712 | 691 | return vcsbackend(request, 'git', tests_tmp_path, pylonsapp, test_repo) |
|
713 | 692 | |
|
714 | 693 | |
|
715 | 694 | @pytest.fixture |
|
716 | 695 | def vcsbackend_hg(request, tests_tmp_path, pylonsapp, test_repo): |
|
717 | 696 | return vcsbackend(request, 'hg', tests_tmp_path, pylonsapp, test_repo) |
|
718 | 697 | |
|
719 | 698 | |
|
720 | 699 | @pytest.fixture |
|
721 | 700 | def vcsbackend_svn(request, tests_tmp_path, pylonsapp, test_repo): |
|
722 | 701 | return vcsbackend(request, 'svn', tests_tmp_path, pylonsapp, test_repo) |
|
723 | 702 | |
|
724 | 703 | |
|
725 | 704 | @pytest.fixture |
|
726 | 705 | def vcsbackend_random(vcsbackend_git): |
|
727 | 706 | """ |
|
728 | 707 | Use this to express that your tests need "a vcsbackend". |
|
729 | 708 | |
|
730 | 709 | The fixture `vcsbackend` would run the test multiple times for each |
|
731 | 710 | available vcs backend which is a pure waste of time if the test is |
|
732 | 711 | independent of the vcs backend type. |
|
733 | 712 | """ |
|
734 | 713 | # TODO: johbo: Change this to pick a random backend |
|
735 | 714 | return vcsbackend_git |
|
736 | 715 | |
|
737 | 716 | |
|
738 | 717 | @pytest.fixture |
|
739 | 718 | def vcsbackend_stub(vcsbackend_git): |
|
740 | 719 | """ |
|
741 | 720 | Use this to express that your test just needs a stub of a vcsbackend. |
|
742 | 721 | |
|
743 | 722 | Plan is to eventually implement an in-memory stub to speed tests up. |
|
744 | 723 | """ |
|
745 | 724 | return vcsbackend_git |
|
746 | 725 | |
|
747 | 726 | |
|
748 | 727 | class VcsBackend(object): |
|
749 | 728 | """ |
|
750 | 729 | Represents the test configuration for one supported vcs backend. |
|
751 | 730 | """ |
|
752 | 731 | |
|
753 | 732 | invalid_repo_name = re.compile(r'[^0-9a-zA-Z]+') |
|
754 | 733 | |
|
755 | 734 | def __init__(self, alias, repo_path, test_name, test_repo_container): |
|
756 | 735 | self.alias = alias |
|
757 | 736 | self._repo_path = repo_path |
|
758 | 737 | self._cleanup_repos = [] |
|
759 | 738 | self._test_name = test_name |
|
760 | 739 | self._test_repo_container = test_repo_container |
|
761 | 740 | |
|
762 | 741 | def __getitem__(self, key): |
|
763 | 742 | return self._test_repo_container(key, self.alias).scm_instance() |
|
764 | 743 | |
|
765 | 744 | @property |
|
766 | 745 | def repo(self): |
|
767 | 746 | """ |
|
768 | 747 | Returns the "current" repository. This is the vcs_test repo or the last |
|
769 | 748 | repo which has been created. |
|
770 | 749 | """ |
|
771 | 750 | Repository = get_backend(self.alias) |
|
772 | 751 | return Repository(self._repo_path) |
|
773 | 752 | |
|
774 | 753 | @property |
|
775 | 754 | def backend(self): |
|
776 | 755 | """ |
|
777 | 756 | Returns the backend implementation class. |
|
778 | 757 | """ |
|
779 | 758 | return get_backend(self.alias) |
|
780 | 759 | |
|
781 | 760 | def create_repo(self, commits=None, number_of_commits=0, _clone_repo=None): |
|
782 | 761 | repo_name = self._next_repo_name() |
|
783 | 762 | self._repo_path = get_new_dir(repo_name) |
|
784 | 763 | repo_class = get_backend(self.alias) |
|
785 | 764 | src_url = None |
|
786 | 765 | if _clone_repo: |
|
787 | 766 | src_url = _clone_repo.path |
|
788 | 767 | repo = repo_class(self._repo_path, create=True, src_url=src_url) |
|
789 | 768 | self._cleanup_repos.append(repo) |
|
790 | 769 | |
|
791 | 770 | commits = commits or [ |
|
792 | 771 | {'message': 'Commit %s of %s' % (x, repo_name)} |
|
793 | 772 | for x in xrange(number_of_commits)] |
|
794 | 773 | _add_commits_to_repo(repo, commits) |
|
795 | 774 | return repo |
|
796 | 775 | |
|
797 | 776 | def clone_repo(self, repo): |
|
798 | 777 | return self.create_repo(_clone_repo=repo) |
|
799 | 778 | |
|
800 | 779 | def cleanup(self): |
|
801 | 780 | for repo in self._cleanup_repos: |
|
802 | 781 | shutil.rmtree(repo.path) |
|
803 | 782 | |
|
804 | 783 | def new_repo_path(self): |
|
805 | 784 | repo_name = self._next_repo_name() |
|
806 | 785 | self._repo_path = get_new_dir(repo_name) |
|
807 | 786 | return self._repo_path |
|
808 | 787 | |
|
809 | 788 | def _next_repo_name(self): |
|
810 | 789 | return "%s_%s" % ( |
|
811 | 790 | self.invalid_repo_name.sub('_', self._test_name), |
|
812 | 791 | len(self._cleanup_repos)) |
|
813 | 792 | |
|
814 | 793 | def add_file(self, repo, filename, content='Test content\n'): |
|
815 | 794 | imc = repo.in_memory_commit |
|
816 | 795 | imc.add(FileNode(filename, content=content)) |
|
817 | 796 | imc.commit( |
|
818 | 797 | message=u'Automatic commit from vcsbackend fixture', |
|
819 | 798 | author=u'Automatic') |
|
820 | 799 | |
|
821 | 800 | def ensure_file(self, filename, content='Test content\n'): |
|
822 | 801 | assert self._cleanup_repos, "Avoid writing into vcs_test repos" |
|
823 | 802 | self.add_file(self.repo, filename, content) |
|
824 | 803 | |
|
825 | 804 | |
|
826 | 805 | def _add_commits_to_repo(vcs_repo, commits): |
|
827 | 806 | commit_ids = {} |
|
828 | 807 | if not commits: |
|
829 | 808 | return commit_ids |
|
830 | 809 | |
|
831 | 810 | imc = vcs_repo.in_memory_commit |
|
832 | 811 | commit = None |
|
833 | 812 | |
|
834 | 813 | for idx, commit in enumerate(commits): |
|
835 | 814 | message = unicode(commit.get('message', 'Commit %s' % idx)) |
|
836 | 815 | |
|
837 | 816 | for node in commit.get('added', []): |
|
838 | 817 | imc.add(FileNode(node.path, content=node.content)) |
|
839 | 818 | for node in commit.get('changed', []): |
|
840 | 819 | imc.change(FileNode(node.path, content=node.content)) |
|
841 | 820 | for node in commit.get('removed', []): |
|
842 | 821 | imc.remove(FileNode(node.path)) |
|
843 | 822 | |
|
844 | 823 | parents = [ |
|
845 | 824 | vcs_repo.get_commit(commit_id=commit_ids[p]) |
|
846 | 825 | for p in commit.get('parents', [])] |
|
847 | 826 | |
|
848 | 827 | operations = ('added', 'changed', 'removed') |
|
849 | 828 | if not any((commit.get(o) for o in operations)): |
|
850 | 829 | imc.add(FileNode('file_%s' % idx, content=message)) |
|
851 | 830 | |
|
852 | 831 | commit = imc.commit( |
|
853 | 832 | message=message, |
|
854 | 833 | author=unicode(commit.get('author', 'Automatic')), |
|
855 | 834 | date=commit.get('date'), |
|
856 | 835 | branch=commit.get('branch'), |
|
857 | 836 | parents=parents) |
|
858 | 837 | |
|
859 | 838 | commit_ids[commit.message] = commit.raw_id |
|
860 | 839 | |
|
861 | 840 | return commit_ids |
|
862 | 841 | |
|
863 | 842 | |
|
864 | 843 | @pytest.fixture |
|
865 | 844 | def reposerver(request): |
|
866 | 845 | """ |
|
867 | 846 | Allows serving a backend repository |
|
868 | 847 | """ |
|
869 | 848 | |
|
870 | 849 | repo_server = RepoServer() |
|
871 | 850 | request.addfinalizer(repo_server.cleanup) |
|
872 | 851 | return repo_server |
|
873 | 852 | |
|
874 | 853 | |
|
875 | 854 | class RepoServer(object): |
|
876 | 855 | """ |
|
877 | 856 | Utility to serve a local repository for the duration of a test case. |
|
878 | 857 | |
|
879 | 858 | Supports only Subversion so far. |
|
880 | 859 | """ |
|
881 | 860 | |
|
882 | 861 | url = None |
|
883 | 862 | |
|
884 | 863 | def __init__(self): |
|
885 | 864 | self._cleanup_servers = [] |
|
886 | 865 | |
|
887 | 866 | def serve(self, vcsrepo): |
|
888 | 867 | if vcsrepo.alias != 'svn': |
|
889 | 868 | raise TypeError("Backend %s not supported" % vcsrepo.alias) |
|
890 | 869 | |
|
891 | 870 | proc = subprocess32.Popen( |
|
892 | 871 | ['svnserve', '-d', '--foreground', '--listen-host', 'localhost', |
|
893 | 872 | '--root', vcsrepo.path]) |
|
894 | 873 | self._cleanup_servers.append(proc) |
|
895 | 874 | self.url = 'svn://localhost' |
|
896 | 875 | |
|
897 | 876 | def cleanup(self): |
|
898 | 877 | for proc in self._cleanup_servers: |
|
899 | 878 | proc.terminate() |
|
900 | 879 | |
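A hedged sketch of serving a Subversion repository for the duration of a test (assuming the svn-specific `vcsbackend_svn` fixture defined above):

    def test_sketch_served_repo(reposerver, vcsbackend_svn):
        vcsrepo = vcsbackend_svn.create_repo(number_of_commits=1)
        reposerver.serve(vcsrepo)
        # svnserve now exposes the repository at reposerver.url
        assert reposerver.url == 'svn://localhost'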
|
901 | 880 | |
|
902 | 881 | @pytest.fixture |
|
903 | 882 | def pr_util(backend, request): |
|
904 | 883 | """ |
|
905 | 884 | Utility for tests of models and for functional tests around pull requests. |
|
906 | 885 | |
|
907 | 886 | It gives an instance of :class:`PRTestUtility` which provides various |
|
908 | 887 | utility methods around one pull request. |
|
909 | 888 | |
|
910 | 889 | This fixture uses `backend` and inherits its parameterization. |
|
911 | 890 | """ |
|
912 | 891 | |
|
913 | 892 | util = PRTestUtility(backend) |
|
914 | 893 | |
|
915 | 894 | @request.addfinalizer |
|
916 | 895 | def cleanup(): |
|
917 | 896 | util.cleanup() |
|
918 | 897 | |
|
919 | 898 | return util |
|
920 | 899 | |
|
921 | 900 | |
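A hedged sketch of the typical flow with `pr_util` (methods defined on `PRTestUtility` below): create a pull request, push one more commit to the source repository, and let the model pick it up:

    def test_sketch_update_flow(pr_util):
        pull_request = pr_util.create_pull_request()    # heads c1/c2, revision c2
        new_commit = pr_util.add_one_commit(head='c3')  # pull c3 and update
        assert new_commit in pull_request.revisions
        assert len(PullRequestModel().get_versions(pull_request)) == 1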
|
922 | 901 | class PRTestUtility(object): |
|
923 | 902 | |
|
924 | 903 | pull_request = None |
|
925 | 904 | pull_request_id = None |
|
926 | 905 | mergeable_patcher = None |
|
927 | 906 | mergeable_mock = None |
|
928 | 907 | notification_patcher = None |
|
929 | 908 | |
|
930 | 909 | def __init__(self, backend): |
|
931 | 910 | self.backend = backend |
|
932 | 911 | |
|
933 | 912 | def create_pull_request( |
|
934 | 913 | self, commits=None, target_head=None, source_head=None, |
|
935 | 914 | revisions=None, approved=False, author=None, mergeable=False, |
|
936 | 915 | enable_notifications=True, name_suffix=u'', reviewers=None, |
|
937 | 916 | title=u"Test", description=u"Description"): |
|
938 | 917 | self.set_mergeable(mergeable) |
|
939 | 918 | if not enable_notifications: |
|
940 | 919 | # mock notification side effect |
|
941 | 920 | self.notification_patcher = mock.patch( |
|
942 | 921 | 'rhodecode.model.notification.NotificationModel.create') |
|
943 | 922 | self.notification_patcher.start() |
|
944 | 923 | |
|
945 | 924 | if not self.pull_request: |
|
946 | 925 | if not commits: |
|
947 | 926 | commits = [ |
|
948 | 927 | {'message': 'c1'}, |
|
949 | 928 | {'message': 'c2'}, |
|
950 | 929 | {'message': 'c3'}, |
|
951 | 930 | ] |
|
952 | 931 | target_head = 'c1' |
|
953 | 932 | source_head = 'c2' |
|
954 | 933 | revisions = ['c2'] |
|
955 | 934 | |
|
956 | 935 | self.commit_ids = self.backend.create_master_repo(commits) |
|
957 | 936 | self.target_repository = self.backend.create_repo( |
|
958 | 937 | heads=[target_head], name_suffix=name_suffix) |
|
959 | 938 | self.source_repository = self.backend.create_repo( |
|
960 | 939 | heads=[source_head], name_suffix=name_suffix) |
|
961 | 940 | self.author = author or UserModel().get_by_username( |
|
962 | 941 | TEST_USER_ADMIN_LOGIN) |
|
963 | 942 | |
|
964 | 943 | model = PullRequestModel() |
|
965 | 944 | self.create_parameters = { |
|
966 | 945 | 'created_by': self.author, |
|
967 | 946 | 'source_repo': self.source_repository.repo_name, |
|
968 | 947 | 'source_ref': self._default_branch_reference(source_head), |
|
969 | 948 | 'target_repo': self.target_repository.repo_name, |
|
970 | 949 | 'target_ref': self._default_branch_reference(target_head), |
|
971 | 950 | 'revisions': [self.commit_ids[r] for r in revisions], |
|
972 | 951 | 'reviewers': reviewers or self._get_reviewers(), |
|
973 | 952 | 'title': title, |
|
974 | 953 | 'description': description, |
|
975 | 954 | } |
|
976 | 955 | self.pull_request = model.create(**self.create_parameters) |
|
977 | 956 | assert model.get_versions(self.pull_request) == [] |
|
978 | 957 | |
|
979 | 958 | self.pull_request_id = self.pull_request.pull_request_id |
|
980 | 959 | |
|
981 | 960 | if approved: |
|
982 | 961 | self.approve() |
|
983 | 962 | |
|
984 | 963 | Session().add(self.pull_request) |
|
985 | 964 | Session().commit() |
|
986 | 965 | |
|
987 | 966 | return self.pull_request |
|
988 | 967 | |
|
989 | 968 | def approve(self): |
|
990 | 969 | self.create_status_votes( |
|
991 | 970 | ChangesetStatus.STATUS_APPROVED, |
|
992 | 971 | *self.pull_request.reviewers) |
|
993 | 972 | |
|
994 | 973 | def close(self): |
|
995 | 974 | PullRequestModel().close_pull_request(self.pull_request, self.author) |
|
996 | 975 | |
|
997 | 976 | def _default_branch_reference(self, commit_message): |
|
998 | 977 | reference = '%s:%s:%s' % ( |
|
999 | 978 | 'branch', |
|
1000 | 979 | self.backend.default_branch_name, |
|
1001 | 980 | self.commit_ids[commit_message]) |
|
1002 | 981 | return reference |
|
1003 | 982 | |
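The resulting reference follows the `type:name:commit_id` convention, e.g. (illustrative values only):

    # for a Mercurial backend (default branch "default") and message 'c1':
    #   'branch:default:' + self.commit_ids['c1']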
|
1004 | 983 | def _get_reviewers(self): |
|
1005 | 984 | model = UserModel() |
|
1006 | 985 | return [ |
|
1007 | 986 | model.get_by_username(TEST_USER_REGULAR_LOGIN), |
|
1008 | 987 | model.get_by_username(TEST_USER_REGULAR2_LOGIN), |
|
1009 | 988 | ] |
|
1010 | 989 | |
|
1011 | 990 | def update_source_repository(self, head=None): |
|
1012 | 991 | heads = [head or 'c3'] |
|
1013 | 992 | self.backend.pull_heads(self.source_repository, heads=heads) |
|
1014 | 993 | |
|
1015 | 994 | def add_one_commit(self, head=None): |
|
1016 | 995 | self.update_source_repository(head=head) |
|
1017 | 996 | old_commit_ids = set(self.pull_request.revisions) |
|
1018 | 997 | PullRequestModel().update_commits(self.pull_request) |
|
1019 | 998 | commit_ids = set(self.pull_request.revisions) |
|
1020 | 999 | new_commit_ids = commit_ids - old_commit_ids |
|
1021 | 1000 | assert len(new_commit_ids) == 1 |
|
1022 | 1001 | return new_commit_ids.pop() |
|
1023 | 1002 | |
|
1024 | 1003 | def remove_one_commit(self): |
|
1025 | 1004 | assert len(self.pull_request.revisions) == 2 |
|
1026 | 1005 | source_vcs = self.source_repository.scm_instance() |
|
1027 | 1006 | removed_commit_id = source_vcs.commit_ids[-1] |
|
1028 | 1007 | |
|
1029 | 1008 | # TODO: johbo: Git and Mercurial have an inconsistent vcs api here, |
|
1030 | 1009 | # remove the if once that's sorted out. |
|
1031 | 1010 | if self.backend.alias == "git": |
|
1032 | 1011 | kwargs = {'branch_name': self.backend.default_branch_name} |
|
1033 | 1012 | else: |
|
1034 | 1013 | kwargs = {} |
|
1035 | 1014 | source_vcs.strip(removed_commit_id, **kwargs) |
|
1036 | 1015 | |
|
1037 | 1016 | PullRequestModel().update_commits(self.pull_request) |
|
1038 | 1017 | assert len(self.pull_request.revisions) == 1 |
|
1039 | 1018 | return removed_commit_id |
|
1040 | 1019 | |
|
1041 | 1020 | def create_comment(self, linked_to=None): |
|
1042 | 1021 | comment = CommentsModel().create( |
|
1043 | 1022 | text=u"Test comment", |
|
1044 | 1023 | repo=self.target_repository.repo_name, |
|
1045 | 1024 | user=self.author, |
|
1046 | 1025 | pull_request=self.pull_request) |
|
1047 | 1026 | assert comment.pull_request_version_id is None |
|
1048 | 1027 | |
|
1049 | 1028 | if linked_to: |
|
1050 | 1029 | PullRequestModel()._link_comments_to_version(linked_to) |
|
1051 | 1030 | |
|
1052 | 1031 | return comment |
|
1053 | 1032 | |
|
1054 | 1033 | def create_inline_comment( |
|
1055 | 1034 | self, linked_to=None, line_no=u'n1', file_path='file_1'): |
|
1056 | 1035 | comment = CommentsModel().create( |
|
1057 | 1036 | text=u"Test comment", |
|
1058 | 1037 | repo=self.target_repository.repo_name, |
|
1059 | 1038 | user=self.author, |
|
1060 | 1039 | line_no=line_no, |
|
1061 | 1040 | f_path=file_path, |
|
1062 | 1041 | pull_request=self.pull_request) |
|
1063 | 1042 | assert comment.pull_request_version_id is None |
|
1064 | 1043 | |
|
1065 | 1044 | if linked_to: |
|
1066 | 1045 | PullRequestModel()._link_comments_to_version(linked_to) |
|
1067 | 1046 | |
|
1068 | 1047 | return comment |
|
1069 | 1048 | |
|
1070 | 1049 | def create_version_of_pull_request(self): |
|
1071 | 1050 | pull_request = self.create_pull_request() |
|
1072 | 1051 | version = PullRequestModel()._create_version_from_snapshot( |
|
1073 | 1052 | pull_request) |
|
1074 | 1053 | return version |
|
1075 | 1054 | |
|
1076 | 1055 | def create_status_votes(self, status, *reviewers): |
|
1077 | 1056 | for reviewer in reviewers: |
|
1078 | 1057 | ChangesetStatusModel().set_status( |
|
1079 | 1058 | repo=self.pull_request.target_repo, |
|
1080 | 1059 | status=status, |
|
1081 | 1060 | user=reviewer.user_id, |
|
1082 | 1061 | pull_request=self.pull_request) |
|
1083 | 1062 | |
|
1084 | 1063 | def set_mergeable(self, value): |
|
1085 | 1064 | if not self.mergeable_patcher: |
|
1086 | 1065 | self.mergeable_patcher = mock.patch.object( |
|
1087 | 1066 | VcsSettingsModel, 'get_general_settings') |
|
1088 | 1067 | self.mergeable_mock = self.mergeable_patcher.start() |
|
1089 | 1068 | self.mergeable_mock.return_value = { |
|
1090 | 1069 | 'rhodecode_pr_merge_enabled': value} |
|
1091 | 1070 | |
|
1092 | 1071 | def cleanup(self): |
|
1093 | 1072 | # In case the source repository is already cleaned up, the pull |
|
1094 | 1073 | # request will already be deleted. |
|
1095 | 1074 | pull_request = PullRequest().get(self.pull_request_id) |
|
1096 | 1075 | if pull_request: |
|
1097 | 1076 | PullRequestModel().delete(pull_request) |
|
1098 | 1077 | Session().commit() |
|
1099 | 1078 | |
|
1100 | 1079 | if self.notification_patcher: |
|
1101 | 1080 | self.notification_patcher.stop() |
|
1102 | 1081 | |
|
1103 | 1082 | if self.mergeable_patcher: |
|
1104 | 1083 | self.mergeable_patcher.stop() |
|
1105 | 1084 | |
|
1106 | 1085 | |
|
1107 | 1086 | @pytest.fixture |
|
1108 | 1087 | def user_admin(pylonsapp): |
|
1109 | 1088 | """ |
|
1110 | 1089 | Provides the default admin test user as an instance of `db.User`. |
|
1111 | 1090 | """ |
|
1112 | 1091 | user = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN) |
|
1113 | 1092 | return user |
|
1114 | 1093 | |
|
1115 | 1094 | |
|
1116 | 1095 | @pytest.fixture |
|
1117 | 1096 | def user_regular(pylonsapp): |
|
1118 | 1097 | """ |
|
1119 | 1098 | Provides the default regular test user as an instance of `db.User`. |
|
1120 | 1099 | """ |
|
1121 | 1100 | user = UserModel().get_by_username(TEST_USER_REGULAR_LOGIN) |
|
1122 | 1101 | return user |
|
1123 | 1102 | |
|
1124 | 1103 | |
|
1125 | 1104 | @pytest.fixture |
|
1126 | 1105 | def user_util(request, pylonsapp): |
|
1127 | 1106 | """ |
|
1128 | 1107 | Provides a wired instance of `UserUtility` with integrated cleanup. |
|
1129 | 1108 | """ |
|
1130 | 1109 | utility = UserUtility(test_name=request.node.name) |
|
1131 | 1110 | request.addfinalizer(utility.cleanup) |
|
1132 | 1111 | return utility |
|
1133 | 1112 | |
|
1134 | 1113 | |
|
1135 | 1114 | # TODO: johbo: Split this up into utilities per domain or something similar |
|
1136 | 1115 | class UserUtility(object): |
|
1137 | 1116 | |
|
1138 | 1117 | def __init__(self, test_name="test"): |
|
1139 | 1118 | self._test_name = self._sanitize_name(test_name) |
|
1140 | 1119 | self.fixture = Fixture() |
|
1141 | 1120 | self.repo_group_ids = [] |
|
1142 | 1121 | self.repos_ids = [] |
|
1143 | 1122 | self.user_ids = [] |
|
1144 | 1123 | self.user_group_ids = [] |
|
1145 | 1124 | self.user_repo_permission_ids = [] |
|
1146 | 1125 | self.user_group_repo_permission_ids = [] |
|
1147 | 1126 | self.user_repo_group_permission_ids = [] |
|
1148 | 1127 | self.user_group_repo_group_permission_ids = [] |
|
1149 | 1128 | self.user_user_group_permission_ids = [] |
|
1150 | 1129 | self.user_group_user_group_permission_ids = [] |
|
1151 | 1130 | self.user_permissions = [] |
|
1152 | 1131 | |
|
1153 | 1132 | def _sanitize_name(self, name): |
|
1154 | 1133 | for char in ['[', ']']: |
|
1155 | 1134 | name = name.replace(char, '_') |
|
1156 | 1135 | return name |
|
1157 | 1136 | |
|
1158 | 1137 | def create_repo_group( |
|
1159 | 1138 | self, owner=TEST_USER_ADMIN_LOGIN, auto_cleanup=True): |
|
1160 | 1139 | group_name = "{prefix}_repogroup_{count}".format( |
|
1161 | 1140 | prefix=self._test_name, |
|
1162 | 1141 | count=len(self.repo_group_ids)) |
|
1163 | 1142 | repo_group = self.fixture.create_repo_group( |
|
1164 | 1143 | group_name, cur_user=owner) |
|
1165 | 1144 | if auto_cleanup: |
|
1166 | 1145 | self.repo_group_ids.append(repo_group.group_id) |
|
1167 | 1146 | return repo_group |
|
1168 | 1147 | |
|
1169 | 1148 | def create_repo(self, owner=TEST_USER_ADMIN_LOGIN, parent=None, auto_cleanup=True): |
|
1170 | 1149 | repo_name = "{prefix}_repository_{count}".format( |
|
1171 | 1150 | prefix=self._test_name, |
|
1172 | 1151 | count=len(self.repos_ids)) |
|
1173 | 1152 | |
|
1174 | 1153 | repository = self.fixture.create_repo( |
|
1175 | 1154 | repo_name, cur_user=owner, repo_group=parent) |
|
1176 | 1155 | if auto_cleanup: |
|
1177 | 1156 | self.repos_ids.append(repository.repo_id) |
|
1178 | 1157 | return repository |
|
1179 | 1158 | |
|
1180 | 1159 | def create_user(self, auto_cleanup=True, **kwargs): |
|
1181 | 1160 | user_name = "{prefix}_user_{count}".format( |
|
1182 | 1161 | prefix=self._test_name, |
|
1183 | 1162 | count=len(self.user_ids)) |
|
1184 | 1163 | user = self.fixture.create_user(user_name, **kwargs) |
|
1185 | 1164 | if auto_cleanup: |
|
1186 | 1165 | self.user_ids.append(user.user_id) |
|
1187 | 1166 | return user |
|
1188 | 1167 | |
|
1189 | 1168 | def create_user_with_group(self): |
|
1190 | 1169 | user = self.create_user() |
|
1191 | 1170 | user_group = self.create_user_group(members=[user]) |
|
1192 | 1171 | return user, user_group |
|
1193 | 1172 | |
|
1194 | 1173 | def create_user_group(self, members=None, auto_cleanup=True, **kwargs): |
|
1195 | 1174 | group_name = "{prefix}_usergroup_{count}".format( |
|
1196 | 1175 | prefix=self._test_name, |
|
1197 | 1176 | count=len(self.user_group_ids)) |
|
1198 | 1177 | user_group = self.fixture.create_user_group(group_name, **kwargs) |
|
1199 | 1178 | if auto_cleanup: |
|
1200 | 1179 | self.user_group_ids.append(user_group.users_group_id) |
|
1201 | 1180 | if members: |
|
1202 | 1181 | for user in members: |
|
1203 | 1182 | UserGroupModel().add_user_to_group(user_group, user) |
|
1204 | 1183 | return user_group |
|
1205 | 1184 | |
|
1206 | 1185 | def grant_user_permission(self, user_name, permission_name): |
|
1207 | 1186 | self._inherit_default_user_permissions(user_name, False) |
|
1208 | 1187 | self.user_permissions.append((user_name, permission_name)) |
|
1209 | 1188 | |
|
1210 | 1189 | def grant_user_permission_to_repo_group( |
|
1211 | 1190 | self, repo_group, user, permission_name): |
|
1212 | 1191 | permission = RepoGroupModel().grant_user_permission( |
|
1213 | 1192 | repo_group, user, permission_name) |
|
1214 | 1193 | self.user_repo_group_permission_ids.append( |
|
1215 | 1194 | (repo_group.group_id, user.user_id)) |
|
1216 | 1195 | return permission |
|
1217 | 1196 | |
|
1218 | 1197 | def grant_user_group_permission_to_repo_group( |
|
1219 | 1198 | self, repo_group, user_group, permission_name): |
|
1220 | 1199 | permission = RepoGroupModel().grant_user_group_permission( |
|
1221 | 1200 | repo_group, user_group, permission_name) |
|
1222 | 1201 | self.user_group_repo_group_permission_ids.append( |
|
1223 | 1202 | (repo_group.group_id, user_group.users_group_id)) |
|
1224 | 1203 | return permission |
|
1225 | 1204 | |
|
1226 | 1205 | def grant_user_permission_to_repo( |
|
1227 | 1206 | self, repo, user, permission_name): |
|
1228 | 1207 | permission = RepoModel().grant_user_permission( |
|
1229 | 1208 | repo, user, permission_name) |
|
1230 | 1209 | self.user_repo_permission_ids.append( |
|
1231 | 1210 | (repo.repo_id, user.user_id)) |
|
1232 | 1211 | return permission |
|
1233 | 1212 | |
|
1234 | 1213 | def grant_user_group_permission_to_repo( |
|
1235 | 1214 | self, repo, user_group, permission_name): |
|
1236 | 1215 | permission = RepoModel().grant_user_group_permission( |
|
1237 | 1216 | repo, user_group, permission_name) |
|
1238 | 1217 | self.user_group_repo_permission_ids.append( |
|
1239 | 1218 | (repo.repo_id, user_group.users_group_id)) |
|
1240 | 1219 | return permission |
|
1241 | 1220 | |
|
1242 | 1221 | def grant_user_permission_to_user_group( |
|
1243 | 1222 | self, target_user_group, user, permission_name): |
|
1244 | 1223 | permission = UserGroupModel().grant_user_permission( |
|
1245 | 1224 | target_user_group, user, permission_name) |
|
1246 | 1225 | self.user_user_group_permission_ids.append( |
|
1247 | 1226 | (target_user_group.users_group_id, user.user_id)) |
|
1248 | 1227 | return permission |
|
1249 | 1228 | |
|
1250 | 1229 | def grant_user_group_permission_to_user_group( |
|
1251 | 1230 | self, target_user_group, user_group, permission_name): |
|
1252 | 1231 | permission = UserGroupModel().grant_user_group_permission( |
|
1253 | 1232 | target_user_group, user_group, permission_name) |
|
1254 | 1233 | self.user_group_user_group_permission_ids.append( |
|
1255 | 1234 | (target_user_group.users_group_id, user_group.users_group_id)) |
|
1256 | 1235 | return permission |
|
1257 | 1236 | |
|
1258 | 1237 | def revoke_user_permission(self, user_name, permission_name): |
|
1259 | 1238 | self._inherit_default_user_permissions(user_name, True) |
|
1260 | 1239 | UserModel().revoke_perm(user_name, permission_name) |
|
1261 | 1240 | |
|
1262 | 1241 | def _inherit_default_user_permissions(self, user_name, value): |
|
1263 | 1242 | user = UserModel().get_by_username(user_name) |
|
1264 | 1243 | user.inherit_default_permissions = value |
|
1265 | 1244 | Session().add(user) |
|
1266 | 1245 | Session().commit() |
|
1267 | 1246 | |
|
1268 | 1247 | def cleanup(self): |
|
1269 | 1248 | self._cleanup_permissions() |
|
1270 | 1249 | self._cleanup_repos() |
|
1271 | 1250 | self._cleanup_repo_groups() |
|
1272 | 1251 | self._cleanup_user_groups() |
|
1273 | 1252 | self._cleanup_users() |
|
1274 | 1253 | |
|
1275 | 1254 | def _cleanup_permissions(self): |
|
1276 | 1255 | if self.user_permissions: |
|
1277 | 1256 | for user_name, permission_name in self.user_permissions: |
|
1278 | 1257 | self.revoke_user_permission(user_name, permission_name) |
|
1279 | 1258 | |
|
1280 | 1259 | for permission in self.user_repo_permission_ids: |
|
1281 | 1260 | RepoModel().revoke_user_permission(*permission) |
|
1282 | 1261 | |
|
1283 | 1262 | for permission in self.user_group_repo_permission_ids: |
|
1284 | 1263 | RepoModel().revoke_user_group_permission(*permission) |
|
1285 | 1264 | |
|
1286 | 1265 | for permission in self.user_repo_group_permission_ids: |
|
1287 | 1266 | RepoGroupModel().revoke_user_permission(*permission) |
|
1288 | 1267 | |
|
1289 | 1268 | for permission in self.user_group_repo_group_permission_ids: |
|
1290 | 1269 | RepoGroupModel().revoke_user_group_permission(*permission) |
|
1291 | 1270 | |
|
1292 | 1271 | for permission in self.user_user_group_permission_ids: |
|
1293 | 1272 | UserGroupModel().revoke_user_permission(*permission) |
|
1294 | 1273 | |
|
1295 | 1274 | for permission in self.user_group_user_group_permission_ids: |
|
1296 | 1275 | UserGroupModel().revoke_user_group_permission(*permission) |
|
1297 | 1276 | |
|
1298 | 1277 | def _cleanup_repo_groups(self): |
|
1299 | 1278 | def _repo_group_compare(first_group_id, second_group_id): |
|
1300 | 1279 | """ |
|
1301 | 1280 | Gives higher priority to the groups with the most complex paths |
|
1302 | 1281 | """ |
|
1303 | 1282 | first_group = RepoGroup.get(first_group_id) |
|
1304 | 1283 | second_group = RepoGroup.get(second_group_id) |
|
1305 | 1284 | first_group_parts = ( |
|
1306 | 1285 | len(first_group.group_name.split('/')) if first_group else 0) |
|
1307 | 1286 | second_group_parts = ( |
|
1308 | 1287 | len(second_group.group_name.split('/')) if second_group else 0) |
|
1309 | 1288 | return cmp(second_group_parts, first_group_parts) |
|
1310 | 1289 | |
|
1311 | 1290 | sorted_repo_group_ids = sorted( |
|
1312 | 1291 | self.repo_group_ids, cmp=_repo_group_compare) |
|
1313 | 1292 | for repo_group_id in sorted_repo_group_ids: |
|
1314 | 1293 | self.fixture.destroy_repo_group(repo_group_id) |
|
1315 | 1294 | |
|
1316 | 1295 | def _cleanup_repos(self): |
|
1317 | 1296 | sorted_repos_ids = sorted(self.repos_ids) |
|
1318 | 1297 | for repo_id in sorted_repos_ids: |
|
1319 | 1298 | self.fixture.destroy_repo(repo_id) |
|
1320 | 1299 | |
|
1321 | 1300 | def _cleanup_user_groups(self): |
|
1322 | 1301 | def _user_group_compare(first_group_id, second_group_id): |
|
1323 | 1302 | """ |
|
1324 | 1303 | Gives higher priority to the groups with the most complex paths |
|
1325 | 1304 | """ |
|
1326 | 1305 | first_group = UserGroup.get(first_group_id) |
|
1327 | 1306 | second_group = UserGroup.get(second_group_id) |
|
1328 | 1307 | first_group_parts = ( |
|
1329 | 1308 | len(first_group.users_group_name.split('/')) |
|
1330 | 1309 | if first_group else 0) |
|
1331 | 1310 | second_group_parts = ( |
|
1332 | 1311 | len(second_group.users_group_name.split('/')) |
|
1333 | 1312 | if second_group else 0) |
|
1334 | 1313 | return cmp(second_group_parts, first_group_parts) |
|
1335 | 1314 | |
|
1336 | 1315 | sorted_user_group_ids = sorted( |
|
1337 | 1316 | self.user_group_ids, cmp=_user_group_compare) |
|
1338 | 1317 | for user_group_id in sorted_user_group_ids: |
|
1339 | 1318 | self.fixture.destroy_user_group(user_group_id) |
|
1340 | 1319 | |
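
Both `_cleanup_repo_groups` and `_cleanup_user_groups` rely on the Python 2-only `cmp=` argument of `sorted` to destroy deeply nested groups (more `/` segments in the name) before their parents. A sketch of the same ordering with a Python 3 style `key=` function, reusing the names from the class above:

    # Sketch only: key-based equivalent of the cmp-based ordering above.
    # Sorting by path depth, descending, deletes children before parents.
    def _group_depth(group_id):
        group = RepoGroup.get(group_id)
        return len(group.group_name.split('/')) if group else 0

    for repo_group_id in sorted(repo_group_ids, key=_group_depth,
                                reverse=True):
        fixture.destroy_repo_group(repo_group_id)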
|
1341 | 1320 | def _cleanup_users(self): |
|
1342 | 1321 | for user_id in self.user_ids: |
|
1343 | 1322 | self.fixture.destroy_user(user_id) |
|
1344 | 1323 | |
|
1345 | 1324 | |
|
1346 | 1325 | # TODO: Think about moving this into a pytest-pyro package and make it a |
|
1347 | 1326 | # pytest plugin |
|
1348 | 1327 | @pytest.hookimpl(tryfirst=True, hookwrapper=True) |
|
1349 | 1328 | def pytest_runtest_makereport(item, call): |
|
1350 | 1329 | """ |
|
1351 | 1330 | Adding the remote traceback if the exception has this information. |
|
1352 | 1331 | |
|
1353 | Pyro4 attaches this information as the attribute `_pyro_traceback` | |

1332 | VCSServer attaches this information as the attribute `_vcs_server_traceback` | |
|
1354 | 1333 | to the exception instance. |
|
1355 | 1334 | """ |
|
1356 | 1335 | outcome = yield |
|
1357 | 1336 | report = outcome.get_result() |
|
1358 | 1337 | if call.excinfo: |
|
1359 | 1338 | _add_vcsserver_remote_traceback(report, call.excinfo.value) |
|
1360 | 1339 | |
|
1361 | 1340 | |
|
1362 | 1341 | def _add_vcsserver_remote_traceback(report, exc): |
|
1363 | 1342 | vcsserver_traceback = getattr(exc, '_vcs_server_traceback', None) |
|
1364 | 1343 | |
|
1365 | 1344 | if vcsserver_traceback: |
|
1366 | 1345 | section = 'VCSServer remote traceback ' + report.when |
|
1367 | 1346 | report.sections.append((section, vcsserver_traceback)) |
|
1368 | 1347 | |
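
The hookwrapper above does not depend on how the traceback got there; it only reads an attribute that the VCS client layer is expected to attach to the exception. A toy illustration of that contract (the error and traceback string are invented):

    # Invented example of the contract used by the hook above: some layer
    # attaches a remote traceback string to the exception instance, and
    # _add_vcsserver_remote_traceback() surfaces it in the test report.
    err = RuntimeError('remote call failed')
    err._vcs_server_traceback = 'Traceback (most recent call last): ...'
    assert getattr(err, '_vcs_server_traceback', None) is not None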
|
1369 | 1348 | |
|
1370 | 1349 | @pytest.fixture(scope='session') |
|
1371 | 1350 | def testrun(): |
|
1372 | 1351 | return { |
|
1373 | 1352 | 'uuid': uuid.uuid4(), |
|
1374 | 1353 | 'start': datetime.datetime.utcnow().isoformat(), |
|
1375 | 1354 | 'timestamp': int(time.time()), |
|
1376 | 1355 | } |
|
1377 | 1356 | |
|
1378 | 1357 | |
|
1379 | 1358 | @pytest.fixture(autouse=True) |
|
1380 | 1359 | def collect_appenlight_stats(request, testrun): |
|
1381 | 1360 | """ |
|
1382 | 1361 | This fixture reports memory consumption of single tests. |
|
1383 | 1362 | |
|
1384 | 1363 | It gathers data based on `psutil` and sends them to Appenlight. The option |
|
1385 | 1364 | ``--ae`` has to be used to enable this fixture and the API key for your |
|
1386 | 1365 | application has to be provided in ``--ae-key``. |
|
1387 | 1366 | """ |
|
1388 | 1367 | try: |
|
1389 | 1368 | # cygwin does not have psutil support yet. |
|
1390 | 1369 | import psutil |
|
1391 | 1370 | except ImportError: |
|
1392 | 1371 | return |
|
1393 | 1372 | |
|
1394 | 1373 | if not request.config.getoption('--appenlight'): |
|
1395 | 1374 | return |
|
1396 | 1375 | else: |
|
1397 | 1376 | # Only request the pylonsapp fixture if appenlight tracking is |
|
1398 | 1377 | # enabled. This will speed up a test run of unit tests by 2 to 3 |
|
1399 | 1378 | # seconds if appenlight is not enabled. |
|
1400 | 1379 | pylonsapp = request.getfuncargvalue("pylonsapp") |
|
1401 | 1380 | url = '{}/api/logs'.format(request.config.getoption('--appenlight-url')) |
|
1402 | 1381 | client = AppenlightClient( |
|
1403 | 1382 | url=url, |
|
1404 | 1383 | api_key=request.config.getoption('--appenlight-api-key'), |
|
1405 | 1384 | namespace=request.node.nodeid, |
|
1406 | 1385 | request=str(testrun['uuid']), |
|
1407 | 1386 | testrun=testrun) |
|
1408 | 1387 | |
|
1409 | 1388 | client.collect({ |
|
1410 | 1389 | 'message': "Starting", |
|
1411 | 1390 | }) |
|
1412 | 1391 | |
|
1413 | 1392 | server_and_port = pylonsapp.config['vcs.server'] |
|
1414 | server = create_vcsserver_proxy(server_and_port) | |
|
1393 | protocol = pylonsapp.config['vcs.server.protocol'] | |
|
1394 | server = create_vcsserver_proxy(server_and_port, protocol) | |
|
1415 | 1395 | with server: |
|
1416 | 1396 | vcs_pid = server.get_pid() |
|
1417 | 1397 | server.run_gc() |
|
1418 | 1398 | vcs_process = psutil.Process(vcs_pid) |
|
1419 | 1399 | mem = vcs_process.memory_info() |
|
1420 | 1400 | client.tag_before('vcsserver.rss', mem.rss) |
|
1421 | 1401 | client.tag_before('vcsserver.vms', mem.vms) |
|
1422 | 1402 | |
|
1423 | 1403 | test_process = psutil.Process() |
|
1424 | 1404 | mem = test_process.memory_info() |
|
1425 | 1405 | client.tag_before('test.rss', mem.rss) |
|
1426 | 1406 | client.tag_before('test.vms', mem.vms) |
|
1427 | 1407 | |
|
1428 | 1408 | client.tag_before('time', time.time()) |
|
1429 | 1409 | |
|
1430 | 1410 | @request.addfinalizer |
|
1431 | 1411 | def send_stats(): |
|
1432 | 1412 | client.tag_after('time', time.time()) |
|
1433 | 1413 | with server: |
|
1434 | 1414 | gc_stats = server.run_gc() |
|
1435 | 1415 | for tag, value in gc_stats.items(): |
|
1436 | 1416 | client.tag_after(tag, value) |
|
1437 | 1417 | mem = vcs_process.memory_info() |
|
1438 | 1418 | client.tag_after('vcsserver.rss', mem.rss) |
|
1439 | 1419 | client.tag_after('vcsserver.vms', mem.vms) |
|
1440 | 1420 | |
|
1441 | 1421 | mem = test_process.memory_info() |
|
1442 | 1422 | client.tag_after('test.rss', mem.rss) |
|
1443 | 1423 | client.tag_after('test.vms', mem.vms) |
|
1444 | 1424 | |
|
1445 | 1425 | client.collect({ |
|
1446 | 1426 | 'message': "Finished", |
|
1447 | 1427 | }) |
|
1448 | 1428 | client.send_stats() |
|
1449 | 1429 | |
|
1450 | 1430 | return client |
|
1451 | 1431 | |
|
1452 | 1432 | |
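
The fixture leans on `psutil.Process.memory_info()`, which returns a named tuple whose `rss` and `vms` fields are byte counts; tagging those values before and after the test is what makes the `.delta` tags computed in `send_stats` meaningful. A standalone sketch of the psutil calls involved:

    # Standalone sketch of the psutil calls used by the fixture above.
    import psutil

    proc = psutil.Process()      # current process when no pid is given
    mem = proc.memory_info()
    print('rss={} vms={} (bytes)'.format(mem.rss, mem.vms))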
|
1453 | 1433 | class AppenlightClient(): |
|
1454 | 1434 | |
|
1455 | 1435 | url_template = '{url}?protocol_version=0.5' |
|
1456 | 1436 | |
|
1457 | 1437 | def __init__( |
|
1458 | 1438 | self, url, api_key, add_server=True, add_timestamp=True, |
|
1459 | 1439 | namespace=None, request=None, testrun=None): |
|
1460 | 1440 | self.url = self.url_template.format(url=url) |
|
1461 | 1441 | self.api_key = api_key |
|
1462 | 1442 | self.add_server = add_server |
|
1463 | 1443 | self.add_timestamp = add_timestamp |
|
1464 | 1444 | self.namespace = namespace |
|
1465 | 1445 | self.request = request |
|
1466 | 1446 | self.server = socket.getfqdn(socket.gethostname()) |
|
1467 | 1447 | self.tags_before = {} |
|
1468 | 1448 | self.tags_after = {} |
|
1469 | 1449 | self.stats = [] |
|
1470 | 1450 | self.testrun = testrun or {} |
|
1471 | 1451 | |
|
1472 | 1452 | def tag_before(self, tag, value): |
|
1473 | 1453 | self.tags_before[tag] = value |
|
1474 | 1454 | |
|
1475 | 1455 | def tag_after(self, tag, value): |
|
1476 | 1456 | self.tags_after[tag] = value |
|
1477 | 1457 | |
|
1478 | 1458 | def collect(self, data): |
|
1479 | 1459 | if self.add_server: |
|
1480 | 1460 | data.setdefault('server', self.server) |
|
1481 | 1461 | if self.add_timestamp: |
|
1482 | 1462 | data.setdefault('date', datetime.datetime.utcnow().isoformat()) |
|
1483 | 1463 | if self.namespace: |
|
1484 | 1464 | data.setdefault('namespace', self.namespace) |
|
1485 | 1465 | if self.request: |
|
1486 | 1466 | data.setdefault('request', self.request) |
|
1487 | 1467 | self.stats.append(data) |
|
1488 | 1468 | |
|
1489 | 1469 | def send_stats(self): |
|
1490 | 1470 | tags = [ |
|
1491 | 1471 | ('testrun', self.request), |
|
1492 | 1472 | ('testrun.start', self.testrun['start']), |
|
1493 | 1473 | ('testrun.timestamp', self.testrun['timestamp']), |
|
1494 | 1474 | ('test', self.namespace), |
|
1495 | 1475 | ] |
|
1496 | 1476 | for key, value in self.tags_before.items(): |
|
1497 | 1477 | tags.append((key + '.before', value)) |
|
1498 | 1478 | try: |
|
1499 | 1479 | delta = self.tags_after[key] - value |
|
1500 | 1480 | tags.append((key + '.delta', delta)) |
|
1501 | 1481 | except Exception: |
|
1502 | 1482 | pass |
|
1503 | 1483 | for key, value in self.tags_after.items(): |
|
1504 | 1484 | tags.append((key + '.after', value)) |
|
1505 | 1485 | self.collect({ |
|
1506 | 1486 | 'message': "Collected tags", |
|
1507 | 1487 | 'tags': tags, |
|
1508 | 1488 | }) |
|
1509 | 1489 | |
|
1510 | 1490 | response = requests.post( |
|
1511 | 1491 | self.url, |
|
1512 | 1492 | headers={ |
|
1513 | 1493 | 'X-appenlight-api-key': self.api_key}, |
|
1514 | 1494 | json=self.stats, |
|
1515 | 1495 | ) |
|
1516 | 1496 | |
|
1517 | 1497 | if not response.status_code == 200: |
|
1518 | 1498 | pprint.pprint(self.stats) |
|
1519 | 1499 | print response.headers |
|
1520 | 1500 | print response.text |
|
1521 | 1501 | raise Exception('Sending to appenlight failed') |
|
1522 | 1502 | |
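
In `send_stats`, every key present in both `tags_before` and `tags_after` also yields a `.delta` tag; the bare `except Exception` silently skips keys that are missing or not numeric. The bookkeeping in isolation, with invented values:

    # Invented values; mirrors the before/after/delta tags built above.
    tags_before = {'test.rss': 100 * 1024 * 1024}
    tags_after = {'test.rss': 130 * 1024 * 1024}
    delta = tags_after['test.rss'] - tags_before['test.rss']
    assert delta == 30 * 1024 * 1024  # the test grew memory by 30 MiB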
|
1523 | 1503 | |
|
1524 | 1504 | @pytest.fixture |
|
1525 | 1505 | def gist_util(request, pylonsapp): |
|
1526 | 1506 | """ |
|
1527 | 1507 | Provides a wired instance of `GistUtility` with integrated cleanup. |
|
1528 | 1508 | """ |
|
1529 | 1509 | utility = GistUtility() |
|
1530 | 1510 | request.addfinalizer(utility.cleanup) |
|
1531 | 1511 | return utility |
|
1532 | 1512 | |
|
1533 | 1513 | |
|
1534 | 1514 | class GistUtility(object): |
|
1535 | 1515 | def __init__(self): |
|
1536 | 1516 | self.fixture = Fixture() |
|
1537 | 1517 | self.gist_ids = [] |
|
1538 | 1518 | |
|
1539 | 1519 | def create_gist(self, **kwargs): |
|
1540 | 1520 | gist = self.fixture.create_gist(**kwargs) |
|
1541 | 1521 | self.gist_ids.append(gist.gist_id) |
|
1542 | 1522 | return gist |
|
1543 | 1523 | |
|
1544 | 1524 | def cleanup(self): |
|
1545 | 1525 | for id_ in self.gist_ids: |
|
1546 | 1526 | self.fixture.destroy_gists(str(id_)) |
|
1547 | 1527 | |
|
1548 | 1528 | |
|
1549 | 1529 | @pytest.fixture |
|
1550 | 1530 | def enabled_backends(request): |
|
1551 | 1531 | backends = request.config.option.backends |
|
1552 | 1532 | return backends[:] |
|
1553 | 1533 | |
|
1554 | 1534 | |
|
1555 | 1535 | @pytest.fixture |
|
1556 | 1536 | def settings_util(request): |
|
1557 | 1537 | """ |
|
1558 | 1538 | Provides a wired instance of `SettingsUtility` with integrated cleanup. |
|
1559 | 1539 | """ |
|
1560 | 1540 | utility = SettingsUtility() |
|
1561 | 1541 | request.addfinalizer(utility.cleanup) |
|
1562 | 1542 | return utility |
|
1563 | 1543 | |
|
1564 | 1544 | |
|
1565 | 1545 | class SettingsUtility(object): |
|
1566 | 1546 | def __init__(self): |
|
1567 | 1547 | self.rhodecode_ui_ids = [] |
|
1568 | 1548 | self.rhodecode_setting_ids = [] |
|
1569 | 1549 | self.repo_rhodecode_ui_ids = [] |
|
1570 | 1550 | self.repo_rhodecode_setting_ids = [] |
|
1571 | 1551 | |
|
1572 | 1552 | def create_repo_rhodecode_ui( |
|
1573 | 1553 | self, repo, section, value, key=None, active=True, cleanup=True): |
|
1574 | 1554 | key = key or hashlib.sha1( |
|
1575 | 1555 | '{}{}{}'.format(section, value, repo.repo_id)).hexdigest() |
|
1576 | 1556 | |
|
1577 | 1557 | setting = RepoRhodeCodeUi() |
|
1578 | 1558 | setting.repository_id = repo.repo_id |
|
1579 | 1559 | setting.ui_section = section |
|
1580 | 1560 | setting.ui_value = value |
|
1581 | 1561 | setting.ui_key = key |
|
1582 | 1562 | setting.ui_active = active |
|
1583 | 1563 | Session().add(setting) |
|
1584 | 1564 | Session().commit() |
|
1585 | 1565 | |
|
1586 | 1566 | if cleanup: |
|
1587 | 1567 | self.repo_rhodecode_ui_ids.append(setting.ui_id) |
|
1588 | 1568 | return setting |
|
1589 | 1569 | |
|
1590 | 1570 | def create_rhodecode_ui( |
|
1591 | 1571 | self, section, value, key=None, active=True, cleanup=True): |
|
1592 | 1572 | key = key or hashlib.sha1('{}{}'.format(section, value)).hexdigest() |
|
1593 | 1573 | |
|
1594 | 1574 | setting = RhodeCodeUi() |
|
1595 | 1575 | setting.ui_section = section |
|
1596 | 1576 | setting.ui_value = value |
|
1597 | 1577 | setting.ui_key = key |
|
1598 | 1578 | setting.ui_active = active |
|
1599 | 1579 | Session().add(setting) |
|
1600 | 1580 | Session().commit() |
|
1601 | 1581 | |
|
1602 | 1582 | if cleanup: |
|
1603 | 1583 | self.rhodecode_ui_ids.append(setting.ui_id) |
|
1604 | 1584 | return setting |
|
1605 | 1585 | |
|
1606 | 1586 | def create_repo_rhodecode_setting( |
|
1607 | 1587 | self, repo, name, value, type_, cleanup=True): |
|
1608 | 1588 | setting = RepoRhodeCodeSetting( |
|
1609 | 1589 | repo.repo_id, key=name, val=value, type=type_) |
|
1610 | 1590 | Session().add(setting) |
|
1611 | 1591 | Session().commit() |
|
1612 | 1592 | |
|
1613 | 1593 | if cleanup: |
|
1614 | 1594 | self.repo_rhodecode_setting_ids.append(setting.app_settings_id) |
|
1615 | 1595 | return setting |
|
1616 | 1596 | |
|
1617 | 1597 | def create_rhodecode_setting(self, name, value, type_, cleanup=True): |
|
1618 | 1598 | setting = RhodeCodeSetting(key=name, val=value, type=type_) |
|
1619 | 1599 | Session().add(setting) |
|
1620 | 1600 | Session().commit() |
|
1621 | 1601 | |
|
1622 | 1602 | if cleanup: |
|
1623 | 1603 | self.rhodecode_setting_ids.append(setting.app_settings_id) |
|
1624 | 1604 | |
|
1625 | 1605 | return setting |
|
1626 | 1606 | |
|
1627 | 1607 | def cleanup(self): |
|
1628 | 1608 | for id_ in self.rhodecode_ui_ids: |
|
1629 | 1609 | setting = RhodeCodeUi.get(id_) |
|
1630 | 1610 | Session().delete(setting) |
|
1631 | 1611 | |
|
1632 | 1612 | for id_ in self.rhodecode_setting_ids: |
|
1633 | 1613 | setting = RhodeCodeSetting.get(id_) |
|
1634 | 1614 | Session().delete(setting) |
|
1635 | 1615 | |
|
1636 | 1616 | for id_ in self.repo_rhodecode_ui_ids: |
|
1637 | 1617 | setting = RepoRhodeCodeUi.get(id_) |
|
1638 | 1618 | Session().delete(setting) |
|
1639 | 1619 | |
|
1640 | 1620 | for id_ in self.repo_rhodecode_setting_ids: |
|
1641 | 1621 | setting = RepoRhodeCodeSetting.get(id_) |
|
1642 | 1622 | Session().delete(setting) |
|
1643 | 1623 | |
|
1644 | 1624 | Session().commit() |
|
1645 | 1625 | |
|
1646 | 1626 | |
|
1647 | 1627 | @pytest.fixture |
|
1648 | 1628 | def no_notifications(request): |
|
1649 | 1629 | notification_patcher = mock.patch( |
|
1650 | 1630 | 'rhodecode.model.notification.NotificationModel.create') |
|
1651 | 1631 | notification_patcher.start() |
|
1652 | 1632 | request.addfinalizer(notification_patcher.stop) |
|
1653 | 1633 | |
|
1654 | 1634 | |
|
1655 | 1635 | @pytest.fixture |
|
1656 | 1636 | def silence_action_logger(request): |
|
1657 | 1637 | notification_patcher = mock.patch( |
|
1658 | 1638 | 'rhodecode.lib.utils.action_logger') |
|
1659 | 1639 | notification_patcher.start() |
|
1660 | 1640 | request.addfinalizer(notification_patcher.stop) |
|
1661 | 1641 | |
|
1662 | 1642 | |
|
1663 | 1643 | @pytest.fixture(scope='session') |
|
1664 | 1644 | def repeat(request): |
|
1665 | 1645 | """ |
|
1666 | 1646 | The number of repetitions is based on this fixture. |
|
1667 | 1647 | |
|
1668 | 1648 | Slower calls may divide it by 10 or 100. It is chosen in a way so that the |
|
1669 | 1649 | tests are not too slow in our default test suite. |
|
1670 | 1650 | """ |
|
1671 | 1651 | return request.config.getoption('--repeat') |
|
1672 | 1652 | |
|
1673 | 1653 | |
|
1674 | 1654 | @pytest.fixture |
|
1675 | 1655 | def rhodecode_fixtures(): |
|
1676 | 1656 | return Fixture() |
|
1677 | 1657 | |
|
1678 | 1658 | |
|
1679 | 1659 | @pytest.fixture |
|
1680 | 1660 | def request_stub(): |
|
1681 | 1661 | """ |
|
1682 | 1662 | Stub request object. |
|
1683 | 1663 | """ |
|
1684 | 1664 | request = pyramid.testing.DummyRequest() |
|
1685 | 1665 | request.scheme = 'https' |
|
1686 | 1666 | return request |
|
1687 | 1667 | |
|
1688 | 1668 | |
|
1689 | 1669 | @pytest.fixture |
|
1690 | 1670 | def config_stub(request, request_stub): |
|
1691 | 1671 | """ |
|
1692 | 1672 | Set up pyramid.testing and return the Configurator. |
|
1693 | 1673 | """ |
|
1694 | 1674 | config = pyramid.testing.setUp(request=request_stub) |
|
1695 | 1675 | |
|
1696 | 1676 | @request.addfinalizer |
|
1697 | 1677 | def cleanup(): |
|
1698 | 1678 | pyramid.testing.tearDown() |
|
1699 | 1679 | |
|
1700 | 1680 | return config |
|
1701 | 1681 | |
|
1702 | 1682 | |
|
1703 | 1683 | @pytest.fixture |
|
1704 | 1684 | def StubIntegrationType(): |
|
1705 | 1685 | class _StubIntegrationType(IntegrationTypeBase): |
|
1706 | 1686 | """ Test integration type class """ |
|
1707 | 1687 | |
|
1708 | 1688 | key = 'test' |
|
1709 | 1689 | display_name = 'Test integration type' |
|
1710 | 1690 | description = 'A test integration type for testing' |
|
1711 | 1691 | icon = 'test_icon_html_image' |
|
1712 | 1692 | |
|
1713 | 1693 | def __init__(self, settings): |
|
1714 | 1694 | super(_StubIntegrationType, self).__init__(settings) |
|
1715 | 1695 | self.sent_events = [] # for testing |
|
1716 | 1696 | |
|
1717 | 1697 | def send_event(self, event): |
|
1718 | 1698 | self.sent_events.append(event) |
|
1719 | 1699 | |
|
1720 | 1700 | def settings_schema(self): |
|
1721 | 1701 | class SettingsSchema(colander.Schema): |
|
1722 | 1702 | test_string_field = colander.SchemaNode( |
|
1723 | 1703 | colander.String(), |
|
1724 | 1704 | missing=colander.required, |
|
1725 | 1705 | title='test string field', |
|
1726 | 1706 | ) |
|
1727 | 1707 | test_int_field = colander.SchemaNode( |
|
1728 | 1708 | colander.Int(), |
|
1729 | 1709 | title='some integer setting', |
|
1730 | 1710 | ) |
|
1731 | 1711 | return SettingsSchema() |
|
1732 | 1712 | |
|
1733 | 1713 | |
|
1734 | 1714 | integration_type_registry.register_integration_type(_StubIntegrationType) |
|
1735 | 1715 | return _StubIntegrationType |
|
1736 | 1716 | |
|
1737 | 1717 | @pytest.fixture |
|
1738 | 1718 | def stub_integration_settings(): |
|
1739 | 1719 | return { |
|
1740 | 1720 | 'test_string_field': 'some data', |
|
1741 | 1721 | 'test_int_field': 100, |
|
1742 | 1722 | } |
|
1743 | 1723 | |
|
1744 | 1724 | |
|
1745 | 1725 | @pytest.fixture |
|
1746 | 1726 | def repo_integration_stub(request, repo_stub, StubIntegrationType, |
|
1747 | 1727 | stub_integration_settings): |
|
1748 | 1728 | integration = IntegrationModel().create( |
|
1749 | 1729 | StubIntegrationType, settings=stub_integration_settings, enabled=True, |
|
1750 | 1730 | name='test repo integration', |
|
1751 | 1731 | repo=repo_stub, repo_group=None, child_repos_only=None) |
|
1752 | 1732 | |
|
1753 | 1733 | @request.addfinalizer |
|
1754 | 1734 | def cleanup(): |
|
1755 | 1735 | IntegrationModel().delete(integration) |
|
1756 | 1736 | |
|
1757 | 1737 | return integration |
|
1758 | 1738 | |
|
1759 | 1739 | |
|
1760 | 1740 | @pytest.fixture |
|
1761 | 1741 | def repogroup_integration_stub(request, test_repo_group, StubIntegrationType, |
|
1762 | 1742 | stub_integration_settings): |
|
1763 | 1743 | integration = IntegrationModel().create( |
|
1764 | 1744 | StubIntegrationType, settings=stub_integration_settings, enabled=True, |
|
1765 | 1745 | name='test repogroup integration', |
|
1766 | 1746 | repo=None, repo_group=test_repo_group, child_repos_only=True) |
|
1767 | 1747 | |
|
1768 | 1748 | @request.addfinalizer |
|
1769 | 1749 | def cleanup(): |
|
1770 | 1750 | IntegrationModel().delete(integration) |
|
1771 | 1751 | |
|
1772 | 1752 | return integration |
|
1773 | 1753 | |
|
1774 | 1754 | |
|
1775 | 1755 | @pytest.fixture |
|
1776 | 1756 | def repogroup_recursive_integration_stub(request, test_repo_group, |
|
1777 | 1757 | StubIntegrationType, stub_integration_settings): |
|
1778 | 1758 | integration = IntegrationModel().create( |
|
1779 | 1759 | StubIntegrationType, settings=stub_integration_settings, enabled=True, |
|
1780 | 1760 | name='test recursive repogroup integration', |
|
1781 | 1761 | repo=None, repo_group=test_repo_group, child_repos_only=False) |
|
1782 | 1762 | |
|
1783 | 1763 | @request.addfinalizer |
|
1784 | 1764 | def cleanup(): |
|
1785 | 1765 | IntegrationModel().delete(integration) |
|
1786 | 1766 | |
|
1787 | 1767 | return integration |
|
1788 | 1768 | |
|
1789 | 1769 | |
|
1790 | 1770 | @pytest.fixture |
|
1791 | 1771 | def global_integration_stub(request, StubIntegrationType, |
|
1792 | 1772 | stub_integration_settings): |
|
1793 | 1773 | integration = IntegrationModel().create( |
|
1794 | 1774 | StubIntegrationType, settings=stub_integration_settings, enabled=True, |
|
1795 | 1775 | name='test global integration', |
|
1796 | 1776 | repo=None, repo_group=None, child_repos_only=None) |
|
1797 | 1777 | |
|
1798 | 1778 | @request.addfinalizer |
|
1799 | 1779 | def cleanup(): |
|
1800 | 1780 | IntegrationModel().delete(integration) |
|
1801 | 1781 | |
|
1802 | 1782 | return integration |
|
1803 | 1783 | |
|
1804 | 1784 | |
|
1805 | 1785 | @pytest.fixture |
|
1806 | 1786 | def root_repos_integration_stub(request, StubIntegrationType, |
|
1807 | 1787 | stub_integration_settings): |
|
1808 | 1788 | integration = IntegrationModel().create( |
|
1809 | 1789 | StubIntegrationType, settings=stub_integration_settings, enabled=True, |
|
1810 | 1790 | name='test global integration', |
|
1811 | 1791 | repo=None, repo_group=None, child_repos_only=True) |
|
1812 | 1792 | |
|
1813 | 1793 | @request.addfinalizer |
|
1814 | 1794 | def cleanup(): |
|
1815 | 1795 | IntegrationModel().delete(integration) |
|
1816 | 1796 | |
|
1817 | 1797 | return integration |
|
1818 | 1798 | |
|
1819 | 1799 | |
|
1820 | 1800 | @pytest.fixture |
|
1821 | 1801 | def local_dt_to_utc(): |
|
1822 | 1802 | def _factory(dt): |
|
1823 | 1803 | return dt.replace(tzinfo=dateutil.tz.tzlocal()).astimezone( |
|
1824 | 1804 | dateutil.tz.tzutc()).replace(tzinfo=None) |
|
1825 | 1805 | return _factory |
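
The factory converts a naive local datetime to a naive UTC datetime by temporarily attaching `dateutil` timezones and stripping them again. A usage sketch inside a test, assuming a hypothetical UTC+2 local timezone:

    # Usage sketch; the two-hour shift assumes a UTC+2 local timezone.
    import datetime

    def test_conversion(local_dt_to_utc):
        utc = local_dt_to_utc(datetime.datetime(2017, 6, 1, 12, 0))
        assert utc == datetime.datetime(2017, 6, 1, 10, 0)  # naive UTC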
@@ -1,471 +1,422 @@
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2010-2017 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | import json |
|
22 | 22 | import logging.config |
|
23 | 23 | import os |
|
24 | 24 | import platform |
|
25 | 25 | import socket |
|
26 | 26 | import subprocess32 |
|
27 | 27 | import time |
|
28 | 28 | from urllib2 import urlopen, URLError |
|
29 | 29 | |
|
30 | 30 | import configobj |
|
31 | 31 | import pylons |
|
32 | 32 | import pytest |
|
33 | 33 | import webob |
|
34 | 34 | from beaker.session import SessionObject |
|
35 | 35 | from paste.deploy import loadapp |
|
36 | 36 | from pylons.i18n.translation import _get_translator |
|
37 | 37 | from pylons.util import ContextObj |
|
38 | from Pyro4.errors import CommunicationError | |
|
39 | 38 | from routes.util import URLGenerator |
|
40 | 39 | |
|
41 | 40 | from rhodecode.lib import vcs |
|
42 | 41 | from rhodecode.tests.fixture import TestINI |
|
43 | 42 | import rhodecode |
|
44 | 43 | |
|
45 | 44 | |
|
46 | 45 | def _parse_json(value): |
|
47 | 46 | return json.loads(value) if value else None |
|
48 | 47 | |
|
49 | 48 | |
|
50 | 49 | def pytest_addoption(parser): |
|
51 | 50 | parser.addoption( |
|
52 | 51 | '--test-loglevel', dest='test_loglevel', |
|
53 | 52 | help="Set default Logging level for tests, warn (default), info, debug") |
|
54 | 53 | group = parser.getgroup('pylons') |
|
55 | 54 | group.addoption( |
|
56 | 55 | '--with-pylons', dest='pylons_config', |
|
57 | 56 | help="Set up a Pylons environment with the specified config file.") |
|
58 | 57 | group.addoption( |
|
59 | 58 | '--pylons-config-override', action='store', type=_parse_json, |
|
60 | 59 | default=None, dest='pylons_config_override', help=( |
|
61 | 60 | "Overrides the .ini file settings. Should be specified in JSON" |
|
62 | 61 | " format, e.g. '{\"section\": {\"parameter\": \"value\", ...}}'" |
|
63 | 62 | ) |
|
64 | 63 | ) |
|
65 | 64 | parser.addini( |
|
66 | 65 | 'pylons_config', |
|
67 | 66 | "Set up a Pylons environment with the specified config file.") |
|
68 | 67 | |
|
69 | 68 | vcsgroup = parser.getgroup('vcs') |
|
70 | 69 | vcsgroup.addoption( |
|
71 | 70 | '--without-vcsserver', dest='with_vcsserver', action='store_false', |
|
72 | 71 | help="Do not start the VCSServer in a background process.") |
|
73 | 72 | vcsgroup.addoption( |
|
74 | '--with-vcsserver', dest='vcsserver_config_pyro4', | |
|
75 | help="Start the VCSServer with the specified config file.") | |
|
76 | vcsgroup.addoption( | |
|
77 | 73 | '--with-vcsserver-http', dest='vcsserver_config_http', |
|
78 | 74 | help="Start the HTTP VCSServer with the specified config file.") |
|
79 | 75 | vcsgroup.addoption( |
|
80 | 76 | '--vcsserver-protocol', dest='vcsserver_protocol', |
|
81 | help="Start the VCSServer with HTTP / Pyro4 protocol support.") | |

77 | help="Start the VCSServer with HTTP protocol support.") | |
|
82 | 78 | vcsgroup.addoption( |
|
83 | 79 | '--vcsserver-config-override', action='store', type=_parse_json, |
|
84 | 80 | default=None, dest='vcsserver_config_override', help=( |
|
85 | 81 | "Overrides the .ini file settings for the VCSServer. " |
|
86 | 82 | "Should be specified in JSON " |
|
87 | 83 | "format, e.g. '{\"section\": {\"parameter\": \"value\", ...}}'" |
|
88 | 84 | ) |
|
89 | 85 | ) |
|
90 | 86 | vcsgroup.addoption( |
|
91 | 87 | '--vcsserver-port', action='store', type=int, |
|
92 | 88 | default=None, help=( |
|
93 | 89 | "Allows setting the port of the vcsserver. Useful when testing |
|
94 | 90 | "against an already running server and random ports cause " |
|
95 | 91 | "trouble.")) |
|
96 | 92 | parser.addini( |
|
97 | 'vcsserver_config_pyro4', | |
|
98 | "Start the VCSServer with the specified config file.") | |
|
99 | parser.addini( | |
|
100 | 93 | 'vcsserver_config_http', |
|
101 | 94 | "Start the HTTP VCSServer with the specified config file.") |
|
102 | 95 | parser.addini( |
|
103 | 96 | 'vcsserver_protocol', |
|
104 | "Start the VCSServer with HTTP / Pyro4 protocol support.") | |

97 | "Start the VCSServer with HTTP protocol support.") | |
|
105 | 98 | |
|
106 | 99 | |
|
107 | 100 | @pytest.fixture(scope='session') |
|
108 | 101 | def vcsserver(request, vcsserver_port, vcsserver_factory): |
|
109 | 102 | """ |
|
110 | 103 | Session scope VCSServer. |
|
111 | 104 | |
|
112 | 105 | Tests which need the VCSServer have to rely on this fixture in order |
|
113 | 106 | to ensure it will be running. |
|
114 | 107 | |
|
115 | 108 | For specific needs, the fixture vcsserver_factory can be used. It allows |

116 | 109 | adjusting the configuration file for the test run. |
|
117 | 110 | |
|
118 | 111 | Command line args: |
|
119 | 112 | |
|
120 | 113 | --without-vcsserver: Allows switching this fixture off. You have to |
|
121 | 114 | manually start the server. |
|
122 | 115 | |
|
123 | 116 | --vcsserver-port: Will expect the VCSServer to listen on this port. |
|
124 | 117 | """ |
|
125 | 118 | |
|
126 | 119 | if not request.config.getoption('with_vcsserver'): |
|
127 | 120 | return None |
|
128 | 121 | |
|
129 | 122 | use_http = _use_vcs_http_server(request.config) |
|
130 | 123 | return vcsserver_factory( |
|
131 | 124 | request, use_http=use_http, vcsserver_port=vcsserver_port) |
|
132 | 125 | |
|
133 | 126 | |
|
134 | 127 | @pytest.fixture(scope='session') |
|
135 | 128 | def vcsserver_factory(tmpdir_factory): |
|
136 | 129 | """ |
|
137 | 130 | Use this if you need a running vcsserver with a special configuration. |
|
138 | 131 | """ |
|
139 | 132 | |
|
140 | 133 | def factory(request, use_http=True, overrides=(), vcsserver_port=None): |
|
141 | 134 | |
|
142 | 135 | if vcsserver_port is None: |
|
143 | 136 | vcsserver_port = get_available_port() |
|
144 | 137 | |
|
145 | 138 | overrides = list(overrides) |
|
146 | 139 | if use_http: |
|
147 | 140 | overrides.append({'server:main': {'port': vcsserver_port}}) |
|
148 | 141 | else: |
|
149 | 142 | overrides.append({'DEFAULT': {'port': vcsserver_port}}) |
|
150 | 143 | |
|
151 | 144 | if is_cygwin(): |
|
152 | 145 | platform_override = {'DEFAULT': { |
|
153 | 146 | 'beaker.cache.repo_object.type': 'nocache'}} |
|
154 | 147 | overrides.append(platform_override) |
|
155 | 148 | |
|
156 | option_name = ( | |
|
157 | 'vcsserver_config_http' if use_http else 'vcsserver_config_pyro4') | |
|
149 | option_name = 'vcsserver_config_http' if use_http else '' | |
|
158 | 150 | override_option_name = 'vcsserver_config_override' |
|
159 | 151 | config_file = get_config( |
|
160 | 152 | request.config, option_name=option_name, |
|
161 | 153 | override_option_name=override_option_name, overrides=overrides, |
|
162 | 154 | basetemp=tmpdir_factory.getbasetemp().strpath, |
|
163 | 155 | prefix='test_vcs_') |
|
164 | 156 | |
|
165 | print | |

166 | ServerClass = HttpVCSServer if use_http else Pyro4VCSServer | |

157 | print("Using the VCSServer configuration:{}".format(config_file)) | |

158 | ServerClass = HttpVCSServer if use_http else None | |
|
167 | 159 | server = ServerClass(config_file) |
|
168 | 160 | server.start() |
|
169 | 161 | |
|
170 | 162 | @request.addfinalizer |
|
171 | 163 | def cleanup(): |
|
172 | 164 | server.shutdown() |
|
173 | 165 | |
|
174 | 166 | server.wait_until_ready() |
|
175 | 167 | return server |
|
176 | 168 | |
|
177 | 169 | return factory |
|
178 | 170 | |
|
179 | 171 | |
|
180 | 172 | def is_cygwin(): |
|
181 | 173 | return 'cygwin' in platform.system().lower() |
|
182 | 174 | |
|
183 | 175 | |
|
184 | 176 | def _use_vcs_http_server(config): |
|
185 | 177 | protocol_option = 'vcsserver_protocol' |
|
186 | 178 | protocol = ( |
|
187 | 179 | config.getoption(protocol_option) or |
|
188 | 180 | config.getini(protocol_option) or |
|
189 | 181 | 'http') |
|
190 | 182 | return protocol == 'http' |
|
191 | 183 | |
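
`_use_vcs_http_server` resolves the protocol through a simple precedence chain: the command line option wins over the ini setting, and both fall back to 'http'. The same pattern in isolation:

    # The precedence chain used above: CLI option first, then the ini
    # value, then the hard-coded default.
    def resolve(option_value, ini_value, default='http'):
        return option_value or ini_value or default

    assert resolve(None, None) == 'http'
    assert resolve('http', 'ignored') == 'http'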
|
192 | 184 | |
|
193 | 185 | def _use_log_level(config): |
|
194 | 186 | level = config.getoption('test_loglevel') or 'warn' |
|
195 | 187 | return level.upper() |
|
196 | 188 | |
|
197 | 189 | |
|
198 | 190 | class VCSServer(object): |
|
199 | 191 | """ |
|
200 | 192 | Represents a running VCSServer instance. |
|
201 | 193 | """ |
|
202 | 194 | |
|
203 | 195 | _args = [] |
|
204 | 196 | |
|
205 | 197 | def start(self): |
|
206 | 198 | print("Starting the VCSServer: {}".format(self._args)) |
|
207 | 199 | self.process = subprocess32.Popen(self._args) |
|
208 | 200 | |
|
209 | 201 | def wait_until_ready(self, timeout=30): |
|
210 | 202 | raise NotImplementedError() |
|
211 | 203 | |
|
212 | 204 | def shutdown(self): |
|
213 | 205 | self.process.kill() |
|
214 | 206 | |
|
215 | 207 | |
|
216 | class Pyro4VCSServer(VCSServer): | |
|
217 | def __init__(self, config_file): | |
|
218 | """ | |
|
219 | :param config_file: The config file to start the server with | |
|
220 | """ | |
|
221 | ||
|
222 | config_data = configobj.ConfigObj(config_file) | |
|
223 | self._config = config_data['DEFAULT'] | |
|
224 | ||
|
225 | args = ['vcsserver', '--config', config_file] | |
|
226 | self._args = args | |
|
227 | ||
|
228 | def wait_until_ready(self, timeout=30): | |
|
229 | remote_server = vcs.create_vcsserver_proxy( | |
|
230 | self.server_and_port, 'pyro4') | |
|
231 | start = time.time() | |
|
232 | with remote_server: | |
|
233 | while time.time() - start < timeout: | |
|
234 | try: | |
|
235 | remote_server.ping() | |
|
236 | break | |
|
237 | except CommunicationError: | |
|
238 | time.sleep(0.2) | |
|
239 | else: | |
|
240 | pytest.exit( | |
|
241 | "Starting the VCSServer failed or took more than {} " | |
|
242 | "seconds.".format(timeout)) | |
|
243 | ||
|
244 | @property | |
|
245 | def server_and_port(self): | |
|
246 | return '{host}:{port}'.format(**self._config) | |
|
247 | ||
|
248 | ||
|
249 | 208 | class HttpVCSServer(VCSServer): |
|
250 | 209 | """ |
|
251 | 210 | Represents a running VCSServer instance. |
|
252 | 211 | """ |
|
253 | 212 | def __init__(self, config_file): |
|
254 | 213 | config_data = configobj.ConfigObj(config_file) |
|
255 | 214 | self._config = config_data['server:main'] |
|
256 | 215 | |
|
257 | 216 | args = ['pserve', config_file] |
|
258 | 217 | self._args = args |
|
259 | 218 | |
|
260 | 219 | @property |
|
261 | 220 | def http_url(self): |
|
262 | 221 | template = 'http://{host}:{port}/' |
|
263 | 222 | return template.format(**self._config) |
|
264 | 223 | |
|
265 | 224 | def start(self): |
|
266 | 225 | self.process = subprocess32.Popen(self._args) |
|
267 | 226 | |
|
268 | 227 | def wait_until_ready(self, timeout=30): |
|
269 | 228 | host = self._config['host'] |
|
270 | 229 | port = self._config['port'] |
|
271 | 230 | status_url = 'http://{host}:{port}/status'.format(host=host, port=port) |
|
272 | 231 | start = time.time() |
|
273 | 232 | |
|
274 | 233 | while time.time() - start < timeout: |
|
275 | 234 | try: |
|
276 | 235 | urlopen(status_url) |
|
277 | 236 | break |
|
278 | 237 | except URLError: |
|
279 | 238 | time.sleep(0.2) |
|
280 | 239 | else: |
|
281 | 240 | pytest.exit( |
|
282 | 241 | "Starting the VCSServer failed or took more than {} " |
|
283 | 242 | "seconds. cmd: `{}`".format(timeout, ' '.join(self._args))) |
|
284 | 243 | |
|
285 | 244 | def shutdown(self): |
|
286 | 245 | self.process.kill() |
|
287 | 246 | |
|
288 | 247 | |
|
289 | 248 | @pytest.fixture(scope='session') |
|
290 | 249 | def pylons_config(request, tmpdir_factory, rcserver_port, vcsserver_port): |
|
291 | 250 | option_name = 'pylons_config' |
|
292 | 251 | log_level = _use_log_level(request.config) |
|
293 | 252 | |
|
294 | 253 | overrides = [ |
|
295 | 254 | {'server:main': {'port': rcserver_port}}, |
|
296 | 255 | {'app:main': { |
|
297 | 256 | 'vcs.server': 'localhost:%s' % vcsserver_port, |
|
298 | 257 | # johbo: We will always start the VCSServer on our own based on the |
|
299 | 258 | # fixtures of the test cases. For the test run it must always be |
|
300 | 259 | # off in the INI file. |
|
301 | 260 | 'vcs.start_server': 'false', |
|
302 | 261 | }}, |
|
303 | 262 | |
|
304 | 263 | {'handler_console': { |
|
305 | 264 | 'class ': 'StreamHandler', |
|
306 | 265 | 'args ': '(sys.stderr,)', |
|
307 | 266 | 'level': log_level, |
|
308 | 267 | }}, |
|
309 | 268 | |
|
310 | 269 | ] |
|
311 | 270 | if _use_vcs_http_server(request.config): |
|
312 | 271 | overrides.append({ |
|
313 | 272 | 'app:main': { |
|
314 | 273 | 'vcs.server.protocol': 'http', |
|
315 | 274 | 'vcs.scm_app_implementation': 'http', |
|
316 | 275 | 'vcs.hooks.protocol': 'http', |
|
317 | 276 | } |
|
318 | 277 | }) |
|
319 | else: | |
|
320 | overrides.append({ | |
|
321 | 'app:main': { | |
|
322 | 'vcs.server.protocol': 'pyro4', | |
|
323 | 'vcs.scm_app_implementation': 'pyro4', | |
|
324 | 'vcs.hooks.protocol': 'pyro4', | |
|
325 | } | |
|
326 | }) | |
|
327 | 278 | |
|
328 | 279 | filename = get_config( |
|
329 | 280 | request.config, option_name=option_name, |
|
330 | 281 | override_option_name='{}_override'.format(option_name), |
|
331 | 282 | overrides=overrides, |
|
332 | 283 | basetemp=tmpdir_factory.getbasetemp().strpath, |
|
333 | 284 | prefix='test_rce_') |
|
334 | 285 | return filename |
|
335 | 286 | |
|
336 | 287 | |
|
337 | 288 | @pytest.fixture(scope='session') |
|
338 | 289 | def rcserver_port(request): |
|
339 | 290 | port = get_available_port() |
|
340 | print | |

291 | print('Using rcserver port {}'.format(port)) | |
|
341 | 292 | return port |
|
342 | 293 | |
|
343 | 294 | |
|
344 | 295 | @pytest.fixture(scope='session') |
|
345 | 296 | def vcsserver_port(request): |
|
346 | 297 | port = request.config.getoption('--vcsserver-port') |
|
347 | 298 | if port is None: |
|
348 | 299 | port = get_available_port() |
|
349 | print | |

300 | print('Using vcsserver port {}'.format(port)) | |
|
350 | 301 | return port |
|
351 | 302 | |
|
352 | 303 | |
|
353 | 304 | def get_available_port(): |
|
354 | 305 | family = socket.AF_INET |
|
355 | 306 | socktype = socket.SOCK_STREAM |
|
356 | 307 | host = '127.0.0.1' |
|
357 | 308 | |
|
358 | 309 | mysocket = socket.socket(family, socktype) |
|
359 | 310 | mysocket.bind((host, 0)) |
|
360 | 311 | port = mysocket.getsockname()[1] |
|
361 | 312 | mysocket.close() |
|
362 | 313 | del mysocket |
|
363 | 314 | return port |
|
364 | 315 | |
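
`get_available_port` uses the bind-to-port-0 trick: binding a socket to port 0 makes the kernel pick a currently free port, which is read back with `getsockname()` and then released. Note the inherent race: the port is only reserved while the socket is open, so another process could grab it before the server binds it. Usage is simply:

    # The OS assigns some currently unused TCP port.
    port = get_available_port()
    assert 0 < port < 65536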
|
365 | 316 | |
|
366 | 317 | @pytest.fixture(scope='session') |
|
367 | 318 | def available_port_factory(): |
|
368 | 319 | """ |
|
369 | 320 | Returns a callable which returns free port numbers. |
|
370 | 321 | """ |
|
371 | 322 | return get_available_port |
|
372 | 323 | |
|
373 | 324 | |
|
374 | 325 | @pytest.fixture |
|
375 | 326 | def available_port(available_port_factory): |
|
376 | 327 | """ |
|
377 | 328 | Gives you one free port for the current test. |
|
378 | 329 | |
|
379 | 330 | Uses "available_port_factory" to retrieve the port. |
|
380 | 331 | """ |
|
381 | 332 | return available_port_factory() |
|
382 | 333 | |
|
383 | 334 | |
|
384 | 335 | @pytest.fixture(scope='session') |
|
385 | 336 | def pylonsapp(pylons_config, vcsserver, http_environ_session): |
|
386 | print | |

337 | print("Using the RhodeCode configuration:{}".format(pylons_config)) | |
|
387 | 338 | logging.config.fileConfig( |
|
388 | 339 | pylons_config, disable_existing_loggers=False) |
|
389 | 340 | app = _setup_pylons_environment(pylons_config, http_environ_session) |
|
390 | 341 | return app |
|
391 | 342 | |
|
392 | 343 | |
|
393 | 344 | @pytest.fixture(scope='session') |
|
394 | 345 | def testini_factory(tmpdir_factory, pylons_config): |
|
395 | 346 | """ |
|
396 | 347 | Factory to create an INI file based on TestINI. |
|
397 | 348 | |
|
398 | 349 | It will make sure to place the INI file in the correct directory. |
|
399 | 350 | """ |
|
400 | 351 | basetemp = tmpdir_factory.getbasetemp().strpath |
|
401 | 352 | return TestIniFactory(basetemp, pylons_config) |
|
402 | 353 | |
|
403 | 354 | |
|
404 | 355 | class TestIniFactory(object): |
|
405 | 356 | |
|
406 | 357 | def __init__(self, basetemp, template_ini): |
|
407 | 358 | self._basetemp = basetemp |
|
408 | 359 | self._template_ini = template_ini |
|
409 | 360 | |
|
410 | 361 | def __call__(self, ini_params, new_file_prefix='test'): |
|
411 | 362 | ini_file = TestINI( |
|
412 | 363 | self._template_ini, ini_params=ini_params, |
|
413 | 364 | new_file_prefix=new_file_prefix, dir=self._basetemp) |
|
414 | 365 | result = ini_file.create() |
|
415 | 366 | return result |
|
416 | 367 | |
|
417 | 368 | |
|
418 | 369 | def get_config( |
|
419 | 370 | config, option_name, override_option_name, overrides=None, |
|
420 | 371 | basetemp=None, prefix='test'): |
|
421 | 372 | """ |
|
422 | 373 | Find a configuration file and apply overrides for the given `prefix`. |
|
423 | 374 | """ |
|
424 | 375 | config_file = ( |
|
425 | 376 | config.getoption(option_name) or config.getini(option_name)) |
|
426 | 377 | if not config_file: |
|
427 | 378 | pytest.exit( |
|
428 | 379 | "Configuration error, could not extract {}.".format(option_name)) |
|
429 | 380 | |
|
430 | 381 | overrides = overrides or [] |
|
431 | 382 | config_override = config.getoption(override_option_name) |
|
432 | 383 | if config_override: |
|
433 | 384 | overrides.append(config_override) |
|
434 | 385 | temp_ini_file = TestINI( |
|
435 | 386 | config_file, ini_params=overrides, new_file_prefix=prefix, |
|
436 | 387 | dir=basetemp) |
|
437 | 388 | |
|
438 | 389 | return temp_ini_file.create() |
|
439 | 390 | |
|
440 | 391 | |
|
441 | 392 | def _setup_pylons_environment(pylons_config, http_environ): |
|
442 | 393 | current_path = os.getcwd() |
|
443 | 394 | pylonsapp = loadapp( |
|
444 | 395 | 'config:' + pylons_config, relative_to=current_path) |
|
445 | 396 | |
|
446 | 397 | # Using rhodecode.CONFIG which is assigned during "load_environment". |
|
447 | 398 | # The indirect approach is used, because "pylonsapp" may actually be |
|
448 | 399 | # the Pyramid application. |
|
449 | 400 | pylonsapp_config = rhodecode.CONFIG |
|
450 | 401 | _init_stack(pylonsapp_config, environ=http_environ) |
|
451 | 402 | |
|
452 | 403 | # For compatibility add the attribute "config" which would be |
|
453 | 404 | # present on the Pylons application. |
|
454 | 405 | pylonsapp.config = pylonsapp_config |
|
455 | 406 | return pylonsapp |
|
456 | 407 | |
|
457 | 408 | |
|
458 | 409 | def _init_stack(config=None, environ=None): |
|
459 | 410 | if not config: |
|
460 | 411 | config = pylons.test.pylonsapp.config |
|
461 | 412 | if not environ: |
|
462 | 413 | environ = {} |
|
463 | 414 | pylons.url._push_object(URLGenerator(config['routes.map'], environ or {})) |
|
464 | 415 | pylons.app_globals._push_object(config['pylons.app_globals']) |
|
465 | 416 | pylons.config._push_object(config) |
|
466 | 417 | pylons.tmpl_context._push_object(ContextObj()) |
|
467 | 418 | # Initialize a translator for tests that utilize i18n |
|
468 | 419 | translator = _get_translator(pylons.config.get('lang')) |
|
469 | 420 | pylons.translator._push_object(translator) |
|
470 | 421 | pylons.session._push_object(SessionObject(environ or {})) |
|
471 | 422 | pylons.request._push_object(webob.Request.blank('', environ=environ)) |
@@ -1,716 +1,707 @@
|
1 | 1 | |
|
2 | 2 | |
|
3 | 3 | ################################################################################ |
|
4 | 4 | ## RHODECODE ENTERPRISE CONFIGURATION ## |
|
5 | 5 | # The %(here)s variable will be replaced with the parent directory of this file# |
|
6 | 6 | ################################################################################ |
|
7 | 7 | |
|
8 | 8 | [DEFAULT] |
|
9 | 9 | debug = true |
|
10 | 10 | |
|
11 | 11 | ################################################################################ |
|
12 | 12 | ## EMAIL CONFIGURATION ## |
|
13 | 13 | ## Uncomment and replace with the email address which should receive ## |
|
14 | 14 | ## any error reports after an application crash ## |
|
15 | 15 | ## Additionally these settings will be used by the RhodeCode mailing system ## |
|
16 | 16 | ################################################################################ |
|
17 | 17 | |
|
18 | 18 | ## prefix all emails subjects with given prefix, helps filtering out emails |
|
19 | 19 | #email_prefix = [RhodeCode] |
|
20 | 20 | |
|
21 | 21 | ## email FROM address from which all mails will be sent |
|
22 | 22 | #app_email_from = rhodecode-noreply@localhost |
|
23 | 23 | |
|
24 | 24 | ## Uncomment and replace with the address which should receive any error report |
|
25 | 25 | ## note: using appenlight for error handling doesn't need this to be uncommented |
|
26 | 26 | #email_to = admin@localhost |
|
27 | 27 | |
|
28 | 28 | ## in case of Application errors, send an error email from this address |
|
29 | 29 | #error_email_from = rhodecode_error@localhost |
|
30 | 30 | |
|
31 | 31 | ## additional error message to be sent in case of server crash |
|
32 | 32 | #error_message = |
|
33 | 33 | |
|
34 | 34 | |
|
35 | 35 | #smtp_server = mail.server.com |
|
36 | 36 | #smtp_username = |
|
37 | 37 | #smtp_password = |
|
38 | 38 | #smtp_port = |
|
39 | 39 | #smtp_use_tls = false |
|
40 | 40 | #smtp_use_ssl = true |
|
41 | 41 | ## Specify available auth parameters here (e.g. LOGIN PLAIN CRAM-MD5, etc.) |
|
42 | 42 | #smtp_auth = |
|
43 | 43 | |
|
44 | 44 | [server:main] |
|
45 | 45 | ## COMMON ## |
|
46 | 46 | host = 0.0.0.0 |
|
47 | 47 | port = 5000 |
|
48 | 48 | |
|
49 | 49 | ################################## |
|
50 | 50 | ## WAITRESS WSGI SERVER ## |
|
51 | 51 | ## Recommended for Development ## |
|
52 | 52 | ################################## |
|
53 | 53 | |
|
54 | 54 | use = egg:waitress#main |
|
55 | 55 | ## number of worker threads |
|
56 | 56 | threads = 5 |
|
57 | 57 | ## MAX BODY SIZE 100GB |
|
58 | 58 | max_request_body_size = 107374182400 |
|
59 | 59 | ## Use poll instead of select, fixes file descriptors limits problems. |
|
60 | 60 | ## May not work on old windows systems. |
|
61 | 61 | asyncore_use_poll = true |
|
62 | 62 | |
|
63 | 63 | |
|
64 | 64 | ########################## |
|
65 | 65 | ## GUNICORN WSGI SERVER ## |
|
66 | 66 | ########################## |
|
67 | 67 | ## run with gunicorn --log-config <inifile.ini> --paste <inifile.ini> |
|
68 | 68 | |
|
69 | 69 | #use = egg:gunicorn#main |
|
70 | 70 | ## Sets the number of process workers. You must set `instance_id = *` |
|
71 | 71 | ## when this option is set to more than one worker, recommended |
|
72 | 72 | ## value is (2 * NUMBER_OF_CPUS + 1), eg 2CPU = 5 workers |
|
73 | 73 | ## The `instance_id = *` must be set in the [app:main] section below |
|
74 | 74 | #workers = 2 |
|
75 | 75 | ## number of threads for each worker, must be set to 1 for gevent |

76 | 76 | ## generally recommended to be at 1 |
|
77 | 77 | #threads = 1 |
|
78 | 78 | ## process name |
|
79 | 79 | #proc_name = rhodecode |
|
80 | 80 | ## type of worker class, one of sync, gevent |
|
81 | 81 | ## for bigger setups, using a worker class other than sync is recommended |
|
82 | 82 | #worker_class = sync |
|
83 | 83 | ## The maximum number of simultaneous clients. Valid only for Gevent |
|
84 | 84 | #worker_connections = 10 |
|
85 | 85 | ## max number of requests that a worker will handle before being gracefully |
|
86 | 86 | ## restarted, could prevent memory leaks |
|
87 | 87 | #max_requests = 1000 |
|
88 | 88 | #max_requests_jitter = 30 |
|
89 | 89 | ## amount of time a worker can spend handling a request before it |
|
90 | 90 | ## gets killed and restarted. Set to 6hrs |
|
91 | 91 | #timeout = 21600 |
|
92 | 92 | |
|
93 | 93 | ## UWSGI ## |
|
94 | 94 | ## run with uwsgi --ini-paste-logged <inifile.ini> |
|
95 | 95 | #[uwsgi] |
|
96 | 96 | #socket = /tmp/uwsgi.sock |
|
97 | 97 | #master = true |
|
98 | 98 | #http = 127.0.0.1:5000 |
|
99 | 99 | |
|
100 | 100 | ## set as daemon and redirect all output to file |
|
101 | 101 | #daemonize = ./uwsgi_rhodecode.log |
|
102 | 102 | |
|
103 | 103 | ## master process PID |
|
104 | 104 | #pidfile = ./uwsgi_rhodecode.pid |
|
105 | 105 | |
|
106 | 106 | ## stats server with workers statistics, use uwsgitop |
|
107 | 107 | ## for monitoring, `uwsgitop 127.0.0.1:1717` |
|
108 | 108 | #stats = 127.0.0.1:1717 |
|
109 | 109 | #memory-report = true |
|
110 | 110 | |
|
111 | 111 | ## log 5XX errors |
|
112 | 112 | #log-5xx = true |
|
113 | 113 | |
|
114 | 114 | ## Set the socket listen queue size. |
|
115 | 115 | #listen = 256 |
|
116 | 116 | |
|
117 | 117 | ## Gracefully Reload workers after the specified amount of managed requests |
|
118 | 118 | ## (avoid memory leaks). |
|
119 | 119 | #max-requests = 1000 |
|
120 | 120 | |
|
121 | 121 | ## enable large buffers |
|
122 | 122 | #buffer-size=65535 |
|
123 | 123 | |
|
124 | 124 | ## socket and http timeouts ## |
|
125 | 125 | #http-timeout=3600 |
|
126 | 126 | #socket-timeout=3600 |
|
127 | 127 | |
|
128 | 128 | ## Log requests slower than the specified number of milliseconds. |
|
129 | 129 | #log-slow = 10 |
|
130 | 130 | |
|
131 | 131 | ## Exit if no app can be loaded. |
|
132 | 132 | #need-app = true |
|
133 | 133 | |
|
134 | 134 | ## Set lazy mode (load apps in workers instead of master). |
|
135 | 135 | #lazy = true |
|
136 | 136 | |
|
137 | 137 | ## scaling ## |
|
138 | 138 | ## set the cheaper algorithm to use; if not set, the default will be used |
|
139 | 139 | #cheaper-algo = spare |
|
140 | 140 | |
|
141 | 141 | ## minimum number of workers to keep at all times |
|
142 | 142 | #cheaper = 1 |
|
143 | 143 | |
|
144 | 144 | ## number of workers to spawn at startup |
|
145 | 145 | #cheaper-initial = 1 |
|
146 | 146 | |
|
147 | 147 | ## maximum number of workers that can be spawned |
|
148 | 148 | #workers = 4 |
|
149 | 149 | |
|
150 | 150 | ## how many workers should be spawned at a time |
|
151 | 151 | #cheaper-step = 1 |
|
152 | 152 | |
|
153 | 153 | ## prefix middleware for RhodeCode. |
|
154 | 154 | ## recommended when using a proxy setup. |

155 | 155 | ## allows serving RhodeCode under a URL prefix on the server. |
|
156 | 156 | ## eg https://server.com/<prefix>. Enable `filter-with =` option below as well. |
|
157 | 157 | ## optionally set prefix like: `prefix = /<your-prefix>` |
|
158 | 158 | [filter:proxy-prefix] |
|
159 | 159 | use = egg:PasteDeploy#prefix |
|
160 | 160 | prefix = / |
|
161 | 161 | |
|
162 | 162 | [app:main] |
|
163 | 163 | is_test = True |
|
164 | 164 | use = egg:rhodecode-enterprise-ce |
|
165 | 165 | |
|
166 | 166 | ## enable proxy prefix middleware, defined above |
|
167 | 167 | #filter-with = proxy-prefix |
|
168 | 168 | |
|
169 | 169 | |
|
170 | 170 | ## RHODECODE PLUGINS ## |
|
171 | 171 | rhodecode.includes = rhodecode.api |
|
172 | 172 | |
|
173 | 173 | # api prefix url |
|
174 | 174 | rhodecode.api.url = /_admin/api |
|
175 | 175 | |
|
176 | 176 | |
|
177 | 177 | ## END RHODECODE PLUGINS ## |
|
178 | 178 | |
|
179 | 179 | ## encryption key used to encrypt social plugin tokens, |
|
180 | 180 | ## remote_urls with credentials etc, if not set it defaults to |
|
181 | 181 | ## `beaker.session.secret` |
|
182 | 182 | #rhodecode.encrypted_values.secret = |
|
183 | 183 | |
|
184 | 184 | ## decryption strict mode (enabled by default). It controls if decryption raises |
|
185 | 185 | ## `SignatureVerificationError` in case of wrong key, or damaged encryption data. |
|
186 | 186 | #rhodecode.encrypted_values.strict = false |
|
187 | 187 | |
|
188 | 188 | ## return gzipped responses from RhodeCode (static files/application) |
|
189 | 189 | gzip_responses = false |
|
190 | 190 | |
|
191 | 191 | ## autogenerate javascript routes file on startup |
|
192 | 192 | generate_js_files = false |
|
193 | 193 | |
|
194 | 194 | ## Optional Languages |
|
195 | 195 | ## en(default), be, de, es, fr, it, ja, pl, pt, ru, zh |
|
196 | 196 | lang = en |
|
197 | 197 | |
|
198 | 198 | ## perform a full repository scan on each server start, this should be |
|
199 | 199 | ## set to false after first startup, to allow faster server restarts. |
|
200 | 200 | startup.import_repos = true |
|
201 | 201 | |
|
202 | 202 | ## Uncomment and set this path to use archive download cache. |
|
203 | 203 | ## Once enabled, generated archives will be cached at this location |
|
204 | 204 | ## and served from the cache during subsequent requests for the same archive of |
|
205 | 205 | ## the repository. |
|
206 | 206 | #archive_cache_dir = /tmp/tarballcache |
|
207 | 207 | |
|
208 | 208 | ## change this to a unique ID for security |
|
209 | 209 | app_instance_uuid = rc-production |
|
210 | 210 | |
|
211 | 211 | ## cut off limit for large diffs (size in bytes) |
|
212 | 212 | cut_off_limit_diff = 1024000 |
|
213 | 213 | cut_off_limit_file = 256000 |
|
214 | 214 | |
|
215 | 215 | ## use cache version of scm repo everywhere |
|
216 | 216 | vcs_full_cache = false |
|
217 | 217 | |
|
218 | 218 | ## force https in RhodeCode, fixes https redirects, assumes it's always https |
|
219 | 219 | ## Normally this is controlled by proper http flags sent from http server |
|
220 | 220 | force_https = false |
|
221 | 221 | |
|
222 | 222 | ## use Strict-Transport-Security headers |
|
223 | 223 | use_htsts = false |
|
224 | 224 | |
|
225 | 225 | ## number of commits stats will parse on each iteration |
|
226 | 226 | commit_parse_limit = 25 |
|
227 | 227 | |
|
228 | 228 | ## git rev filter option, --all is the default filter, if you need to |
|
229 | 229 | ## hide all refs in changelog switch this to --branches --tags |
|
230 | 230 | git_rev_filter = --all |
|
231 | 231 | |
|
232 | 232 | # Set to true if your repos are exposed using the dumb protocol |
|
233 | 233 | git_update_server_info = false |
|
234 | 234 | |
|
235 | 235 | ## RSS/ATOM feed options |
|
236 | 236 | rss_cut_off_limit = 256000 |
|
237 | 237 | rss_items_per_page = 10 |
|
238 | 238 | rss_include_diff = false |
|
239 | 239 | |
|
240 | 240 | ## gist URL alias, used to create nicer URLs for gists. This should be a |

241 | 241 | ## URL that rewrites to _admin/gists/<gistid>. |
|
242 | 242 | ## example: http://gist.rhodecode.org/{gistid}. Empty means use the internal |
|
243 | 243 | ## RhodeCode url, ie. http[s]://rhodecode.server/_admin/gists/<gistid> |
|
244 | 244 | gist_alias_url = |
|
245 | 245 | |
|
246 | 246 | ## List of controllers (using glob pattern syntax) that AUTH TOKENS can be |

247 | 247 | ## used for access. |

248 | 248 | ## Adding ?auth_token = <token> to the URL authenticates the request as if it |

249 | 249 | ## came from the logged-in user who owns this authentication token. |
|
250 | 250 | ## |
|
251 | 251 | ## Syntax is <ControllerClass>:<function_pattern>. |
|
252 | 252 | ## To enable access to raw_files put `FilesController:raw`. |
|
253 | 253 | ## To enable access to patches add `ChangesetController:changeset_patch`. |
|
254 | 254 | ## The list should be "," separated and on a single line. |
|
255 | 255 | ## |
|
256 | 256 | ## Recommended controllers to enable: |
|
257 | 257 | # ChangesetController:changeset_patch, |
|
258 | 258 | # ChangesetController:changeset_raw, |
|
259 | 259 | # FilesController:raw, |
|
260 | 260 | # FilesController:archivefile, |
|
261 | 261 | # GistsController:*, |
|
262 | 262 | api_access_controllers_whitelist = |
|
263 | 263 | |
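To illustrate how a whitelisted controller is used, here is a minimal sketch of a token-authenticated request; the host, repository and token below are hypothetical:

    import requests

    # fetch a raw file as the token's owner; FilesController:raw must be
    # present in api_access_controllers_whitelist for this to be allowed
    resp = requests.get(
        'https://rhodecode.example.com/myrepo/raw/tip/README.rst',
        params={'auth_token': 'secret-token'})
    print(resp.status_code)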
|
264 | 264 | ## default encoding used to convert from and to unicode |
|
265 | 265 | ## can also be a comma separated list of encodings in case of mixed encodings |
|
266 | 266 | default_encoding = UTF-8 |
|
267 | 267 | |
|
268 | 268 | ## instance-id prefix |
|
269 | 269 | ## a prefix key for this instance used for cache invalidation when running |
|
270 | 270 | ## multiple instances of rhodecode, make sure it's globally unique for |
|
271 | 271 | ## all running rhodecode instances. Leave empty if you don't use it |
|
272 | 272 | instance_id = |
|
273 | 273 | |
|
274 | 274 | ## Fallback authentication plugin. Set this to a plugin ID to force the usage |
|
275 | 275 | ## of an authentication plugin even if it is disabled by its settings. |
|
276 | 276 | ## This could be useful if you are unable to log in to the system due to broken |
|
277 | 277 | ## authentication settings. Then you can enable e.g. the internal rhodecode auth |
|
278 | 278 | ## module to log in again and fix the settings. |
|
279 | 279 | ## |
|
280 | 280 | ## Available builtin plugin IDs (hash is part of the ID): |
|
281 | 281 | ## egg:rhodecode-enterprise-ce#rhodecode |
|
282 | 282 | ## egg:rhodecode-enterprise-ce#pam |
|
283 | 283 | ## egg:rhodecode-enterprise-ce#ldap |
|
284 | 284 | ## egg:rhodecode-enterprise-ce#jasig_cas |
|
285 | 285 | ## egg:rhodecode-enterprise-ce#headers |
|
286 | 286 | ## egg:rhodecode-enterprise-ce#crowd |
|
287 | 287 | #rhodecode.auth_plugin_fallback = egg:rhodecode-enterprise-ce#rhodecode |
|
288 | 288 | |
|
289 | 289 | ## alternative return HTTP header for failed authentication. Default HTTP |
|
290 | 290 | ## response is 401 HTTPUnauthorized. Currently HG clients have trouble |

291 | 291 | ## handling that, causing a series of failed authentication calls. |
|
292 | 292 | ## Set this variable to 403 to return HTTPForbidden, or any other HTTP code |
|
293 | 293 | ## This will be served instead of the default 401 on bad authentication |
|
294 | 294 | auth_ret_code = |
|
295 | 295 | |
|
296 | 296 | ## use special detection method when serving auth_ret_code, instead of serving |
|
297 | 297 | ## ret_code directly, use 401 initially (which triggers a credentials prompt) |
|
298 | 298 | ## and then serve auth_ret_code to clients |
|
299 | 299 | auth_ret_code_detection = false |
|
300 | 300 | |
|
301 | 301 | ## locking return code. When repository is locked return this HTTP code. 2XX |
|
302 | 302 | ## codes don't break the transactions while 4XX codes do |
|
303 | 303 | lock_ret_code = 423 |
|
304 | 304 | |
|
305 | 305 | ## allows changing the repository location in the settings page |
|
306 | 306 | allow_repo_location_change = true |
|
307 | 307 | |
|
308 | 308 | ## allows setting up custom hooks in the settings page |
|
309 | 309 | allow_custom_hooks_settings = true |
|
310 | 310 | |
|
311 | 311 | ## generated license token, go to the license page in RhodeCode settings to |

312 | 312 | ## obtain a new token |
|
313 | 313 | license_token = abra-cada-bra1-rce3 |
|
314 | 314 | |
|
315 | 315 | ## supervisor connection uri, for managing supervisor and logs. |
|
316 | 316 | supervisor.uri = |
|
317 | 317 | ## supervisord group name/id that this RC instance should handle |
|
318 | 318 | supervisor.group_id = dev |
|
319 | 319 | |
|
320 | 320 | ## Display extended labs settings |
|
321 | 321 | labs_settings_active = true |
|
322 | 322 | |
|
323 | 323 | #################################### |
|
324 | 324 | ### CELERY CONFIG #### |
|
325 | 325 | #################################### |
|
326 | 326 | use_celery = false |
|
327 | 327 | broker.host = localhost |
|
328 | 328 | broker.vhost = rabbitmqhost |
|
329 | 329 | broker.port = 5672 |
|
330 | 330 | broker.user = rabbitmq |
|
331 | 331 | broker.password = qweqwe |
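The broker.* settings above are the components of a standard AMQP URL; composed from the placeholder values in this test config, it would look like this sketch:

    # amqp://<user>:<password>@<host>:<port>/<vhost>
    broker_url = 'amqp://rabbitmq:qweqwe@localhost:5672/rabbitmqhost'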
|
332 | 332 | |
|
333 | 333 | celery.imports = rhodecode.lib.celerylib.tasks |
|
334 | 334 | |
|
335 | 335 | celery.result.backend = amqp |
|
336 | 336 | celery.result.dburi = amqp:// |
|
337 | 337 | celery.result.serialier = json |
|
338 | 338 | |
|
339 | 339 | #celery.send.task.error.emails = true |
|
340 | 340 | #celery.amqp.task.result.expires = 18000 |
|
341 | 341 | |
|
342 | 342 | celeryd.concurrency = 2 |
|
343 | 343 | #celeryd.log.file = celeryd.log |
|
344 | 344 | celeryd.log.level = debug |
|
345 | 345 | celeryd.max.tasks.per.child = 1 |
|
346 | 346 | |
|
347 | 347 | ## tasks will never be sent to the queue, but executed locally instead. |
|
348 | 348 | celery.always.eager = false |
|
349 | 349 | |
|
350 | 350 | #################################### |
|
351 | 351 | ### BEAKER CACHE #### |
|
352 | 352 | #################################### |
|
353 | 353 | # default cache dir for templates. Putting this into a ramdisk |
|
354 | 354 | ## can boost performance, eg. %(here)s/data_ramdisk |
|
355 | 355 | cache_dir = %(here)s/data |
|
356 | 356 | |
|
357 | 357 | ## locking and default file storage for Beaker. Putting this into a ramdisk |
|
358 | 358 | ## can boost performance, eg. %(here)s/data_ramdisk/cache/beaker_data |
|
359 | 359 | beaker.cache.data_dir = %(here)s/rc/data/cache/beaker_data |
|
360 | 360 | beaker.cache.lock_dir = %(here)s/rc/data/cache/beaker_lock |
|
361 | 361 | |
|
362 | 362 | beaker.cache.regions = super_short_term, short_term, long_term, sql_cache_short, auth_plugins, repo_cache_long |
|
363 | 363 | |
|
364 | 364 | beaker.cache.super_short_term.type = memory |
|
365 | 365 | beaker.cache.super_short_term.expire = 1 |
|
366 | 366 | beaker.cache.super_short_term.key_length = 256 |
|
367 | 367 | |
|
368 | 368 | beaker.cache.short_term.type = memory |
|
369 | 369 | beaker.cache.short_term.expire = 60 |
|
370 | 370 | beaker.cache.short_term.key_length = 256 |
|
371 | 371 | |
|
372 | 372 | beaker.cache.long_term.type = memory |
|
373 | 373 | beaker.cache.long_term.expire = 36000 |
|
374 | 374 | beaker.cache.long_term.key_length = 256 |
|
375 | 375 | |
|
376 | 376 | beaker.cache.sql_cache_short.type = memory |
|
377 | 377 | beaker.cache.sql_cache_short.expire = 1 |
|
378 | 378 | beaker.cache.sql_cache_short.key_length = 256 |
|
379 | 379 | |
|
380 | 380 | ## default is memory cache, configure only if required |
|
381 | 381 | ## using multi-node or multi-worker setup |
|
382 | 382 | #beaker.cache.auth_plugins.type = ext:database |
|
383 | 383 | #beaker.cache.auth_plugins.lock_dir = %(here)s/data/cache/auth_plugin_lock |
|
384 | 384 | #beaker.cache.auth_plugins.url = postgresql://postgres:secret@localhost/rhodecode |
|
385 | 385 | #beaker.cache.auth_plugins.url = mysql://root:secret@127.0.0.1/rhodecode |
|
386 | 386 | #beaker.cache.auth_plugins.sa.pool_recycle = 3600 |
|
387 | 387 | #beaker.cache.auth_plugins.sa.pool_size = 10 |
|
388 | 388 | #beaker.cache.auth_plugins.sa.max_overflow = 0 |
|
389 | 389 | |
|
390 | 390 | beaker.cache.repo_cache_long.type = memorylru_base |
|
391 | 391 | beaker.cache.repo_cache_long.max_items = 4096 |
|
392 | 392 | beaker.cache.repo_cache_long.expire = 2592000 |
|
393 | 393 | |
|
394 | 394 | ## default is memorylru_base cache, configure only if required |
|
395 | 395 | ## using multi-node or multi-worker setup |
|
396 | 396 | #beaker.cache.repo_cache_long.type = ext:memcached |
|
397 | 397 | #beaker.cache.repo_cache_long.url = localhost:11211 |
|
398 | 398 | #beaker.cache.repo_cache_long.expire = 1209600 |
|
399 | 399 | #beaker.cache.repo_cache_long.key_length = 256 |
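These region settings follow Beaker's cache configuration scheme; a minimal sketch of how an equivalent set of options is consumed from Python (the region and function names are illustrative, not part of this config):

    from beaker.cache import CacheManager
    from beaker.util import parse_cache_config_options

    opts = {
        'cache.regions': 'short_term',
        'cache.short_term.type': 'memory',
        'cache.short_term.expire': '60',
    }
    cache = CacheManager(**parse_cache_config_options(opts))

    @cache.region('short_term', 'demo')
    def lookup(key):
        return key.upper()  # result cached for 60 seconds per key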
|
400 | 400 | |
|
401 | 401 | #################################### |
|
402 | 402 | ### BEAKER SESSION #### |
|
403 | 403 | #################################### |
|
404 | 404 | |
|
405 | 405 | ## .session.type is the type of storage used for the session, currently allowed |
|
406 | 406 | ## types are file, ext:memcached, ext:database, and memory (default). |
|
407 | 407 | beaker.session.type = file |
|
408 | 408 | beaker.session.data_dir = %(here)s/rc/data/sessions/data |
|
409 | 409 | |
|
410 | 410 | ## db based session, fast, and allows easy management of logged-in users |
|
411 | 411 | #beaker.session.type = ext:database |
|
412 | 412 | #beaker.session.table_name = db_session |
|
413 | 413 | #beaker.session.sa.url = postgresql://postgres:secret@localhost/rhodecode |
|
414 | 414 | #beaker.session.sa.url = mysql://root:secret@127.0.0.1/rhodecode |
|
415 | 415 | #beaker.session.sa.pool_recycle = 3600 |
|
416 | 416 | #beaker.session.sa.echo = false |
|
417 | 417 | |
|
418 | 418 | beaker.session.key = rhodecode |
|
419 | 419 | beaker.session.secret = test-rc-uytcxaz |
|
420 | 420 | beaker.session.lock_dir = %(here)s/rc/data/sessions/lock |
|
421 | 421 | |
|
422 | 422 | ## Secure encrypted cookie. Requires the AES python libraries |
|
423 | 423 | ## you must disable beaker.session.secret to use this |
|
424 | 424 | #beaker.session.encrypt_key = <key_for_encryption> |
|
425 | 425 | #beaker.session.validate_key = <validation_key> |
|
426 | 426 | |
|
427 | 427 | ## sets session as invalid (also logging out the user) if it has not been |

428 | 428 | ## accessed for the given amount of time in seconds |
|
429 | 429 | beaker.session.timeout = 2592000 |
|
430 | 430 | beaker.session.httponly = true |
|
431 | 431 | ## Path to use for the cookie. |
|
432 | 432 | #beaker.session.cookie_path = /<your-prefix> |
|
433 | 433 | |
|
434 | 434 | ## uncomment for https secure cookie |
|
435 | 435 | beaker.session.secure = false |
|
436 | 436 | |
|
437 | 437 | ## auto save the session so that calling .save() is not needed |
|
438 | 438 | beaker.session.auto = false |
|
439 | 439 | |
|
440 | 440 | ## default cookie expiration time in seconds, set to `true` to expire |
|
441 | 441 | ## at browser close |
|
442 | 442 | #beaker.session.cookie_expires = 3600 |
|
443 | 443 | |
|
444 | 444 | ################################### |
|
445 | 445 | ## SEARCH INDEXING CONFIGURATION ## |
|
446 | 446 | ################################### |
|
447 | 447 | ## Full text search indexer is available in rhodecode-tools under |
|
448 | 448 | ## `rhodecode-tools index` command |
|
449 | 449 | |
|
450 | 450 | # WHOOSH Backend, doesn't require additional services to run |
|
451 | 451 | # it works well with a few dozen repos |
|
452 | 452 | search.module = rhodecode.lib.index.whoosh |
|
453 | 453 | search.location = %(here)s/data/index |
|
454 | 454 | |
|
455 | 455 | ######################################## |
|
456 | 456 | ### CHANNELSTREAM CONFIG #### |
|
457 | 457 | ######################################## |
|
458 | 458 | ## channelstream enables persistent connections and live notifications |
|
459 | 459 | ## in the system. It's also used by the chat system |
|
460 | 460 | |
|
461 | 461 | channelstream.enabled = false |
|
462 | 462 | # location of channelstream server on the backend |
|
463 | 463 | channelstream.server = 127.0.0.1:9800 |
|
464 | 464 | ## location of the channelstream server from the outside world |

465 | 465 | ## most likely this would be a special backend URL on the http server that |

466 | 466 | ## handles websocket connections; see the nginx example for config |
|
467 | 467 | channelstream.ws_url = ws://rhodecode.yourserver.com/_channelstream |
|
468 | 468 | channelstream.secret = secret |
|
469 | 469 | channelstream.history.location = %(here)s/channelstream_history |
|
470 | 470 | |
|
471 | 471 | |
|
472 | 472 | ################################### |
|
473 | 473 | ## APPENLIGHT CONFIG ## |
|
474 | 474 | ################################### |
|
475 | 475 | |
|
476 | 476 | ## Appenlight is tailored to work with RhodeCode, see |
|
477 | 477 | ## http://appenlight.com for details on how to obtain an account |
|
478 | 478 | |
|
479 | 479 | ## appenlight integration enabled |
|
480 | 480 | appenlight = false |
|
481 | 481 | |
|
482 | 482 | appenlight.server_url = https://api.appenlight.com |
|
483 | 483 | appenlight.api_key = YOUR_API_KEY |
|
484 | 484 | #appenlight.transport_config = https://api.appenlight.com?threaded=1&timeout=5 |
|
485 | 485 | |
|
486 | 486 | # used for JS client |
|
487 | 487 | appenlight.api_public_key = YOUR_API_PUBLIC_KEY |
|
488 | 488 | |
|
489 | 489 | ## TWEAK AMOUNT OF INFO SENT HERE |
|
490 | 490 | |
|
491 | 491 | ## enables 404 error logging (default False) |
|
492 | 492 | appenlight.report_404 = false |
|
493 | 493 | |
|
494 | 494 | ## time in seconds after which a request is considered slow (default 1) |
|
495 | 495 | appenlight.slow_request_time = 1 |
|
496 | 496 | |
|
497 | 497 | ## record slow requests in application |
|
498 | 498 | ## (needs to be enabled for slow datastore recording and time tracking) |
|
499 | 499 | appenlight.slow_requests = true |
|
500 | 500 | |
|
501 | 501 | ## enable hooking to application loggers |
|
502 | 502 | appenlight.logging = true |
|
503 | 503 | |
|
504 | 504 | ## minimum log level for log capture |
|
505 | 505 | appenlight.logging.level = WARNING |
|
506 | 506 | |
|
507 | 507 | ## send logs only from erroneous/slow requests |
|
508 | 508 | ## (saves API quota for intensive logging) |
|
509 | 509 | appenlight.logging_on_error = false |
|
510 | 510 | |
|
511 | 511 | ## list of additional keywords that should be grabbed from the environ object |

512 | 512 | ## can be a string with a comma separated list of words in lowercase |

513 | 513 | ## (by default the client will always send the following info: |

514 | 514 | ## 'REMOTE_USER', 'REMOTE_ADDR', 'SERVER_NAME', 'CONTENT_TYPE' + all keys that |

515 | 515 | ## start with HTTP*); this list can be extended with additional keywords here |
|
516 | 516 | appenlight.environ_keys_whitelist = |
|
517 | 517 | |
|
518 | 518 | ## list of keywords that should be blanked from the request object |

519 | 519 | ## can be a string with a comma separated list of words in lowercase |

520 | 520 | ## (by default the client will always blank keys that contain the following words: |

521 | 521 | ## 'password', 'passwd', 'pwd', 'auth_tkt', 'secret', 'csrf'); |

522 | 522 | ## this list can be extended with additional keywords set here |
|
523 | 523 | appenlight.request_keys_blacklist = |
|
524 | 524 | |
|
525 | 525 | ## list of namespaces that should be ignored when gathering log entries |

526 | 526 | ## can be a string with a comma separated list of namespaces |

527 | 527 | ## (by default the client ignores its own entries: appenlight_client.client) |
|
528 | 528 | appenlight.log_namespace_blacklist = |
|
529 | 529 | |
|
530 | 530 | |
|
531 | 531 | ################################################################################ |
|
532 | 532 | ## WARNING: *THE LINE BELOW MUST BE UNCOMMENTED ON A PRODUCTION ENVIRONMENT* ## |
|
533 | 533 | ## Debug mode will enable the interactive debugging tool, allowing ANYONE to ## |
|
534 | 534 | ## execute malicious code after an exception is raised. ## |
|
535 | 535 | ################################################################################ |
|
536 | 536 | set debug = false |
|
537 | 537 | |
|
538 | 538 | |
|
539 | 539 | ############## |
|
540 | 540 | ## STYLING ## |
|
541 | 541 | ############## |
|
542 | 542 | debug_style = false |
|
543 | 543 | |
|
544 | 544 | ######################################################### |
|
545 | 545 | ### DB CONFIGS - EACH DB WILL HAVE ITS OWN CONFIG ### |
|
546 | 546 | ######################################################### |
|
547 | 547 | #sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode_test.db |
|
548 | 548 | #sqlalchemy.db1.url = postgresql://postgres:qweqwe@localhost/rhodecode_test |
|
549 | 549 | #sqlalchemy.db1.url = mysql://root:qweqwe@localhost/rhodecode_test |
|
550 | 550 | sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode_test.db |
|
551 | 551 | |
|
552 | 552 | # see sqlalchemy docs for other advanced settings |
|
553 | 553 | |
|
554 | 554 | ## print the sql statements to output |
|
555 | 555 | sqlalchemy.db1.echo = false |
|
556 | 556 | ## recycle the connections after this amount of seconds |
|
557 | 557 | sqlalchemy.db1.pool_recycle = 3600 |
|
558 | 558 | sqlalchemy.db1.convert_unicode = true |
|
559 | 559 | |
|
560 | 560 | ## the number of connections to keep open inside the connection pool. |
|
561 | 561 | ## 0 indicates no limit |
|
562 | 562 | #sqlalchemy.db1.pool_size = 5 |
|
563 | 563 | |
|
564 | 564 | ## the number of connections to allow in connection pool "overflow", that is |
|
565 | 565 | ## connections that can be opened above and beyond the pool_size setting, |
|
566 | 566 | ## which defaults to five. |
|
567 | 567 | #sqlalchemy.db1.max_overflow = 10 |
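The sqlalchemy.db1.* keys map directly onto SQLAlchemy engine options; a minimal sketch of turning such a settings dict into an engine (the dict literal is an assumption standing in for the parsed ini):

    from sqlalchemy import engine_from_config

    settings = {
        'sqlalchemy.db1.url': 'sqlite:///rhodecode_test.db',
        'sqlalchemy.db1.pool_recycle': '3600',
    }
    # strips the prefix and coerces known options (pool_recycle becomes int)
    engine = engine_from_config(settings, prefix='sqlalchemy.db1.')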
|
568 | 568 | |
|
569 | 569 | |
|
570 | 570 | ################## |
|
571 | 571 | ### VCS CONFIG ### |
|
572 | 572 | ################## |
|
573 | 573 | vcs.server.enable = true |
|
574 | 574 | vcs.server = localhost:9901 |
|
575 | 575 | |
|
576 | 576 | ## Web server connectivity protocol, responsible for web based VCS operations |
|
577 | 577 | ## Available protocols are: |
|
578 | ## `pyro4` - using pyro4 server | |
|
579 | 578 | ## `http` - using http-rpc backend |
|
580 | 579 | vcs.server.protocol = http |
|
581 | 580 | |
|
582 | 581 | ## Push/Pull operations protocol, available options are: |
|
583 | ## `pyro4` - using pyro4 server | |
|
584 | 582 | ## `rhodecode.lib.middleware.utils.scm_app_http` - Http based, recommended |
|
585 | 583 | ## `vcsserver.scm_app` - internal app (EE only) |
|
586 | 584 | vcs.scm_app_implementation = http |
|
587 | 585 | |
|
588 | 586 | ## Push/Pull operations hooks protocol, available options are: |
|
589 | ## `pyro4` - using pyro4 server | |
|
590 | 587 | ## `http` - using http-rpc backend |
|
591 | 588 | vcs.hooks.protocol = http |
|
592 | 589 | |
|
593 | 590 | vcs.server.log_level = debug |
|
594 | 591 | ## Start VCSServer with this instance as a subprocess, useful for development |
|
595 | 592 | vcs.start_server = false |
|
596 | 593 | |
|
597 | 594 | ## List of enabled VCS backends, available options are: |
|
598 | 595 | ## `hg` - mercurial |
|
599 | 596 | ## `git` - git |
|
600 | 597 | ## `svn` - subversion |
|
601 | 598 | vcs.backends = hg, git, svn |
|
602 | 599 | |
|
603 | 600 | vcs.connection_timeout = 3600 |
|
604 | 601 | ## Compatibility version when creating SVN repositories. Defaults to newest version when commented out. |
|
605 | 602 | ## Available options are: pre-1.4-compatible, pre-1.5-compatible, pre-1.6-compatible, pre-1.8-compatible |
|
606 | 603 | #vcs.svn.compatible_version = pre-1.8-compatible |
|
607 | 604 | |
|
608 | 605 | |
|
609 | 606 | ############################################################ |
|
610 | 607 | ### Subversion proxy support (mod_dav_svn) ### |
|
611 | 608 | ### Maps RhodeCode repo groups into SVN paths for Apache ### |
|
612 | 609 | ############################################################ |
|
613 | 610 | ## Enable or disable the config file generation. |
|
614 | 611 | svn.proxy.generate_config = false |
|
615 | 612 | ## Generate config file with `SVNListParentPath` set to `On`. |
|
616 | 613 | svn.proxy.list_parent_path = true |
|
617 | 614 | ## Set location and file name of generated config file. |
|
618 | 615 | svn.proxy.config_file_path = %(here)s/mod_dav_svn.conf |
|
619 | 616 | ## File system path to the directory containing the repositories served by |
|
620 | 617 | ## RhodeCode. |
|
621 | 618 | svn.proxy.parent_path_root = /path/to/repo_store |
|
622 | 619 | ## Used as a prefix to the <Location> block in the generated config file. In |
|
623 | 620 | ## most cases it should be set to `/`. |
|
624 | 621 | svn.proxy.location_root = / |
|
625 | 622 | |
|
626 | 623 | |
|
627 | 624 | ################################ |
|
628 | 625 | ### LOGGING CONFIGURATION #### |
|
629 | 626 | ################################ |
|
630 | 627 | [loggers] |
|
631 | keys = root, routes, rhodecode, sqlalchemy, beaker, templates, pyro4 | |
|
628 | keys = root, routes, rhodecode, sqlalchemy, beaker, templates | |
|
632 | 629 | |
|
633 | 630 | [handlers] |
|
634 | 631 | keys = console, console_sql |
|
635 | 632 | |
|
636 | 633 | [formatters] |
|
637 | 634 | keys = generic, color_formatter, color_formatter_sql |
|
638 | 635 | |
|
639 | 636 | ############# |
|
640 | 637 | ## LOGGERS ## |
|
641 | 638 | ############# |
|
642 | 639 | [logger_root] |
|
643 | 640 | level = NOTSET |
|
644 | 641 | handlers = console |
|
645 | 642 | |
|
646 | 643 | [logger_routes] |
|
647 | 644 | level = DEBUG |
|
648 | 645 | handlers = |
|
649 | 646 | qualname = routes.middleware |
|
650 | 647 | ## "level = DEBUG" logs the route matched and routing variables. |
|
651 | 648 | propagate = 1 |
|
652 | 649 | |
|
653 | 650 | [logger_beaker] |
|
654 | 651 | level = DEBUG |
|
655 | 652 | handlers = |
|
656 | 653 | qualname = beaker.container |
|
657 | 654 | propagate = 1 |
|
658 | 655 | |
|
659 | [logger_pyro4] | |
|
660 | level = DEBUG | |
|
661 | handlers = | |
|
662 | qualname = Pyro4 | |
|
663 | propagate = 1 | |
|
664 | ||
|
665 | 656 | [logger_templates] |
|
666 | 657 | level = INFO |
|
667 | 658 | handlers = |
|
668 | 659 | qualname = pylons.templating |
|
669 | 660 | propagate = 1 |
|
670 | 661 | |
|
671 | 662 | [logger_rhodecode] |
|
672 | 663 | level = DEBUG |
|
673 | 664 | handlers = |
|
674 | 665 | qualname = rhodecode |
|
675 | 666 | propagate = 1 |
|
676 | 667 | |
|
677 | 668 | [logger_sqlalchemy] |
|
678 | 669 | level = ERROR |
|
679 | 670 | handlers = console_sql |
|
680 | 671 | qualname = sqlalchemy.engine |
|
681 | 672 | propagate = 0 |
|
682 | 673 | |
|
683 | 674 | ############## |
|
684 | 675 | ## HANDLERS ## |
|
685 | 676 | ############## |
|
686 | 677 | |
|
687 | 678 | [handler_console] |
|
688 | 679 | class = StreamHandler |
|
689 | 680 | args = (sys.stderr,) |
|
690 | 681 | level = DEBUG |
|
691 | 682 | formatter = generic |
|
692 | 683 | |
|
693 | 684 | [handler_console_sql] |
|
694 | 685 | class = StreamHandler |
|
695 | 686 | args = (sys.stderr,) |
|
696 | 687 | level = WARN |
|
697 | 688 | formatter = generic |
|
698 | 689 | |
|
699 | 690 | ################ |
|
700 | 691 | ## FORMATTERS ## |
|
701 | 692 | ################ |
|
702 | 693 | |
|
703 | 694 | [formatter_generic] |
|
704 | class = rhodecode.lib.logging_formatter.Pyro4AwareFormatter | |
|
695 | class = rhodecode.lib.logging_formatter.ExceptionAwareFormatter | |
|
705 | 696 | format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s |
|
706 | 697 | datefmt = %Y-%m-%d %H:%M:%S |
|
707 | 698 | |
|
708 | 699 | [formatter_color_formatter] |
|
709 | 700 | class = rhodecode.lib.logging_formatter.ColorFormatter |
|
710 | 701 | format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s |
|
711 | 702 | datefmt = %Y-%m-%d %H:%M:%S |
|
712 | 703 | |
|
713 | 704 | [formatter_color_formatter_sql] |
|
714 | 705 | class = rhodecode.lib.logging_formatter.ColorFormatterSql |
|
715 | 706 | format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s |
|
716 | 707 | datefmt = %Y-%m-%d %H:%M:%S |
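The [loggers], [handlers] and [formatters] sections above follow the stdlib logging.config file format, which is also what the `gunicorn --log-config` invocation mentioned earlier consumes; a minimal sketch of loading them directly (the file name is hypothetical):

    import logging.config

    logging.config.fileConfig(
        'rhodecode_test.ini', disable_existing_loggers=False)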
@@ -1,100 +1,96 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2010-2017 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | import logging |
|
22 | 22 | |
|
23 | 23 | import mock |
|
24 | 24 | import msgpack |
|
25 | 25 | import pytest |
|
26 | 26 | |
|
27 | 27 | from rhodecode.lib import vcs |
|
28 | 28 | from rhodecode.lib.vcs import client_http |
|
29 | 29 | |
|
30 | 30 | |
|
31 | 31 | def test_uses_persistent_http_connections(caplog, vcsbackend_hg): |
|
32 | 32 | repo = vcsbackend_hg.repo |
|
33 | 33 | remote_call = repo._remote.branches |
|
34 | 34 | |
|
35 | 35 | with caplog.at_level(logging.INFO): |
|
36 | 36 | for x in range(5): |
|
37 | 37 | remote_call(normal=True, closed=False) |
|
38 | 38 | |
|
39 | 39 | new_connections = [ |
|
40 | 40 | r for r in caplog.record_tuples if is_new_connection(*r)] |
|
41 | 41 | assert len(new_connections) <= 1 |
|
42 | 42 | |
|
43 | 43 | |
|
44 | 44 | def is_new_connection(logger, level, message): |
|
45 | 45 | return ( |
|
46 | 46 | logger == 'requests.packages.urllib3.connectionpool' and |
|
47 | 47 | message.startswith('Starting new HTTP')) |
|
48 | 48 | |
|
49 | 49 | |
|
50 | 50 | @pytest.fixture |
|
51 | 51 | def stub_session(): |
|
52 | 52 | """ |
|
53 | 53 | Stub of `requests.Session()`. |
|
54 | 54 | """ |
|
55 | 55 | session = mock.Mock() |
|
56 | 56 | session.post().content = msgpack.packb({}) |
|
57 | 57 | session.reset_mock() |
|
58 | 58 | return session |
|
59 | 59 | |
|
60 | 60 | |
|
61 | 61 | @pytest.fixture |
|
62 | 62 | def stub_session_factory(stub_session): |
|
63 | 63 | """ |
|
64 | 64 | Stub of `rhodecode.lib.vcs.client_http.ThreadlocalSessionFactory`. |
|
65 | 65 | """ |
|
66 | 66 | session_factory = mock.Mock() |
|
67 | 67 | session_factory.return_value = stub_session |
|
68 | 68 | return session_factory |
|
69 | 69 | |
|
70 | 70 | |
|
71 | 71 | def test_repo_maker_uses_session_for_classmethods(stub_session_factory): |
|
72 | 72 | repo_maker = client_http.RepoMaker( |
|
73 | 73 | 'server_and_port', 'endpoint', 'test_dummy_scm', stub_session_factory) |
|
74 | 74 | repo_maker.example_call() |
|
75 | 75 | stub_session_factory().post.assert_called_with( |
|
76 | 76 | 'http://server_and_port/endpoint', data=mock.ANY) |
|
77 | 77 | |
|
78 | 78 | |
|
79 | 79 | def test_repo_maker_uses_session_for_instance_methods( |
|
80 | 80 | stub_session_factory, config): |
|
81 | 81 | repo_maker = client_http.RepoMaker( |
|
82 | 82 | 'server_and_port', 'endpoint', 'test_dummy_scm', stub_session_factory) |
|
83 | 83 | repo = repo_maker('stub_path', config) |
|
84 | 84 | repo.example_call() |
|
85 | 85 | stub_session_factory().post.assert_called_with( |
|
86 | 86 | 'http://server_and_port/endpoint', data=mock.ANY) |
|
87 | 87 | |
|
88 | 88 | |
|
89 | 89 | @mock.patch('rhodecode.lib.vcs.client_http.ThreadlocalSessionFactory') |
|
90 | 90 | @mock.patch('rhodecode.lib.vcs.connection') |
|
91 | 91 | def test_connect_passes_in_the_same_session( |
|
92 | 92 | connection, session_factory_class, stub_session): |
|
93 | 93 | session_factory = session_factory_class.return_value |
|
94 | 94 | session_factory.return_value = stub_session |
|
95 | 95 | |
|
96 | 96 | vcs.connect_http('server_and_port') |
|
97 | ||
|
98 | assert connection.Hg._session_factory() == stub_session | |
|
99 | assert connection.Svn._session_factory() == stub_session | |
|
100 | assert connection.Git._session_factory() == stub_session |
@@ -1,189 +1,184 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2016-2017 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | import pytest |
|
22 | 22 | |
|
23 | 23 | from mock import call, patch |
|
24 | 24 | |
|
25 | 25 | from rhodecode.lib.vcs.backends.base import Reference |
|
26 | 26 | |
|
27 | 27 | |
|
28 | 28 | class TestMercurialRemoteRepoInvalidation(object): |
|
29 | 29 | """ |
|
30 | 30 | If the VCSServer is running with multiple processes and/or instances, |

31 | 31 | operations on repositories are potentially handled by different processes |
|
32 | 32 | in a random fashion. The mercurial repository objects used in the VCSServer |
|
33 | 33 | are caching the commits of the repo. Therefore we have to invalidate the |
|
34 | 34 | VCSServer caching of these objects after a writing operation. |
|
35 | 35 | """ |
|
36 | 36 | |
|
37 | 37 | # Default reference used as a dummy during tests. |
|
38 | 38 | default_ref = Reference('branch', 'default', None) |
|
39 | 39 | |
|
40 | 40 | # Methods of vcsserver.hg.HgRemote that are "writing" operations. |
|
41 | 41 | writing_methods = [ |
|
42 | 42 | 'bookmark', |
|
43 | 43 | 'commit', |
|
44 | 44 | 'merge', |
|
45 | 45 | 'pull', |
|
46 | 46 | 'pull_cmd', |
|
47 | 47 | 'rebase', |
|
48 | 48 | 'strip', |
|
49 | 49 | 'tag', |
|
50 | 50 | ] |
|
51 | 51 | |
|
52 | 52 | @pytest.mark.parametrize('method_name, method_args', [ |
|
53 | 53 | ('_local_merge', [default_ref, None, None, None, default_ref]), |
|
54 | 54 | ('_local_pull', ['', default_ref]), |
|
55 | 55 | ('bookmark', [None]), |
|
56 | 56 | ('pull', ['', default_ref]), |
|
57 | 57 | ('remove_tag', ['mytag', None]), |
|
58 | 58 | ('strip', [None]), |
|
59 | 59 | ('tag', ['newtag', None]), |
|
60 | 60 | ]) |
|
61 | 61 | def test_method_invokes_invalidate_on_remote_repo( |
|
62 | 62 | self, method_name, method_args, backend_hg): |
|
63 | 63 | """ |
|
64 | 64 | Check that the listed methods are invalidating the VCSServer cache |
|
65 | 65 | after invoking a writing method of their remote repository object. |
|
66 | 66 | """ |
|
67 | 67 | tags = {'mytag': 'mytag-id'} |
|
68 | 68 | |
|
69 | 69 | def add_tag(name, raw_id, *args, **kwds): |
|
70 | 70 | tags[name] = raw_id |
|
71 | 71 | |
|
72 | 72 | repo = backend_hg.repo.scm_instance() |
|
73 | 73 | with patch.object(repo, '_remote') as remote: |
|
74 | 74 | remote.lookup.return_value = ('commit-id', 'commit-idx') |
|
75 | 75 | remote.tags.return_value = tags |
|
76 | 76 | remote._get_tags.return_value = tags |
|
77 | 77 | remote.tag.side_effect = add_tag |
|
78 | 78 | |
|
79 | 79 | # Invoke method. |
|
80 | 80 | method = getattr(repo, method_name) |
|
81 | 81 | method(*method_args) |
|
82 | 82 | |
|
83 | 83 | # Assert that every "writing" method is followed by an invocation |
|
84 | 84 | # of the cache invalidation method. |
|
85 | 85 | for counter, method_call in enumerate(remote.method_calls): |
|
86 | 86 | call_name = method_call[0] |
|
87 | 87 | if call_name in self.writing_methods: |
|
88 | 88 | next_call = remote.method_calls[counter + 1] |
|
89 | 89 | assert next_call == call.invalidate_vcs_cache() |
|
90 | 90 | |
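The loop above checks the invariant from the class docstring: every writing call on the remote repository must be followed immediately by a cache invalidation. A minimal sketch of the pattern being asserted (the wrapper class is illustrative, not actual RhodeCode code):

    class RemoteRepoSketch(object):
        """Hypothetical wrapper showing the write-then-invalidate pairing."""

        def __init__(self, remote):
            self._remote = remote

        def tag(self, name, commit_id):
            self._remote.tag(name, commit_id)        # writing operation
            self._remote.invalidate_vcs_cache()      # must follow immediately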
|
91 | 91 | def _prepare_shadow_repo(self, pull_request): |
|
92 | 92 | """ |
|
93 | 93 | Helper that creates a shadow repo that can be used to reproduce the |
|
94 | 94 | CommitDoesNotExistError when pulling in from target and source |
|
95 | 95 | references. |
|
96 | 96 | """ |
|
97 | 97 | from rhodecode.model.pull_request import PullRequestModel |
|
98 | 98 | |
|
99 | 99 | target_vcs = pull_request.target_repo.scm_instance() |
|
100 | 100 | target_ref = pull_request.target_ref_parts |
|
101 | 101 | source_ref = pull_request.source_ref_parts |
|
102 | 102 | |
|
103 | 103 | # Create shadow repository. |
|
104 | 104 | pr = PullRequestModel() |
|
105 | 105 | workspace_id = pr._workspace_id(pull_request) |
|
106 | 106 | shadow_repository_path = target_vcs._maybe_prepare_merge_workspace( |
|
107 | 107 | workspace_id, target_ref) |
|
108 | 108 | shadow_repo = target_vcs._get_shadow_instance(shadow_repository_path) |
|
109 | 109 | |
|
110 | 110 | # This will populate the cache of the mercurial repository object |
|
111 | 111 | # inside of the VCSServer. |
|
112 | 112 | shadow_repo.get_commit() |
|
113 | 113 | |
|
114 | 114 | return shadow_repo, source_ref, target_ref |
|
115 | 115 | |
|
116 | 116 | @pytest.mark.backends('hg') |
|
117 | 117 | def test_commit_does_not_exist_error_happens(self, pr_util, pylonsapp): |
|
118 | 118 | """ |
|
119 | 119 | This test is somewhat special. It does not really test the system; |

120 | 120 | instead it is more or less a precondition for the |
|
121 | 121 | "test_commit_does_not_exist_error_does_not_happen". It deactivates the |
|
122 | 122 | cache invalidation and asserts that the error occurs. |
|
123 | 123 | """ |
|
124 | 124 | from rhodecode.lib.vcs.exceptions import CommitDoesNotExistError |
|
125 | 125 | |
|
126 | if pylonsapp.config['vcs.server.protocol'] == 'pyro4': | |
|
127 | pytest.skip('Test is intended for the HTTP protocol only.') | |
|
128 | ||
|
129 | 126 | pull_request = pr_util.create_pull_request() |
|
130 | 127 | target_vcs = pull_request.target_repo.scm_instance() |
|
131 | 128 | source_vcs = pull_request.source_repo.scm_instance() |
|
132 | 129 | shadow_repo, source_ref, target_ref = self._prepare_shadow_repo( |
|
133 | 130 | pull_request) |
|
134 | 131 | |
|
135 | 132 | # Pull from target and source references but without invalidation of |
|
136 | 133 | # RemoteRepo objects and without VCSServer caching of mercurial |
|
137 | 134 | # repository objects. |
|
138 | 135 | with patch.object(shadow_repo._remote, 'invalidate_vcs_cache'): |
|
139 | 136 | # NOTE: Do not use patch.dict() to disable the cache because it |
|
140 | 137 | # restores the WHOLE dict and not only the patched keys. |
|
141 | 138 | shadow_repo._remote._wire['cache'] = False |
|
142 | 139 | shadow_repo._local_pull(target_vcs.path, target_ref) |
|
143 | 140 | shadow_repo._local_pull(source_vcs.path, source_ref) |
|
144 | 141 | shadow_repo._remote._wire.pop('cache') |
|
145 | 142 | |
|
146 | 143 | # Try to lookup the target_ref in shadow repo. This should work because |
|
147 | 144 | # the shadow repo is a clone of the target and always contains all of |

148 | 145 | # its commits in the initial cache. |
|
149 | 146 | shadow_repo.get_commit(target_ref.commit_id) |
|
150 | 147 | |
|
151 | 148 | # If we try to lookup the source_ref it should fail because the shadow |
|
152 | 149 | # repo commit cache doesn't get invalidated. (Due to patched |
|
153 | 150 | # invalidation and caching above). |
|
154 | 151 | with pytest.raises(CommitDoesNotExistError): |
|
155 | 152 | shadow_repo.get_commit(source_ref.commit_id) |
|
156 | 153 | |
|
157 | 154 | @pytest.mark.backends('hg') |
|
158 | 155 | def test_commit_does_not_exist_error_does_not_happen( |
|
159 | 156 | self, pr_util, pylonsapp): |
|
160 | 157 | """ |
|
161 | 158 | This test simulates a pull request merge in which the pull operations |
|
162 | 159 | are handled by a different VCSServer process than all other operations. |
|
163 | 160 | Without correct cache invalidation this leads to an error when |
|
164 | 161 | retrieving the pulled commits afterwards. |
|
165 | 162 | """ |
|
166 | if pylonsapp.config['vcs.server.protocol'] == 'pyro4': | |
|
167 | pytest.skip('Test is intended for the HTTP protocol only.') | |
|
168 | 163 | |
|
169 | 164 | pull_request = pr_util.create_pull_request() |
|
170 | 165 | target_vcs = pull_request.target_repo.scm_instance() |
|
171 | 166 | source_vcs = pull_request.source_repo.scm_instance() |
|
172 | 167 | shadow_repo, source_ref, target_ref = self._prepare_shadow_repo( |
|
173 | 168 | pull_request) |
|
174 | 169 | |
|
175 | 170 | # Pull from target and source references without VCSServer |
|
176 | 171 | # caching of mercurial repository objects but with active invalidation |
|
177 | 172 | # of RemoteRepo objects. |
|
178 | 173 | # NOTE: Do not use patch.dict() to disable the cache because it |
|
179 | 174 | # restores the WHOLE dict and not only the patched keys. |
|
180 | 175 | shadow_repo._remote._wire['cache'] = False |
|
181 | 176 | shadow_repo._local_pull(target_vcs.path, target_ref) |
|
182 | 177 | shadow_repo._local_pull(source_vcs.path, source_ref) |
|
183 | 178 | shadow_repo._remote._wire.pop('cache') |
|
184 | 179 | |
|
185 | 180 | # Try to lookup the target and source references in shadow repo. This |
|
186 | 181 | # should work because the RemoteRepo object gets invalidated during the |
|
187 | 182 | # above pull operations. |
|
188 | 183 | shadow_repo.get_commit(target_ref.commit_id) |
|
189 | 184 | shadow_repo.get_commit(source_ref.commit_id) |
@@ -1,83 +1,77 b'' | |||
|
1 | 1 | ################################################################################ |
|
2 | 2 | # RhodeCode VCSServer with HTTP Backend - configuration # |
|
3 | 3 | # # |
|
4 | 4 | ################################################################################ |
|
5 | 5 | |
|
6 | 6 | [app:main] |
|
7 | 7 | use = egg:rhodecode-vcsserver |
|
8 | 8 | |
|
9 | 9 | pyramid.default_locale_name = en |
|
10 | 10 | pyramid.includes = |
|
11 | 11 | pyramid.reload_templates = true |
|
12 | 12 | |
|
13 | 13 | # default locale used by VCS systems |
|
14 | 14 | locale = en_US.UTF-8 |
|
15 | 15 | |
|
16 | 16 | # cache regions, please don't change |
|
17 | 17 | beaker.cache.regions = repo_object |
|
18 | 18 | beaker.cache.repo_object.type = memorylru |
|
19 | 19 | beaker.cache.repo_object.max_items = 100 |
|
20 | 20 | # cache auto-expires after N seconds |
|
21 | 21 | beaker.cache.repo_object.expire = 300 |
|
22 | 22 | beaker.cache.repo_object.enabled = true |
|
23 | 23 | |
|
24 | 24 | [server:main] |
|
25 | 25 | use = egg:waitress#main |
|
26 | 26 | host = 127.0.0.1 |
|
27 | 27 | port = 9900 |
|
28 | 28 | |
|
29 | 29 | ################################ |
|
30 | 30 | ### LOGGING CONFIGURATION #### |
|
31 | 31 | ################################ |
|
32 | 32 | [loggers] |
|
33 | keys = root, vcsserver, beaker, pyro4 | |
|
33 | keys = root, vcsserver, beaker | |
|
34 | 34 | |
|
35 | 35 | [handlers] |
|
36 | 36 | keys = console |
|
37 | 37 | |
|
38 | 38 | [formatters] |
|
39 | 39 | keys = generic |
|
40 | 40 | |
|
41 | 41 | ############# |
|
42 | 42 | ## LOGGERS ## |
|
43 | 43 | ############# |
|
44 | 44 | [logger_root] |
|
45 | 45 | level = NOTSET |
|
46 | 46 | handlers = console |
|
47 | 47 | |
|
48 | 48 | [logger_vcsserver] |
|
49 | 49 | level = DEBUG |
|
50 | 50 | handlers = |
|
51 | 51 | qualname = vcsserver |
|
52 | 52 | propagate = 1 |
|
53 | 53 | |
|
54 | 54 | [logger_beaker] |
|
55 | 55 | level = DEBUG |
|
56 | 56 | handlers = |
|
57 | 57 | qualname = beaker |
|
58 | 58 | propagate = 1 |
|
59 | 59 | |
|
60 | [logger_pyro4] | |
|
61 | level = DEBUG | |
|
62 | handlers = | |
|
63 | qualname = Pyro4 | |
|
64 | propagate = 1 | |
|
65 | ||
|
66 | 60 | |
|
67 | 61 | ############## |
|
68 | 62 | ## HANDLERS ## |
|
69 | 63 | ############## |
|
70 | 64 | |
|
71 | 65 | [handler_console] |
|
72 | 66 | class = StreamHandler |
|
73 | 67 | args = (sys.stderr,) |
|
74 | 68 | level = INFO |
|
75 | 69 | formatter = generic |
|
76 | 70 | |
|
77 | 71 | ################ |
|
78 | 72 | ## FORMATTERS ## |
|
79 | 73 | ################ |
|
80 | 74 | |
|
81 | 75 | [formatter_generic] |
|
82 | 76 | format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s |
|
83 | 77 | datefmt = %Y-%m-%d %H:%M:%S |
@@ -1,255 +1,254 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2010-2017 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | # Import early to make sure things are patched up properly |
|
22 | 22 | from setuptools import setup, find_packages |
|
23 | 23 | |
|
24 | 24 | import os |
|
25 | 25 | import sys |
|
26 | 26 | import pkgutil |
|
27 | 27 | import platform |
|
28 | 28 | |
|
29 | 29 | from pip.download import PipSession |
|
30 | 30 | from pip.req import parse_requirements |
|
31 | 31 | |
|
32 | 32 | from codecs import open |
|
33 | 33 | |
|
34 | 34 | |
|
35 | 35 | if sys.version_info < (2, 7): |
|
36 | 36 | raise Exception('RhodeCode requires Python 2.7 or later') |
|
37 | 37 | |
|
38 | 38 | here = os.path.abspath(os.path.dirname(__file__)) |
|
39 | 39 | |
|
40 | 40 | # defines current platform |
|
41 | 41 | __platform__ = platform.system() |
|
42 | 42 | __license__ = 'AGPLv3, and Commercial License' |
|
43 | 43 | __author__ = 'RhodeCode GmbH' |
|
44 | 44 | __url__ = 'https://code.rhodecode.com' |
|
45 | 45 | is_windows = __platform__ in ('Windows',) |
|
46 | 46 | |
|
47 | 47 | |
|
48 | 48 | def _get_requirements(req_filename, exclude=None, extras=None): |
|
49 | 49 | extras = extras or [] |
|
50 | 50 | exclude = exclude or [] |
|
51 | 51 | |
|
52 | 52 | try: |
|
53 | 53 | parsed = parse_requirements( |
|
54 | 54 | os.path.join(here, req_filename), session=PipSession()) |
|
55 | 55 | except TypeError: |
|
56 | 56 | # try pip < 6.0.0, that doesn't support session |
|
57 | 57 | parsed = parse_requirements(os.path.join(here, req_filename)) |
|
58 | 58 | |
|
59 | 59 | requirements = [] |
|
60 | 60 | for ir in parsed: |
|
61 | 61 | if ir.req and ir.name not in exclude: |
|
62 | 62 | requirements.append(str(ir.req)) |
|
63 | 63 | return requirements + extras |
|
64 | 64 | |
|
65 | 65 | |
|
66 | 66 | # requirements extract |
|
67 | 67 | setup_requirements = ['PasteScript', 'pytest-runner'] |
|
68 | 68 | install_requirements = _get_requirements( |
|
69 | 69 | 'requirements.txt', exclude=['setuptools']) |
|
70 | 70 | test_requirements = _get_requirements( |
|
71 | 71 | 'requirements_test.txt', extras=['configobj']) |
|
72 | 72 | |
|
73 | 73 | install_requirements = [ |
|
74 | 74 | 'Babel', |
|
75 | 75 | 'Beaker', |
|
76 | 76 | 'FormEncode', |
|
77 | 77 | 'Mako', |
|
78 | 78 | 'Markdown', |
|
79 | 79 | 'MarkupSafe', |
|
80 | 80 | 'MySQL-python', |
|
81 | 81 | 'Paste', |
|
82 | 82 | 'PasteDeploy', |
|
83 | 83 | 'PasteScript', |
|
84 | 84 | 'Pygments', |
|
85 | 85 | 'pygments-markdown-lexer', |
|
86 | 86 | 'Pylons', |
|
87 | 'Pyro4', | |
|
88 | 87 | 'Routes', |
|
89 | 88 | 'SQLAlchemy', |
|
90 | 89 | 'Tempita', |
|
91 | 90 | 'URLObject', |
|
92 | 91 | 'WebError', |
|
93 | 92 | 'WebHelpers', |
|
94 | 93 | 'WebHelpers2', |
|
95 | 94 | 'WebOb', |
|
96 | 95 | 'WebTest', |
|
97 | 96 | 'Whoosh', |
|
98 | 97 | 'alembic', |
|
99 | 98 | 'amqplib', |
|
100 | 99 | 'anyjson', |
|
101 | 100 | 'appenlight-client', |
|
102 | 101 | 'authomatic', |
|
103 | 102 | 'backport_ipaddress', |
|
104 | 103 | 'celery', |
|
105 | 104 | 'channelstream', |
|
106 | 105 | 'colander', |
|
107 | 106 | 'decorator', |
|
108 | 107 | 'deform', |
|
109 | 108 | 'docutils', |
|
110 | 109 | 'gevent', |
|
111 | 110 | 'gunicorn', |
|
112 | 111 | 'infrae.cache', |
|
113 | 112 | 'ipython', |
|
114 | 113 | 'iso8601', |
|
115 | 114 | 'kombu', |
|
116 | 115 | 'msgpack-python', |
|
117 | 116 | 'packaging', |
|
118 | 117 | 'psycopg2', |
|
119 | 118 | 'py-gfm', |
|
120 | 119 | 'pycrypto', |
|
121 | 120 | 'pycurl', |
|
122 | 121 | 'pyparsing', |
|
123 | 122 | 'pyramid', |
|
124 | 123 | 'pyramid-debugtoolbar', |
|
125 | 124 | 'pyramid-mako', |
|
126 | 125 | 'pyramid-beaker', |
|
127 | 126 | 'pysqlite', |
|
128 | 127 | 'python-dateutil', |
|
129 | 128 | 'python-ldap', |
|
130 | 129 | 'python-memcached', |
|
131 | 130 | 'python-pam', |
|
132 | 131 | 'recaptcha-client', |
|
133 | 132 | 'repoze.lru', |
|
134 | 133 | 'requests', |
|
135 | 134 | 'simplejson', |
|
136 | 135 | 'subprocess32', |
|
137 | 136 | 'waitress', |
|
138 | 137 | 'zope.cachedescriptors', |
|
139 | 138 | 'dogpile.cache', |
|
140 | 139 | 'dogpile.core', |
|
141 | 140 | 'psutil', |
|
142 | 141 | 'py-bcrypt', |
|
143 | 142 | ] |
|
144 | 143 | |
|
145 | 144 | |
|
146 | 145 | def get_version(): |
|
147 | 146 | version = pkgutil.get_data('rhodecode', 'VERSION') |
|
148 | 147 | return version.strip() |
|
149 | 148 | |
|
150 | 149 | |
|
151 | 150 | # additional files that go into the package itself |
|
152 | 151 | package_data = { |
|
153 | 152 | '': ['*.txt', '*.rst'], |
|
154 | 153 | 'configs': ['*.ini'], |
|
155 | 154 | 'rhodecode': ['VERSION', 'i18n/*/LC_MESSAGES/*.mo', ], |
|
156 | 155 | } |
|
157 | 156 | |
|
158 | 157 | description = 'Source Code Management Platform' |
|
159 | 158 | keywords = ' '.join([ |
|
160 | 159 | 'rhodecode', 'mercurial', 'git', 'svn', |
|
161 | 160 | 'code review', |
|
162 | 161 | 'repo groups', 'ldap', 'repository management', 'hgweb', |
|
163 | 162 | 'hgwebdir', 'gitweb', 'serving hgweb', |
|
164 | 163 | ]) |
|
165 | 164 | |
|
166 | 165 | |
|
167 | 166 | # README/DESCRIPTION generation |
|
168 | 167 | readme_file = 'README.rst' |
|
169 | 168 | changelog_file = 'CHANGES.rst' |
|
170 | 169 | try: |
|
171 | 170 | long_description = open(readme_file).read() + '\n\n' + \ |
|
172 | 171 | open(changelog_file).read() |
|
173 | 172 | except IOError as err: |
|
174 | 173 | sys.stderr.write( |
|
175 | 174 | "[WARNING] Cannot find file specified as long_description (%s)\n " |
|
176 | 175 | "or changelog (%s) skipping that file" % (readme_file, changelog_file)) |
|
177 | 176 | long_description = description |
|
178 | 177 | |
|
179 | 178 | |
|
180 | 179 | setup( |
|
181 | 180 | name='rhodecode-enterprise-ce', |
|
182 | 181 | version=get_version(), |
|
183 | 182 | description=description, |
|
184 | 183 | long_description=long_description, |
|
185 | 184 | keywords=keywords, |
|
186 | 185 | license=__license__, |
|
187 | 186 | author=__author__, |
|
188 | 187 | author_email='marcin@rhodecode.com', |
|
189 | 188 | url=__url__, |
|
190 | 189 | setup_requires=setup_requirements, |
|
191 | 190 | install_requires=install_requirements, |
|
192 | 191 | tests_require=test_requirements, |
|
193 | 192 | zip_safe=False, |
|
194 | 193 | packages=find_packages(exclude=["docs", "tests*"]), |
|
195 | 194 | package_data=package_data, |
|
196 | 195 | include_package_data=True, |
|
197 | 196 | classifiers=[ |
|
198 | 197 | 'Development Status :: 6 - Mature', |
|
199 | 198 | 'Environment :: Web Environment', |
|
200 | 199 | 'Intended Audience :: Developers', |
|
201 | 200 | 'Operating System :: OS Independent', |
|
202 | 201 | 'Topic :: Software Development :: Version Control', |
|
203 | 202 | 'License :: OSI Approved :: Affero GNU General Public License v3 or later (AGPLv3+)', |
|
204 | 203 | 'Programming Language :: Python :: 2.7', |
|
205 | 204 | ], |
|
206 | 205 | message_extractors={ |
|
207 | 206 | 'rhodecode': [ |
|
208 | 207 | ('**.py', 'python', None), |
|
209 | 208 | ('**.js', 'javascript', None), |
|
210 | 209 | ('templates/**.mako', 'mako', {'input_encoding': 'utf-8'}), |
|
211 | 210 | ('templates/**.html', 'mako', {'input_encoding': 'utf-8'}), |
|
212 | 211 | ('public/**', 'ignore', None), |
|
213 | 212 | ] |
|
214 | 213 | }, |
|
215 | 214 | paster_plugins=['PasteScript', 'Pylons'], |
|
216 | 215 | entry_points={ |
|
217 | 216 | 'enterprise.plugins1': [ |
|
218 | 217 | 'crowd=rhodecode.authentication.plugins.auth_crowd:plugin_factory', |
|
219 | 218 | 'headers=rhodecode.authentication.plugins.auth_headers:plugin_factory', |
|
220 | 219 | 'jasig_cas=rhodecode.authentication.plugins.auth_jasig_cas:plugin_factory', |
|
221 | 220 | 'ldap=rhodecode.authentication.plugins.auth_ldap:plugin_factory', |
|
222 | 221 | 'pam=rhodecode.authentication.plugins.auth_pam:plugin_factory', |
|
223 | 222 | 'rhodecode=rhodecode.authentication.plugins.auth_rhodecode:plugin_factory', |
|
224 | 223 | 'token=rhodecode.authentication.plugins.auth_token:plugin_factory', |
|
225 | 224 | ], |
|
226 | 225 | 'paste.app_factory': [ |
|
227 | 226 | 'main=rhodecode.config.middleware:make_pyramid_app', |
|
228 | 227 | 'pylons=rhodecode.config.middleware:make_app', |
|
229 | 228 | ], |
|
230 | 229 | 'paste.app_install': [ |
|
231 | 230 | 'main=pylons.util:PylonsInstaller', |
|
232 | 231 | 'pylons=pylons.util:PylonsInstaller', |
|
233 | 232 | ], |
|
234 | 233 | 'paste.global_paster_command': [ |
|
235 | 234 | 'make-config=rhodecode.lib.paster_commands.make_config:Command', |
|
236 | 235 | 'setup-rhodecode=rhodecode.lib.paster_commands.setup_rhodecode:Command', |
|
237 | 236 | 'update-repoinfo=rhodecode.lib.paster_commands.update_repoinfo:Command', |
|
238 | 237 | 'cache-keys=rhodecode.lib.paster_commands.cache_keys:Command', |
|
239 | 238 | 'ishell=rhodecode.lib.paster_commands.ishell:Command', |
|
240 | 239 | 'upgrade-db=rhodecode.lib.dbmigrate:UpgradeDb', |
|
241 | 240 | 'celeryd=rhodecode.lib.celerypylons.commands:CeleryDaemonCommand', |
|
242 | 241 | ], |
|
243 | 242 | 'pytest11': [ |
|
244 | 243 | 'pylons=rhodecode.tests.pylons_plugin', |
|
245 | 244 | 'enterprise=rhodecode.tests.plugin', |
|
246 | 245 | ], |
|
247 | 246 | 'console_scripts': [ |
|
248 | 247 | 'rcserver=rhodecode.rcserver:main', |
|
249 | 248 | ], |
|
250 | 249 | 'beaker.backends': [ |
|
251 | 250 | 'memorylru_base=rhodecode.lib.memory_lru_debug:MemoryLRUNamespaceManagerBase', |
|
252 | 251 | 'memorylru_debug=rhodecode.lib.memory_lru_debug:MemoryLRUNamespaceManagerDebug' |
|
253 | 252 | ] |
|
254 | 253 | }, |
|
255 | 254 | ) |
|
1 | NO CONTENT: file was removed |
|
1 | NO CONTENT: file was removed |
|
1 | NO CONTENT: file was removed |
|
1 | NO CONTENT: file was removed |