release: Merge default into stable for release preparation
super-admin
r4729:f8c5eac8 merge stable

The requested changes are too big and content was truncated.

@@ -0,0 +1,55 b''
1 |RCE| 4.26.0 |RNS|
2 ------------------
3
4 Release Date
5 ^^^^^^^^^^^^
6
7 - 2021-08-06
8
9
10 New Features
11 ^^^^^^^^^^^^
12
13
14
15 General
16 ^^^^^^^
17
18 - Caches: introduce invalidation as a safer way to expire keys; deleting them is more problematic.
19 - Caches: improved locking problems with the new distributed-lock cache backend.
20 - Pull requests: optimize db transaction logic.
21 This should prevent potential problems with locking of pull-requests that have a lot of reviewers.
22 - Pull requests: updates now use retry logic in case an update is locked or fails due to concurrency issues.
23 - Pull requests: allow repo admins to force state changes too.
24 - SSH: handle subrepos better when using SSH communication.
25
26
27 Security
28 ^^^^^^^^
29
30 - Draft comments: don't allow anyone other than the owner to view history.
31 - Validators: apply the username validator to prevent bad values from being searched in the DB, and potential XSS payloads from being sent via validators.
32
33
34 Performance
35 ^^^^^^^^^^^
36
37 - SSH: use pre-compiled backends for faster VCS detection matching.
38 - Routing: skip the channelstream connection check for faster handling of this route.
39 - Routing: skip VCS detection for ops views so they are not checked against VCS operations.
40
41
42 Fixes
43 ^^^^^
44
45 - Permissions: flush all users' permissions when creating a new user group.
46 - Repos: recover properly from bad extraction of repo_id from URL and DB calls.
47 - Comments history: fixed fetching of history for comments.
48 - Pull requests: fix potential crash on providing a wrong order-by type column.
49 - Caches: report a damaged DB on key iteration too, not only on the GET call.
50 - API: added proper full permission flush on API calls when creating repos and repo groups.
51
52 Upgrade notes
53 ^^^^^^^^^^^^^
54
55 - Scheduled release 4.26.0.
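
The first "General" note above distinguishes invalidating cache keys from deleting them. A minimal sketch of that distinction using plain dogpile.cache (the library RhodeCode's rc_cache regions build on); the region and function names are illustrative, not RhodeCode's own:

    from dogpile.cache import make_region

    region = make_region().configure('dogpile.cache.memory', expiration_time=300)

    @region.cache_on_arguments()
    def repo_stats(repo_id):
        # stand-in for an expensive computation
        return {'repo_id': repo_id}

    repo_stats(1)

    # Soft invalidation marks existing values as expired but keeps them in the
    # backend, so they are regenerated on the next read; nothing is deleted.
    region.invalidate(hard=False)

    # Deleting removes values outright, which is the more problematic path the
    # note refers to: concurrent readers find no value at all.
    repo_stats.invalidate(1)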
@@ -1,6 +1,5 b''
1 1 [bumpversion]
2 current_version = 4.25.2
2 current_version = 4.26.0
3 3 message = release: Bump version {current_version} to {new_version}
4 4
5 5 [bumpversion:file:rhodecode/VERSION]
6
@@ -1,33 +1,28 b''
1 1 [DEFAULT]
2 2 done = false
3 3
4 4 [task:bump_version]
5 5 done = true
6 6
7 7 [task:rc_tools_pinned]
8 done = true
9 8
10 9 [task:fixes_on_stable]
11 done = true
12 10
13 11 [task:pip2nix_generated]
14 done = true
15 12
16 13 [task:changelog_updated]
17 done = true
18 14
19 15 [task:generate_api_docs]
20 done = true
16
17 [task:updated_translation]
21 18
22 19 [release]
23 state = prepared
24 version = 4.25.2
25
26 [task:updated_translation]
20 state = in_progress
21 version = 4.26.0
27 22
28 23 [task:generate_js_routes]
29 24
30 25 [task:updated_trial_license]
31 26
32 27 [task:generate_oss_licenses]
33 28
@@ -1,796 +1,800 b''
1 1 ## -*- coding: utf-8 -*-
2 2
3 3 ; #########################################
4 4 ; RHODECODE COMMUNITY EDITION CONFIGURATION
5 5 ; #########################################
6 6
7 7 [DEFAULT]
8 8 ; Debug flag sets all loggers to debug, and enables request tracking
9 9 debug = false
10 10
11 11 ; ########################################################################
12 12 ; EMAIL CONFIGURATION
13 13 ; These settings will be used by the RhodeCode mailing system
14 14 ; ########################################################################
15 15
16 16 ; prefix all email subjects with the given prefix, helps filtering out emails
17 17 #email_prefix = [RhodeCode]
18 18
19 19 ; email FROM address from which all mails will be sent
20 20 #app_email_from = rhodecode-noreply@localhost
21 21
22 22 #smtp_server = mail.server.com
23 23 #smtp_username =
24 24 #smtp_password =
25 25 #smtp_port =
26 26 #smtp_use_tls = false
27 27 #smtp_use_ssl = true
28 28
29 29 [server:main]
30 30 ; COMMON HOST/IP CONFIG
31 31 host = 127.0.0.1
32 32 port = 5000
33 33
34 34
35 35 ; ###########################
36 36 ; GUNICORN APPLICATION SERVER
37 37 ; ###########################
38 38
39 39 ; run with gunicorn --log-config rhodecode.ini --paste rhodecode.ini
40 40
41 41 ; Module to use, this setting shouldn't be changed
42 42 use = egg:gunicorn#main
43 43
44 44 ; Sets the number of process workers. More workers means more concurrent connections
45 45 ; RhodeCode can handle at the same time. Each additional worker also increases
46 46 ; memory usage, as each has its own set of caches.
47 47 ; Recommended value is (2 * NUMBER_OF_CPUS + 1), e.g. 2 CPUs = 5 workers, but no more
48 48 ; than 8-10 except for really big deployments, e.g. 700-1000 users.
49 49 ; `instance_id = *` must be set in the [app:main] section below (which is the default)
50 50 ; when using more than 1 worker.
51 51 workers = 2
52 52
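
As a small illustration of the sizing rule in the comment above (an arithmetic sketch, not code shipped with RhodeCode):

    import os

    # (2 * NUMBER_OF_CPUS + 1), capped around 8-10 except for very large deployments
    cpus = os.cpu_count() or 1
    workers = min(2 * cpus + 1, 10)
    print(workers)  # 5 on a 2-CPU host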
53 53 ; Gunicorn access log level
54 54 loglevel = info
55 55
56 56 ; Process name visible in process list
57 57 proc_name = rhodecode
58 58
59 59 ; Type of worker class, one of `sync`, `gevent`
60 60 ; Recommended type is `gevent`
61 61 worker_class = gevent
62 62
63 63 ; The maximum number of simultaneous clients per worker. Valid only for gevent
64 64 worker_connections = 10
65 65
66 66 ; Max number of requests that worker will handle before being gracefully restarted.
67 67 ; Prevents memory leaks, jitter adds variability so not all workers are restarted at once.
68 68 max_requests = 1000
69 69 max_requests_jitter = 30
70 70
71 71 ; Amount of time a worker can spend handling a request before it
72 72 ; gets killed and restarted. By default set to 21600 (6hrs)
73 73 ; Examples: 1800 (30min), 3600 (1hr), 7200 (2hr), 43200 (12h)
74 74 timeout = 21600
75 75
76 76 ; The maximum size of HTTP request line in bytes.
77 77 ; 0 for unlimited
78 78 limit_request_line = 0
79 79
80 80 ; Limit the number of HTTP headers fields in a request.
81 81 ; By default this value is 100 and can't be larger than 32768.
82 82 limit_request_fields = 32768
83 83
84 84 ; Limit the allowed size of an HTTP request header field.
85 85 ; Value is a positive number or 0.
86 86 ; Setting it to 0 will allow unlimited header field sizes.
87 87 limit_request_field_size = 0
88 88
89 89 ; Timeout for graceful workers restart.
90 90 ; After receiving a restart signal, workers have this much time to finish
91 91 ; serving requests. Workers still alive after the timeout (starting from the
92 92 ; receipt of the restart signal) are force killed.
93 93 ; Examples: 1800 (30min), 3600 (1hr), 7200 (2hr), 43200 (12h)
94 94 graceful_timeout = 3600
95 95
96 96 # The number of seconds to wait for requests on a Keep-Alive connection.
97 97 # Generally set in the 1-5 seconds range.
98 98 keepalive = 2
99 99
100 100 ; Maximum memory usage that each worker can use before it will receive a
101 101 ; graceful restart signal. 0 = memory monitoring is disabled
102 102 ; Examples: 268435456 (256MB), 536870912 (512MB)
103 103 ; 1073741824 (1GB), 2147483648 (2GB), 4294967296 (4GB)
104 104 memory_max_usage = 0
105 105
106 106 ; How often in seconds to check for memory usage for each gunicorn worker
107 107 memory_usage_check_interval = 60
108 108
109 109 ; Threshold value for which we don't recycle a worker if garbage collection
110 110 ; frees up enough resources. Before each restart we try to run GC on the worker;
111 111 ; in case we get enough free memory after that, the restart will not happen.
112 112 memory_usage_recovery_threshold = 0.8
113 113
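
A hypothetical sketch of the decision these three memory settings describe, using psutil to read the worker's RSS; this illustrates the documented behaviour, not the hook RhodeCode actually installs:

    import gc
    import os

    import psutil

    MEMORY_MAX_USAGE = 536870912   # 512MB; 0 would disable monitoring
    RECOVERY_THRESHOLD = 0.8

    def should_recycle_worker(pid=None):
        if not MEMORY_MAX_USAGE:
            return False
        proc = psutil.Process(pid or os.getpid())
        if proc.memory_info().rss <= MEMORY_MAX_USAGE:
            return False
        # try garbage collection first; if it frees enough memory relative to
        # the threshold, the graceful restart is skipped
        gc.collect()
        return proc.memory_info().rss > MEMORY_MAX_USAGE * RECOVERY_THRESHOLD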
114 114
115 115 ; Prefix middleware for RhodeCode.
116 116 ; recommended when using a proxy setup.
117 117 ; allows serving RhodeCode under a prefix on the server.
118 118 ; eg https://server.com/custom_prefix. Enable `filter-with =` option below as well.
119 119 ; And set your prefix like: `prefix = /custom_prefix`
120 120 ; be sure to also set beaker.session.cookie_path = /custom_prefix if you need
121 121 ; to make your cookies only work on the prefix URL
122 122 [filter:proxy-prefix]
123 123 use = egg:PasteDeploy#prefix
124 124 prefix = /
125 125
126 126 [app:main]
127 127 ; The %(here)s variable will be replaced with the absolute path of parent directory
128 128 ; of this file
129 129 ; In addition ENVIRONMENT variables usage is possible, e.g
130 130 ; sqlalchemy.db1.url = {ENV_RC_DB_URL}
131 131
132 132 use = egg:rhodecode-enterprise-ce
133 133
134 134 ; enable proxy prefix middleware, defined above
135 135 #filter-with = proxy-prefix
136 136
137 137 ; encryption key used to encrypt social plugin tokens,
138 138 ; remote_urls with credentials etc, if not set it defaults to
139 139 ; `beaker.session.secret`
140 140 #rhodecode.encrypted_values.secret =
141 141
142 142 ; decryption strict mode (enabled by default). It controls if decryption raises
143 143 ; `SignatureVerificationError` in case of wrong key, or damaged encryption data.
144 144 #rhodecode.encrypted_values.strict = false
145 145
146 146 ; Pick algorithm for encryption. Either fernet (more secure) or aes (default)
147 147 ; fernet is safer, and we strongly recommend switching to it.
148 148 ; Due to backward compatibility aes is used as default.
149 149 #rhodecode.encrypted_values.algorithm = fernet
150 150
151 151 ; Return gzipped responses from RhodeCode (static files/application)
152 152 gzip_responses = false
153 153
154 154 ; Auto-generate javascript routes file on startup
155 155 generate_js_files = false
156 156
157 157 ; System global default language.
158 158 ; All available languages: en (default), be, de, es, fr, it, ja, pl, pt, ru, zh
159 159 lang = en
160 160
161 161 ; Perform a full repository scan and import on each server start.
162 162 ; Setting this to true could lead to a very long startup time.
163 163 startup.import_repos = false
164 164
165 165 ; Uncomment and set this path to use archive download cache.
166 166 ; Once enabled, generated archives will be cached at this location
167 167 ; and served from the cache during subsequent requests for the same archive of
168 168 ; the repository.
169 169 #archive_cache_dir = /tmp/tarballcache
170 170
171 171 ; URL at which the application is running. This is used for Bootstrapping
172 172 ; requests in context when no web request is available. Used in ishell, or
173 173 ; SSH calls. Set this for events to receive proper url for SSH calls.
174 174 app.base_url = http://rhodecode.local
175 175
176 176 ; Unique application ID. Should be a random unique string for security.
177 177 app_instance_uuid = rc-production
178 178
179 179 ; Cut off limit for large diffs (size in bytes). If overall diff size on
180 180 ; commit or pull request exceeds this limit, the diff will be displayed
181 181 ; partially. E.g 512000 == 512Kb
182 182 cut_off_limit_diff = 512000
183 183
184 184 ; Cut off limit for large files inside diffs (size in bytes). Each individual
185 185 ; file inside diff which exceeds this limit will be displayed partially.
186 186 ; E.g 128000 == 128Kb
187 187 cut_off_limit_file = 128000
188 188
189 189 ; Use cached version of vcs repositories everywhere. Recommended to be `true`
190 190 vcs_full_cache = true
191 191
192 192 ; Force https in RhodeCode, fixes https redirects, assumes it's always https.
193 193 ; Normally this is controlled by proper flags sent from http server such as Nginx or Apache
194 194 force_https = false
195 195
196 196 ; use Strict-Transport-Security headers
197 197 use_htsts = false
198 198
199 199 ; Set to true if your repos are exposed using the dumb protocol
200 200 git_update_server_info = false
201 201
202 202 ; RSS/ATOM feed options
203 203 rss_cut_off_limit = 256000
204 204 rss_items_per_page = 10
205 205 rss_include_diff = false
206 206
207 207 ; gist URL alias, used to create nicer urls for gist. This should be an
208 208 ; url that does rewrites to _admin/gists/{gistid}.
209 209 ; example: http://gist.rhodecode.org/{gistid}. Empty means use the internal
210 210 ; RhodeCode url, ie. http[s]://rhodecode.server/_admin/gists/{gistid}
211 211 gist_alias_url =
212 212
213 213 ; List of views (using glob pattern syntax) that AUTH TOKENS can be
214 214 ; used to access.
215 215 ; Adding ?auth_token=TOKEN_HASH to the url authenticates this request as if it
216 216 ; came from the logged-in user who owns this authentication token.
217 217 ; Additionally, the @TOKEN syntax can be used to bind the view to a specific
218 218 ; authentication token. Such a view would only be accessible when used together
219 219 ; with this authentication token
220 220 ; list of all views can be found under `/_admin/permissions/auth_token_access`
221 221 ; The list should be "," separated and on a single line.
222 222 ; Most common views to enable:
223 223
224 224 # RepoCommitsView:repo_commit_download
225 225 # RepoCommitsView:repo_commit_patch
226 226 # RepoCommitsView:repo_commit_raw
227 227 # RepoCommitsView:repo_commit_raw@TOKEN
228 228 # RepoFilesView:repo_files_diff
229 229 # RepoFilesView:repo_archivefile
230 230 # RepoFilesView:repo_file_raw
231 231 # GistView:*
232 232 api_access_controllers_whitelist =
233 233
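
For illustration, once a view such as RepoFilesView:repo_file_raw is whitelisted above, a script could fetch it non-interactively; the URL and token below are placeholders:

    import requests

    # hypothetical repository URL; auth_token authenticates the request as the
    # token's owner, per the whitelist rules described above
    url = 'https://rhodecode.example.com/myrepo/raw/tip/README.rst'
    resp = requests.get(url, params={'auth_token': 'TOKEN_HASH'})
    resp.raise_for_status()
    print(resp.text)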
234 234 ; Default encoding used to convert from and to unicode
235 235 ; can be also a comma separated list of encoding in case of mixed encodings
236 236 default_encoding = UTF-8
237 237
238 238 ; instance-id prefix
239 239 ; a prefix key for this instance used for cache invalidation when running
240 240 ; multiple instances of RhodeCode, make sure it's globally unique for
241 241 ; all running RhodeCode instances. Leave empty if you don't use it
242 242 instance_id =
243 243
244 244 ; Fallback authentication plugin. Set this to a plugin ID to force the usage
245 245 ; of an authentication plugin even if it is disabled by its settings.
246 246 ; This could be useful if you are unable to log in to the system due to broken
247 247 ; authentication settings. Then you can enable e.g. the internal RhodeCode auth
248 248 ; module to log in again and fix the settings.
249 249 ; Available builtin plugin IDs (hash is part of the ID):
250 250 ; egg:rhodecode-enterprise-ce#rhodecode
251 251 ; egg:rhodecode-enterprise-ce#pam
252 252 ; egg:rhodecode-enterprise-ce#ldap
253 253 ; egg:rhodecode-enterprise-ce#jasig_cas
254 254 ; egg:rhodecode-enterprise-ce#headers
255 255 ; egg:rhodecode-enterprise-ce#crowd
256 256
257 257 #rhodecode.auth_plugin_fallback = egg:rhodecode-enterprise-ce#rhodecode
258 258
259 259 ; Flag to control loading of legacy plugins in py:/path format
260 260 auth_plugin.import_legacy_plugins = true
261 261
262 262 ; alternative return HTTP header for failed authentication. Default HTTP
263 263 ; response is 401 HTTPUnauthorized. Currently HG clients have trouble with
264 264 ; handling that, causing a series of failed authentication calls.
265 265 ; Set this variable to 403 to return HTTPForbidden, or any other HTTP code
266 266 ; This will be served instead of default 401 on bad authentication
267 267 auth_ret_code =
268 268
269 269 ; use special detection method when serving auth_ret_code, instead of serving
270 270 ; ret_code directly, use 401 initially (Which triggers credentials prompt)
271 271 ; and then serve auth_ret_code to clients
272 272 auth_ret_code_detection = false
273 273
274 274 ; locking return code. When repository is locked return this HTTP code. 2XX
275 275 ; codes don't break the transactions while 4XX codes do
276 276 lock_ret_code = 423
277 277
278 278 ; allows changing the repository location in the settings page
279 279 allow_repo_location_change = true
280 280
281 281 ; allows setting up custom hooks in the settings page
282 282 allow_custom_hooks_settings = true
283 283
284 284 ; Generated license token required for EE edition license.
285 285 ; New generated token value can be found in Admin > settings > license page.
286 286 license_token =
287 287
288 288 ; This flag hides sensitive information on the license page such as token, and license data
289 289 license.hide_license_info = false
290 290
291 291 ; supervisor connection uri, for managing supervisor and logs.
292 292 supervisor.uri =
293 293
294 294 ; supervisord group name/id we only want this RC instance to handle
295 295 supervisor.group_id = prod
296 296
297 297 ; Display extended labs settings
298 298 labs_settings_active = true
299 299
300 300 ; Custom exception store path, defaults to TMPDIR
301 301 ; This is used to store exceptions from RhodeCode in a shared directory
302 302 #exception_tracker.store_path =
303 303
304 304 ; Send email with exception details when it happens
305 305 #exception_tracker.send_email = false
306 306
307 307 ; Comma separated list of recipients for exception emails,
308 308 ; e.g admin@rhodecode.com,devops@rhodecode.com
309 309 ; Can be left empty, then emails will be sent to ALL super-admins
310 310 #exception_tracker.send_email_recipients =
311 311
312 312 ; optional prefix to Add to email Subject
313 313 #exception_tracker.email_prefix = [RHODECODE ERROR]
314 314
315 315 ; File store configuration. This is used to store and serve uploaded files
316 316 file_store.enabled = true
317 317
318 318 ; Storage backend, available options are: local
319 319 file_store.backend = local
320 320
321 321 ; path to store the uploaded binaries
322 322 file_store.storage_path = %(here)s/data/file_store
323 323
324 324
325 325 ; #############
326 326 ; CELERY CONFIG
327 327 ; #############
328 328
329 329 ; manually run celery: /path/to/celery worker -E --beat --app rhodecode.lib.celerylib.loader --scheduler rhodecode.lib.celerylib.scheduler.RcScheduler --loglevel DEBUG --ini /path/to/rhodecode.ini
330 330
331 331 use_celery = false
332 332
333 333 ; connection url to the message broker (default redis)
334 334 celery.broker_url = redis://localhost:6379/8
335 335
336 336 ; rabbitmq example
337 337 #celery.broker_url = amqp://rabbitmq:qweqwe@localhost:5672/rabbitmqhost
338 338
339 339 ; maximum tasks to execute before worker restart
340 340 celery.max_tasks_per_child = 100
341 341
342 342 ; tasks will never be sent to the queue, but executed locally instead.
343 343 celery.task_always_eager = false
344 344
345 345 ; #############
346 346 ; DOGPILE CACHE
347 347 ; #############
348 348
349 349 ; Default cache dir for caches. Putting this into a ramdisk can boost performance.
350 350 ; eg. /tmpfs/data_ramdisk, however this directory might require a large amount of space
351 351 cache_dir = %(here)s/data
352 352
353 353 ; *********************************************
354 354 ; `sql_cache_short` cache for heavy SQL queries
355 355 ; Only supported backend is `memory_lru`
356 356 ; *********************************************
357 357 rc_cache.sql_cache_short.backend = dogpile.cache.rc.memory_lru
358 358 rc_cache.sql_cache_short.expiration_time = 30
359 359
360 360
361 361 ; *****************************************************
362 362 ; `cache_repo_longterm` cache for repo object instances
363 363 ; Only supported backend is `memory_lru`
364 364 ; *****************************************************
365 365 rc_cache.cache_repo_longterm.backend = dogpile.cache.rc.memory_lru
366 366 ; by default we use 30 Days, cache is still invalidated on push
367 367 rc_cache.cache_repo_longterm.expiration_time = 2592000
368 368 ; max items in LRU cache, set to smaller number to save memory, and expire last used caches
369 369 rc_cache.cache_repo_longterm.max_size = 10000
370 370
371 371
372 372 ; *************************************************
373 373 ; `cache_perms` cache for permission tree, auth TTL
374 374 ; *************************************************
375 375 rc_cache.cache_perms.backend = dogpile.cache.rc.file_namespace
376 376 rc_cache.cache_perms.expiration_time = 300
377 377 ; file cache store path. Defaults to `cache_dir =` value or tempdir if neither value is set
378 378 #rc_cache.cache_perms.arguments.filename = /tmp/cache_perms.db
379 379
380 380 ; alternative `cache_perms` redis backend with distributed lock
381 381 #rc_cache.cache_perms.backend = dogpile.cache.rc.redis
382 382 #rc_cache.cache_perms.expiration_time = 300
383 383
384 384 ; redis_expiration_time needs to be greater than expiration_time
385 385 #rc_cache.cache_perms.arguments.redis_expiration_time = 7200
386 386
387 387 #rc_cache.cache_perms.arguments.host = localhost
388 388 #rc_cache.cache_perms.arguments.port = 6379
389 389 #rc_cache.cache_perms.arguments.db = 0
390 390 #rc_cache.cache_perms.arguments.socket_timeout = 30
391 391 ; more Redis options: https://dogpilecache.sqlalchemy.org/en/latest/api.html#redis-backends
392 392 #rc_cache.cache_perms.arguments.distributed_lock = true
393 393
394 ; auto-renew lock to prevent stale locks, slower but safer. Use only if problems happen
395 #rc_cache.cache_perms.arguments.lock_auto_renewal = true
394 396
395 397 ; ***************************************************
396 398 ; `cache_repo` cache for file tree, Readme, RSS FEEDS
397 399 ; ***************************************************
398 400 rc_cache.cache_repo.backend = dogpile.cache.rc.file_namespace
399 401 rc_cache.cache_repo.expiration_time = 2592000
400 402 ; file cache store path. Defaults to `cache_dir =` value or tempdir if neither value is set
401 403 #rc_cache.cache_repo.arguments.filename = /tmp/cache_repo.db
402 404
403 405 ; alternative `cache_repo` redis backend with distributed lock
404 406 #rc_cache.cache_repo.backend = dogpile.cache.rc.redis
405 407 #rc_cache.cache_repo.expiration_time = 2592000
406 408
407 409 ; redis_expiration_time needs to be greater than expiration_time
408 410 #rc_cache.cache_repo.arguments.redis_expiration_time = 2678400
409 411
410 412 #rc_cache.cache_repo.arguments.host = localhost
411 413 #rc_cache.cache_repo.arguments.port = 6379
412 414 #rc_cache.cache_repo.arguments.db = 1
413 415 #rc_cache.cache_repo.arguments.socket_timeout = 30
414 416 ; more Redis options: https://dogpilecache.sqlalchemy.org/en/latest/api.html#redis-backends
415 417 #rc_cache.cache_repo.arguments.distributed_lock = true
416 418
419 ; auto-renew lock to prevent stale locks, slower but safer. Use only if problems happen
420 #rc_cache.cache_repo.arguments.lock_auto_renewal = true
417 421
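
The `lock_auto_renewal` option added for both redis cache regions guards against a distributed lock expiring while its holder is still working. Conceptually it behaves like this redis-py sketch (an illustration under assumed names, not RhodeCode's implementation):

    import threading

    import redis

    client = redis.StrictRedis(host='localhost', port=6379, db=0)

    def with_auto_renewed_lock(name, work, timeout=30):
        lock = client.lock(name, timeout=timeout)
        with lock:
            stop = threading.Event()

            def renew():
                # periodically push the TTL forward so a slow computation
                # never loses the lock to expiry (stale-lock protection)
                while not stop.wait(timeout / 2):
                    lock.extend(timeout, replace_ttl=True)

            threading.Thread(target=renew, daemon=True).start()
            try:
                return work()
            finally:
                stop.set()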
418 422 ; ##############
419 423 ; BEAKER SESSION
420 424 ; ##############
421 425
422 426 ; beaker.session.type is the storage type for logged-in users' sessions. Currently allowed
423 427 ; types are file, ext:redis, ext:database, ext:memcached, and memory (default if not specified).
424 428 ; Fastest ones are Redis and ext:database
425 429 beaker.session.type = file
426 430 beaker.session.data_dir = %(here)s/data/sessions
427 431
428 432 ; Redis based sessions
429 433 #beaker.session.type = ext:redis
430 434 #beaker.session.url = redis://127.0.0.1:6379/2
431 435
432 436 ; DB based session, fast, and allows easy management of logged-in users
433 437 #beaker.session.type = ext:database
434 438 #beaker.session.table_name = db_session
435 439 #beaker.session.sa.url = postgresql://postgres:secret@localhost/rhodecode
436 440 #beaker.session.sa.url = mysql://root:secret@127.0.0.1/rhodecode
437 441 #beaker.session.sa.pool_recycle = 3600
438 442 #beaker.session.sa.echo = false
439 443
440 444 beaker.session.key = rhodecode
441 445 beaker.session.secret = production-rc-uytcxaz
442 446 beaker.session.lock_dir = %(here)s/data/sessions/lock
443 447
444 448 ; Secure encrypted cookie. Requires AES and AES python libraries
445 449 ; you must disable beaker.session.secret to use this
446 450 #beaker.session.encrypt_key = key_for_encryption
447 451 #beaker.session.validate_key = validation_key
448 452
449 453 ; Sets session as invalid (also logging out the user) if it has not been
450 454 ; accessed for a given amount of time, in seconds
451 455 beaker.session.timeout = 2592000
452 456 beaker.session.httponly = true
453 457
454 458 ; Path to use for the cookie. Set to prefix if you use prefix middleware
455 459 #beaker.session.cookie_path = /custom_prefix
456 460
457 461 ; Set https secure cookie
458 462 beaker.session.secure = false
459 463
460 464 ; default cookie expiration time in seconds, set to `true` to expire
461 465 ; at browser close
462 466 #beaker.session.cookie_expires = 3600
463 467
464 468 ; #############################
465 469 ; SEARCH INDEXING CONFIGURATION
466 470 ; #############################
467 471
468 472 ; Full text search indexer is available in rhodecode-tools under
469 473 ; `rhodecode-tools index` command
470 474
471 475 ; WHOOSH Backend, doesn't require additional services to run
472 476 ; it works well with a few dozen repos
473 477 search.module = rhodecode.lib.index.whoosh
474 478 search.location = %(here)s/data/index
475 479
476 480 ; ####################
477 481 ; CHANNELSTREAM CONFIG
478 482 ; ####################
479 483
480 484 ; channelstream enables persistent connections and live notification
481 485 ; in the system. It's also used by the chat system
482 486
483 487 channelstream.enabled = false
484 488
485 489 ; server address for channelstream server on the backend
486 490 channelstream.server = 127.0.0.1:9800
487 491
488 492 ; location of the channelstream server from outside world
489 493 ; use ws:// for http or wss:// for https. This address needs to be handled
490 494 ; by external HTTP server such as Nginx or Apache
491 495 ; see Nginx/Apache configuration examples in our docs
492 496 channelstream.ws_url = ws://rhodecode.yourserver.com/_channelstream
493 497 channelstream.secret = secret
494 498 channelstream.history.location = %(here)s/channelstream_history
495 499
496 500 ; Internal application path that Javascript uses to connect to.
497 501 ; If you use proxy-prefix the prefix should be added before /_channelstream
498 502 channelstream.proxy_path = /_channelstream
499 503
500 504
501 505 ; ##############################
502 506 ; MAIN RHODECODE DATABASE CONFIG
503 507 ; ##############################
504 508
505 509 #sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode.db?timeout=30
506 510 #sqlalchemy.db1.url = postgresql://postgres:qweqwe@localhost/rhodecode
507 511 #sqlalchemy.db1.url = mysql://root:qweqwe@localhost/rhodecode?charset=utf8
508 512 ; pymysql is an alternative driver for MySQL, use it in case of problems with the default one
509 513 #sqlalchemy.db1.url = mysql+pymysql://root:qweqwe@localhost/rhodecode
510 514
511 515 sqlalchemy.db1.url = postgresql://postgres:qweqwe@localhost/rhodecode
512 516
513 517 ; see sqlalchemy docs for other advanced settings
514 518 ; print the sql statements to output
515 519 sqlalchemy.db1.echo = false
516 520
517 521 ; recycle the connections after this amount of seconds
518 522 sqlalchemy.db1.pool_recycle = 3600
519 523 sqlalchemy.db1.convert_unicode = true
520 524
521 525 ; the number of connections to keep open inside the connection pool.
522 526 ; 0 indicates no limit
523 527 #sqlalchemy.db1.pool_size = 5
524 528
525 529 ; The number of connections to allow in connection pool "overflow", that is
526 530 ; connections that can be opened above and beyond the pool_size setting,
527 531 ; which defaults to five.
528 532 #sqlalchemy.db1.max_overflow = 10
529 533
530 534 ; Connection check ping, used to detect broken database connections
531 535 ; can be enabled to better handle 'MySQL has gone away' errors
532 536 #sqlalchemy.db1.ping_connection = true
533 537
534 538 ; ##########
535 539 ; VCS CONFIG
536 540 ; ##########
537 541 vcs.server.enable = true
538 542 vcs.server = localhost:9900
539 543
540 544 ; Web server connectivity protocol, responsible for web based VCS operations
541 545 ; Available protocols are:
542 546 ; `http` - use http-rpc backend (default)
543 547 vcs.server.protocol = http
544 548
545 549 ; Push/Pull operations protocol, available options are:
546 550 ; `http` - use http-rpc backend (default)
547 551 vcs.scm_app_implementation = http
548 552
549 553 ; Push/Pull operations hooks protocol, available options are:
550 554 ; `http` - use http-rpc backend (default)
551 555 vcs.hooks.protocol = http
552 556
553 557 ; Host on which this instance is listening for hooks. If vcsserver is in another location
554 558 ; this should be adjusted.
555 559 vcs.hooks.host = 127.0.0.1
556 560
557 561 ; Start VCSServer with this instance as a subprocess, useful for development
558 562 vcs.start_server = false
559 563
560 564 ; List of enabled VCS backends, available options are:
561 565 ; `hg` - mercurial
562 566 ; `git` - git
563 567 ; `svn` - subversion
564 568 vcs.backends = hg, git, svn
565 569
566 570 ; Wait this number of seconds before killing connection to the vcsserver
567 571 vcs.connection_timeout = 3600
568 572
569 573 ; Compatibility version when creating SVN repositories. Defaults to newest version when commented out.
570 574 ; Set a numeric version for your current SVN e.g 1.8, or 1.12
571 575 ; Legacy available options are: pre-1.4-compatible, pre-1.5-compatible, pre-1.6-compatible, pre-1.8-compatible, pre-1.9-compatible
572 576 #vcs.svn.compatible_version = 1.8
573 577
574 578
575 579 ; ####################################################
576 580 ; Subversion proxy support (mod_dav_svn)
577 581 ; Maps RhodeCode repo groups into SVN paths for Apache
578 582 ; ####################################################
579 583
580 584 ; Enable or disable the config file generation.
581 585 svn.proxy.generate_config = false
582 586
583 587 ; Generate config file with `SVNListParentPath` set to `On`.
584 588 svn.proxy.list_parent_path = true
585 589
586 590 ; Set location and file name of generated config file.
587 591 svn.proxy.config_file_path = %(here)s/mod_dav_svn.conf
588 592
589 593 ; alternative mod_dav config template. This needs to be a valid mako template
590 594 ; Example template can be found in the source code:
591 595 ; rhodecode/apps/svn_support/templates/mod-dav-svn.conf.mako
592 596 #svn.proxy.config_template = ~/.rccontrol/enterprise-1/custom_svn_conf.mako
593 597
594 598 ; Used as a prefix to the `Location` block in the generated config file.
595 599 ; In most cases it should be set to `/`.
596 600 svn.proxy.location_root = /
597 601
598 602 ; Command to reload the mod dav svn configuration on change.
599 603 ; Example: `/etc/init.d/apache2 reload` or /home/USER/apache_reload.sh
600 604 ; Make sure user who runs RhodeCode process is allowed to reload Apache
601 605 #svn.proxy.reload_cmd = /etc/init.d/apache2 reload
602 606
603 607 ; If the timeout expires before the reload command finishes, the command will
604 608 ; be killed. Setting it to zero means no timeout. Defaults to 10 seconds.
605 609 #svn.proxy.reload_timeout = 10
606 610
607 611 ; ####################
608 612 ; SSH Support Settings
609 613 ; ####################
610 614
611 615 ; Defines if a custom authorized_keys file should be created and written on
612 616 ; any change of user SSH keys. Setting this to false also disables the possibility
613 617 ; of users adding SSH keys from the web interface. Super admins can still
614 618 ; manage SSH Keys.
615 619 ssh.generate_authorized_keyfile = false
616 620
617 621 ; Options for ssh, default is `no-pty,no-port-forwarding,no-X11-forwarding,no-agent-forwarding`
618 622 # ssh.authorized_keys_ssh_opts =
619 623
620 624 ; Path to the authorized_keys file where the generated entries are placed.
621 625 ; It is possible to have multiple key files specified in `sshd_config` e.g.
622 626 ; AuthorizedKeysFile %h/.ssh/authorized_keys %h/.ssh/authorized_keys_rhodecode
623 627 ssh.authorized_keys_file_path = ~/.ssh/authorized_keys_rhodecode
624 628
625 629 ; Command to execute the SSH wrapper. The binary is available in the
626 630 ; RhodeCode installation directory.
627 631 ; e.g ~/.rccontrol/community-1/profile/bin/rc-ssh-wrapper
628 632 ssh.wrapper_cmd = ~/.rccontrol/community-1/rc-ssh-wrapper
629 633
630 634 ; Allow shell when executing the ssh-wrapper command
631 635 ssh.wrapper_cmd_allow_shell = false
632 636
633 637 ; Enables logging, and detailed output sent back to the client during SSH
634 638 ; operations. Useful for debugging, shouldn't be used in production.
635 639 ssh.enable_debug_logging = false
636 640
637 641 ; Paths to binary executables; by default these are just the names, but we can
638 642 ; override them if we want to use custom ones
639 643 ssh.executable.hg = ~/.rccontrol/vcsserver-1/profile/bin/hg
640 644 ssh.executable.git = ~/.rccontrol/vcsserver-1/profile/bin/git
641 645 ssh.executable.svn = ~/.rccontrol/vcsserver-1/profile/bin/svnserve
642 646
643 647 ; Enables SSH key generator web interface. Disabling this still allows users
644 648 ; to add their own keys.
645 649 ssh.enable_ui_key_generator = true
646 650
647 651
648 652 ; #################
649 653 ; APPENLIGHT CONFIG
650 654 ; #################
651 655
652 656 ; Appenlight is tailored to work with RhodeCode, see
653 657 ; http://appenlight.rhodecode.com for details on how to obtain an account
654 658
655 659 ; Appenlight integration enabled
656 660 appenlight = false
657 661
658 662 appenlight.server_url = https://api.appenlight.com
659 663 appenlight.api_key = YOUR_API_KEY
660 664 #appenlight.transport_config = https://api.appenlight.com?threaded=1&timeout=5
661 665
662 666 ; used for JS client
663 667 appenlight.api_public_key = YOUR_API_PUBLIC_KEY
664 668
665 669 ; TWEAK AMOUNT OF INFO SENT HERE
666 670
667 671 ; enables 404 error logging (default False)
668 672 appenlight.report_404 = false
669 673
670 674 ; time in seconds after request is considered being slow (default 1)
671 675 appenlight.slow_request_time = 1
672 676
673 677 ; record slow requests in application
674 678 ; (needs to be enabled for slow datastore recording and time tracking)
675 679 appenlight.slow_requests = true
676 680
677 681 ; enable hooking to application loggers
678 682 appenlight.logging = true
679 683
680 684 ; minimum log level for log capture
681 685 appenlight.logging.level = WARNING
682 686
683 687 ; send logs only from erroneous/slow requests
684 688 ; (saves API quota for intensive logging)
685 689 appenlight.logging_on_error = false
686 690
687 691 ; list of additional keywords that should be grabbed from environ object
688 692 ; can be string with comma separated list of words in lowercase
689 693 ; (by default the client will always send the following info:
690 694 ; 'REMOTE_USER', 'REMOTE_ADDR', 'SERVER_NAME', 'CONTENT_TYPE' + all keys that
691 695 ; start with HTTP*); this list can be extended with additional keywords here
692 696 appenlight.environ_keys_whitelist =
693 697
694 698 ; list of keywords that should be blanked from request object
695 699 ; can be string with comma separated list of words in lowercase
696 700 ; (by default the client will always blank keys that contain the following words:
697 701 ; 'password', 'passwd', 'pwd', 'auth_tkt', 'secret', 'csrf';
698 702 ; this list can be extended with additional keywords set here)
699 703 appenlight.request_keys_blacklist =
700 704
701 705 ; list of namespaces that should be ignored when gathering log entries
702 706 ; can be string with comma separated list of namespaces
703 707 ; (by default the client ignores its own entries: appenlight_client.client)
704 708 appenlight.log_namespace_blacklist =
705 709
706 710 ; Dummy marker to add new entries after.
707 711 ; Add any custom entries below. Please don't remove this marker.
708 712 custom.conf = 1
709 713
710 714
711 715 ; #####################
712 716 ; LOGGING CONFIGURATION
713 717 ; #####################
714 718 [loggers]
715 719 keys = root, sqlalchemy, beaker, celery, rhodecode, ssh_wrapper
716 720
717 721 [handlers]
718 722 keys = console, console_sql
719 723
720 724 [formatters]
721 725 keys = generic, color_formatter, color_formatter_sql
722 726
723 727 ; #######
724 728 ; LOGGERS
725 729 ; #######
726 730 [logger_root]
727 731 level = NOTSET
728 732 handlers = console
729 733
730 734 [logger_sqlalchemy]
731 735 level = INFO
732 736 handlers = console_sql
733 737 qualname = sqlalchemy.engine
734 738 propagate = 0
735 739
736 740 [logger_beaker]
737 741 level = DEBUG
738 742 handlers =
739 743 qualname = beaker.container
740 744 propagate = 1
741 745
742 746 [logger_rhodecode]
743 747 level = DEBUG
744 748 handlers =
745 749 qualname = rhodecode
746 750 propagate = 1
747 751
748 752 [logger_ssh_wrapper]
749 753 level = DEBUG
750 754 handlers =
751 755 qualname = ssh_wrapper
752 756 propagate = 1
753 757
754 758 [logger_celery]
755 759 level = DEBUG
756 760 handlers =
757 761 qualname = celery
758 762
759 763
760 764 ; ########
761 765 ; HANDLERS
762 766 ; ########
763 767
764 768 [handler_console]
765 769 class = StreamHandler
766 770 args = (sys.stderr, )
767 771 level = INFO
768 772 formatter = generic
769 773
770 774 [handler_console_sql]
771 775 ; "level = DEBUG" logs SQL queries and results.
772 776 ; "level = INFO" logs SQL queries.
773 777 ; "level = WARN" logs neither. (Recommended for production systems.)
774 778 class = StreamHandler
775 779 args = (sys.stderr, )
776 780 level = WARN
777 781 formatter = generic
778 782
779 783 ; ##########
780 784 ; FORMATTERS
781 785 ; ##########
782 786
783 787 [formatter_generic]
784 788 class = rhodecode.lib.logging_formatter.ExceptionAwareFormatter
785 789 format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s
786 790 datefmt = %Y-%m-%d %H:%M:%S
787 791
788 792 [formatter_color_formatter]
789 793 class = rhodecode.lib.logging_formatter.ColorFormatter
790 794 format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s
791 795 datefmt = %Y-%m-%d %H:%M:%S
792 796
793 797 [formatter_color_formatter_sql]
794 798 class = rhodecode.lib.logging_formatter.ColorFormatterSql
795 799 format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s
796 800 datefmt = %Y-%m-%d %H:%M:%S
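
The [loggers], [handlers], and [formatters] sections above follow the stdlib logging fileConfig schema, so the same .ini can drive logging outside the application too. A minimal sketch; the path is illustrative, and the custom formatter classes require the rhodecode package to be importable:

    import logging
    import logging.config

    # unrelated sections of the ini are ignored by fileConfig
    logging.config.fileConfig('rhodecode.ini', disable_existing_loggers=False)

    log = logging.getLogger('rhodecode')
    log.info('goes to stderr via handler_console with the generic formatter')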
@@ -1,156 +1,157 b''
1 1 .. _rhodecode-release-notes-ref:
2 2
3 3 Release Notes
4 4 =============
5 5
6 6 |RCE| 4.x Versions
7 7 ------------------
8 8
9 9 .. toctree::
10 10 :maxdepth: 1
11 11
12 release-notes-4.26.0.rst
12 13 release-notes-4.25.2.rst
13 14 release-notes-4.25.1.rst
14 15 release-notes-4.25.0.rst
15 16 release-notes-4.24.1.rst
16 17 release-notes-4.24.0.rst
17 18 release-notes-4.23.2.rst
18 19 release-notes-4.23.1.rst
19 20 release-notes-4.23.0.rst
20 21 release-notes-4.22.0.rst
21 22 release-notes-4.21.0.rst
22 23 release-notes-4.20.1.rst
23 24 release-notes-4.20.0.rst
24 25 release-notes-4.19.3.rst
25 26 release-notes-4.19.2.rst
26 27 release-notes-4.19.1.rst
27 28 release-notes-4.19.0.rst
28 29 release-notes-4.18.3.rst
29 30 release-notes-4.18.2.rst
30 31 release-notes-4.18.1.rst
31 32 release-notes-4.18.0.rst
32 33 release-notes-4.17.4.rst
33 34 release-notes-4.17.3.rst
34 35 release-notes-4.17.2.rst
35 36 release-notes-4.17.1.rst
36 37 release-notes-4.17.0.rst
37 38 release-notes-4.16.2.rst
38 39 release-notes-4.16.1.rst
39 40 release-notes-4.16.0.rst
40 41 release-notes-4.15.2.rst
41 42 release-notes-4.15.1.rst
42 43 release-notes-4.15.0.rst
43 44 release-notes-4.14.1.rst
44 45 release-notes-4.14.0.rst
45 46 release-notes-4.13.3.rst
46 47 release-notes-4.13.2.rst
47 48 release-notes-4.13.1.rst
48 49 release-notes-4.13.0.rst
49 50 release-notes-4.12.4.rst
50 51 release-notes-4.12.3.rst
51 52 release-notes-4.12.2.rst
52 53 release-notes-4.12.1.rst
53 54 release-notes-4.12.0.rst
54 55 release-notes-4.11.6.rst
55 56 release-notes-4.11.5.rst
56 57 release-notes-4.11.4.rst
57 58 release-notes-4.11.3.rst
58 59 release-notes-4.11.2.rst
59 60 release-notes-4.11.1.rst
60 61 release-notes-4.11.0.rst
61 62 release-notes-4.10.6.rst
62 63 release-notes-4.10.5.rst
63 64 release-notes-4.10.4.rst
64 65 release-notes-4.10.3.rst
65 66 release-notes-4.10.2.rst
66 67 release-notes-4.10.1.rst
67 68 release-notes-4.10.0.rst
68 69 release-notes-4.9.1.rst
69 70 release-notes-4.9.0.rst
70 71 release-notes-4.8.0.rst
71 72 release-notes-4.7.2.rst
72 73 release-notes-4.7.1.rst
73 74 release-notes-4.7.0.rst
74 75 release-notes-4.6.1.rst
75 76 release-notes-4.6.0.rst
76 77 release-notes-4.5.2.rst
77 78 release-notes-4.5.1.rst
78 79 release-notes-4.5.0.rst
79 80 release-notes-4.4.2.rst
80 81 release-notes-4.4.1.rst
81 82 release-notes-4.4.0.rst
82 83 release-notes-4.3.1.rst
83 84 release-notes-4.3.0.rst
84 85 release-notes-4.2.1.rst
85 86 release-notes-4.2.0.rst
86 87 release-notes-4.1.2.rst
87 88 release-notes-4.1.1.rst
88 89 release-notes-4.1.0.rst
89 90 release-notes-4.0.1.rst
90 91 release-notes-4.0.0.rst
91 92
92 93 |RCE| 3.x Versions
93 94 ------------------
94 95
95 96 .. toctree::
96 97 :maxdepth: 1
97 98
98 99 release-notes-3.8.4.rst
99 100 release-notes-3.8.3.rst
100 101 release-notes-3.8.2.rst
101 102 release-notes-3.8.1.rst
102 103 release-notes-3.8.0.rst
103 104 release-notes-3.7.1.rst
104 105 release-notes-3.7.0.rst
105 106 release-notes-3.6.1.rst
106 107 release-notes-3.6.0.rst
107 108 release-notes-3.5.2.rst
108 109 release-notes-3.5.1.rst
109 110 release-notes-3.5.0.rst
110 111 release-notes-3.4.1.rst
111 112 release-notes-3.4.0.rst
112 113 release-notes-3.3.4.rst
113 114 release-notes-3.3.3.rst
114 115 release-notes-3.3.2.rst
115 116 release-notes-3.3.1.rst
116 117 release-notes-3.3.0.rst
117 118 release-notes-3.2.3.rst
118 119 release-notes-3.2.2.rst
119 120 release-notes-3.2.1.rst
120 121 release-notes-3.2.0.rst
121 122 release-notes-3.1.1.rst
122 123 release-notes-3.1.0.rst
123 124 release-notes-3.0.2.rst
124 125 release-notes-3.0.1.rst
125 126 release-notes-3.0.0.rst
126 127
127 128 |RCE| 2.x Versions
128 129 ------------------
129 130
130 131 .. toctree::
131 132 :maxdepth: 1
132 133
133 134 release-notes-2.2.8.rst
134 135 release-notes-2.2.7.rst
135 136 release-notes-2.2.6.rst
136 137 release-notes-2.2.5.rst
137 138 release-notes-2.2.4.rst
138 139 release-notes-2.2.3.rst
139 140 release-notes-2.2.2.rst
140 141 release-notes-2.2.1.rst
141 142 release-notes-2.2.0.rst
142 143 release-notes-2.1.0.rst
143 144 release-notes-2.0.2.rst
144 145 release-notes-2.0.1.rst
145 146 release-notes-2.0.0.rst
146 147
147 148 |RCE| 1.x Versions
148 149 ------------------
149 150
150 151 .. toctree::
151 152 :maxdepth: 1
152 153
153 154 release-notes-1.7.2.rst
154 155 release-notes-1.7.1.rst
155 156 release-notes-1.7.0.rst
156 157 release-notes-1.6.0.rst
@@ -1,2520 +1,2520 b''
1 1 # Generated by pip2nix 0.8.0.dev1
2 2 # See https://github.com/johbo/pip2nix
3 3
4 4 { pkgs, fetchurl, fetchgit, fetchhg }:
5 5
6 6 self: super: {
7 7 "alembic" = super.buildPythonPackage {
8 8 name = "alembic-1.4.2";
9 9 doCheck = false;
10 10 propagatedBuildInputs = [
11 11 self."sqlalchemy"
12 12 self."mako"
13 13 self."python-editor"
14 14 self."python-dateutil"
15 15 ];
16 16 src = fetchurl {
17 17 url = "https://files.pythonhosted.org/packages/60/1e/cabc75a189de0fbb2841d0975243e59bde8b7822bacbb95008ac6fe9ad47/alembic-1.4.2.tar.gz";
18 18 sha256 = "1gsdrzx9h7wfva200qvvsc9sn4w79mk2vs0bbnzjhxi1jw2b0nh3";
19 19 };
20 20 meta = {
21 21 license = [ pkgs.lib.licenses.mit ];
22 22 };
23 23 };
24 24 "amqp" = super.buildPythonPackage {
25 25 name = "amqp-2.5.2";
26 26 doCheck = false;
27 27 propagatedBuildInputs = [
28 28 self."vine"
29 29 ];
30 30 src = fetchurl {
31 31 url = "https://files.pythonhosted.org/packages/92/1d/433541994a5a69f4ad2fff39746ddbb0bdedb0ea0d85673eb0db68a7edd9/amqp-2.5.2.tar.gz";
32 32 sha256 = "13dhhfxjrqcjybnq4zahg92mydhpg2l76nxcmq7d560687wsxwbp";
33 33 };
34 34 meta = {
35 35 license = [ pkgs.lib.licenses.bsdOriginal ];
36 36 };
37 37 };
38 38 "apispec" = super.buildPythonPackage {
39 39 name = "apispec-1.0.0";
40 40 doCheck = false;
41 41 propagatedBuildInputs = [
42 42 self."PyYAML"
43 43 ];
44 44 src = fetchurl {
45 45 url = "https://files.pythonhosted.org/packages/67/15/346c04988dd67d36007e28145504c520491930c878b1f484a97b27a8f497/apispec-1.0.0.tar.gz";
46 46 sha256 = "1712w1anvqrvadjjpvai84vbaygaxabd3zz5lxihdzwzs4gvi9sp";
47 47 };
48 48 meta = {
49 49 license = [ pkgs.lib.licenses.mit ];
50 50 };
51 51 };
52 52 "appenlight-client" = super.buildPythonPackage {
53 53 name = "appenlight-client-0.6.26";
54 54 doCheck = false;
55 55 propagatedBuildInputs = [
56 56 self."webob"
57 57 self."requests"
58 58 self."six"
59 59 ];
60 60 src = fetchurl {
61 61 url = "https://files.pythonhosted.org/packages/2e/56/418fc10379b96e795ee39a15e69a730c222818af04c3821fa354eaa859ec/appenlight_client-0.6.26.tar.gz";
62 62 sha256 = "0s9xw3sb8s3pk73k78nnq4jil3q4mk6bczfa1fmgfx61kdxl2712";
63 63 };
64 64 meta = {
65 65 license = [ pkgs.lib.licenses.bsdOriginal ];
66 66 };
67 67 };
68 68 "asn1crypto" = super.buildPythonPackage {
69 69 name = "asn1crypto-0.24.0";
70 70 doCheck = false;
71 71 src = fetchurl {
72 72 url = "https://files.pythonhosted.org/packages/fc/f1/8db7daa71f414ddabfa056c4ef792e1461ff655c2ae2928a2b675bfed6b4/asn1crypto-0.24.0.tar.gz";
73 73 sha256 = "0jaf8rf9dx1lf23xfv2cdd5h52f1qr3w8k63985bc35g3d220p4x";
74 74 };
75 75 meta = {
76 76 license = [ pkgs.lib.licenses.mit ];
77 77 };
78 78 };
79 79 "atomicwrites" = super.buildPythonPackage {
80 80 name = "atomicwrites-1.3.0";
81 81 doCheck = false;
82 82 src = fetchurl {
83 83 url = "https://files.pythonhosted.org/packages/ec/0f/cd484ac8820fed363b374af30049adc8fd13065720fd4f4c6be8a2309da7/atomicwrites-1.3.0.tar.gz";
84 84 sha256 = "19ngcscdf3jsqmpcxn6zl5b6anmsajb6izp1smcd1n02midl9abm";
85 85 };
86 86 meta = {
87 87 license = [ pkgs.lib.licenses.mit ];
88 88 };
89 89 };
90 90 "attrs" = super.buildPythonPackage {
91 91 name = "attrs-19.3.0";
92 92 doCheck = false;
93 93 src = fetchurl {
94 94 url = "https://files.pythonhosted.org/packages/98/c3/2c227e66b5e896e15ccdae2e00bbc69aa46e9a8ce8869cc5fa96310bf612/attrs-19.3.0.tar.gz";
95 95 sha256 = "0wky4h28n7xnr6xv69p9z6kv8bzn50d10c3drmd9ds8gawbcxdzp";
96 96 };
97 97 meta = {
98 98 license = [ pkgs.lib.licenses.mit ];
99 99 };
100 100 };
101 101 "babel" = super.buildPythonPackage {
102 102 name = "babel-1.3";
103 103 doCheck = false;
104 104 propagatedBuildInputs = [
105 105 self."pytz"
106 106 ];
107 107 src = fetchurl {
108 108 url = "https://files.pythonhosted.org/packages/33/27/e3978243a03a76398c384c83f7ca879bc6e8f1511233a621fcada135606e/Babel-1.3.tar.gz";
109 109 sha256 = "0bnin777lc53nxd1hp3apq410jj5wx92n08h7h4izpl4f4sx00lz";
110 110 };
111 111 meta = {
112 112 license = [ pkgs.lib.licenses.bsdOriginal ];
113 113 };
114 114 };
115 115 "backports.shutil-get-terminal-size" = super.buildPythonPackage {
116 116 name = "backports.shutil-get-terminal-size-1.0.0";
117 117 doCheck = false;
118 118 src = fetchurl {
119 119 url = "https://files.pythonhosted.org/packages/ec/9c/368086faa9c016efce5da3e0e13ba392c9db79e3ab740b763fe28620b18b/backports.shutil_get_terminal_size-1.0.0.tar.gz";
120 120 sha256 = "107cmn7g3jnbkp826zlj8rrj19fam301qvaqf0f3905f5217lgki";
121 121 };
122 122 meta = {
123 123 license = [ pkgs.lib.licenses.mit ];
124 124 };
125 125 };
126 126 "beaker" = super.buildPythonPackage {
127 127 name = "beaker-1.9.1";
128 128 doCheck = false;
129 129 propagatedBuildInputs = [
130 130 self."funcsigs"
131 131 ];
132 132 src = fetchurl {
133 133 url = "https://files.pythonhosted.org/packages/ca/14/a626188d0d0c7b55dd7cf1902046c2743bd392a7078bb53073e13280eb1e/Beaker-1.9.1.tar.gz";
134 134 sha256 = "08arsn61r255lhz6hcpn2lsiqpg30clla805ysx06wmbhvb6w9rj";
135 135 };
136 136 meta = {
137 137 license = [ pkgs.lib.licenses.bsdOriginal ];
138 138 };
139 139 };
140 140 "beautifulsoup4" = super.buildPythonPackage {
141 141 name = "beautifulsoup4-4.6.3";
142 142 doCheck = false;
143 143 src = fetchurl {
144 144 url = "https://files.pythonhosted.org/packages/88/df/86bffad6309f74f3ff85ea69344a078fc30003270c8df6894fca7a3c72ff/beautifulsoup4-4.6.3.tar.gz";
145 145 sha256 = "041dhalzjciw6qyzzq7a2k4h1yvyk76xigp35hv5ibnn448ydy4h";
146 146 };
147 147 meta = {
148 148 license = [ pkgs.lib.licenses.mit ];
149 149 };
150 150 };
151 151 "billiard" = super.buildPythonPackage {
152 152 name = "billiard-3.6.1.0";
153 153 doCheck = false;
154 154 src = fetchurl {
155 155 url = "https://files.pythonhosted.org/packages/68/1d/2aea8fbb0b1e1260a8a2e77352de2983d36d7ac01207cf14c2b9c6cc860e/billiard-3.6.1.0.tar.gz";
156 156 sha256 = "09hzy3aqi7visy4vmf4xiish61n0rq5nd3iwjydydps8yrs9r05q";
157 157 };
158 158 meta = {
159 159 license = [ pkgs.lib.licenses.bsdOriginal ];
160 160 };
161 161 };
162 162 "bleach" = super.buildPythonPackage {
163 163 name = "bleach-3.1.3";
164 164 doCheck = false;
165 165 propagatedBuildInputs = [
166 166 self."six"
167 167 self."webencodings"
168 168 ];
169 169 src = fetchurl {
170 170 url = "https://files.pythonhosted.org/packages/de/09/5267f8577a92487ed43bc694476c4629c6eca2e3c93fcf690a26bfe39e1d/bleach-3.1.3.tar.gz";
171 171 sha256 = "0al437aw4p2xp83az5hhlrp913nsf0cg6kg4qj3fjhv4wakxipzq";
172 172 };
173 173 meta = {
174 174 license = [ pkgs.lib.licenses.asl20 ];
175 175 };
176 176 };
177 177 "bumpversion" = super.buildPythonPackage {
178 178 name = "bumpversion-0.5.3";
179 179 doCheck = false;
180 180 src = fetchurl {
181 181 url = "https://files.pythonhosted.org/packages/14/41/8c9da3549f8e00c84f0432c3a8cf8ed6898374714676aab91501d48760db/bumpversion-0.5.3.tar.gz";
182 182 sha256 = "0zn7694yfipxg35ikkfh7kvgl2fissha3dnqad2c5bvsvmrwhi37";
183 183 };
184 184 meta = {
185 185 license = [ pkgs.lib.licenses.mit ];
186 186 };
187 187 };
188 188 "cachetools" = super.buildPythonPackage {
189 189 name = "cachetools-3.1.1";
190 190 doCheck = false;
191 191 src = fetchurl {
192 192 url = "https://files.pythonhosted.org/packages/ae/37/7fd45996b19200e0cb2027a0b6bef4636951c4ea111bfad36c71287247f6/cachetools-3.1.1.tar.gz";
193 193 sha256 = "16m69l6n6y1r1y7cklm92rr7v69ldig2n3lbl3j323w5jz7d78lf";
194 194 };
195 195 meta = {
196 196 license = [ pkgs.lib.licenses.mit ];
197 197 };
198 198 };
199 199 "celery" = super.buildPythonPackage {
200 200 name = "celery-4.3.0";
201 201 doCheck = false;
202 202 propagatedBuildInputs = [
203 203 self."pytz"
204 204 self."billiard"
205 205 self."kombu"
206 206 self."vine"
207 207 ];
208 208 src = fetchurl {
209 209 url = "https://files.pythonhosted.org/packages/a2/4b/d020836f751617e907e84753a41c92231cd4b673ff991b8ee9da52361323/celery-4.3.0.tar.gz";
210 210 sha256 = "1y8y0gbgkwimpxqnxq2rm5qz2vy01fvjiybnpm00y5rzd2m34iac";
211 211 };
212 212 meta = {
213 213 license = [ pkgs.lib.licenses.bsdOriginal ];
214 214 };
215 215 };
216 216 "certifi" = super.buildPythonPackage {
217 217 name = "certifi-2020.4.5.1";
218 218 doCheck = false;
219 219 src = fetchurl {
220 220 url = "https://files.pythonhosted.org/packages/b8/e2/a3a86a67c3fc8249ed305fc7b7d290ebe5e4d46ad45573884761ef4dea7b/certifi-2020.4.5.1.tar.gz";
221 221 sha256 = "06b5gfs7wmmipln8f3z928d2mmx2j4b3x7pnqmj6cvmyfh8v7z2i";
222 222 };
223 223 meta = {
224 224 license = [ pkgs.lib.licenses.mpl20 { fullName = "Mozilla Public License 2.0 (MPL 2.0)"; } ];
225 225 };
226 226 };
227 227 "cffi" = super.buildPythonPackage {
228 228 name = "cffi-1.12.3";
229 229 doCheck = false;
230 230 propagatedBuildInputs = [
231 231 self."pycparser"
232 232 ];
233 233 src = fetchurl {
234 234 url = "https://files.pythonhosted.org/packages/93/1a/ab8c62b5838722f29f3daffcc8d4bd61844aa9b5f437341cc890ceee483b/cffi-1.12.3.tar.gz";
235 235 sha256 = "0x075521fxwv0mfp4cqzk7lvmw4n94bjw601qkcv314z5s182704";
236 236 };
237 237 meta = {
238 238 license = [ pkgs.lib.licenses.mit ];
239 239 };
240 240 };
241 241 "chameleon" = super.buildPythonPackage {
242 242 name = "chameleon-2.24";
243 243 doCheck = false;
244 244 src = fetchurl {
245 245 url = "https://files.pythonhosted.org/packages/5a/9e/637379ffa13c5172b5c0e704833ffea6bf51cec7567f93fd6e903d53ed74/Chameleon-2.24.tar.gz";
246 246 sha256 = "0ykqr7syxfa6h9adjfnsv1gdsca2xzm22vmic8859n0f0j09abj5";
247 247 };
248 248 meta = {
249 249 license = [ { fullName = "BSD-like (http://repoze.org/license.html)"; } ];
250 250 };
251 251 };
252 252 "channelstream" = super.buildPythonPackage {
253 253 name = "channelstream-0.6.14";
254 254 doCheck = false;
255 255 propagatedBuildInputs = [
256 256 self."gevent"
257 257 self."ws4py"
258 258 self."marshmallow"
259 259 self."python-dateutil"
260 260 self."pyramid"
261 261 self."pyramid-jinja2"
262 262 self."pyramid-apispec"
263 263 self."itsdangerous"
264 264 self."requests"
265 265 self."six"
266 266 ];
267 267 src = fetchurl {
268 268 url = "https://files.pythonhosted.org/packages/d4/2d/86d6757ccd06ce673ee224123471da3d45251d061da7c580bfc259bad853/channelstream-0.6.14.tar.gz";
269 269 sha256 = "0qgy5j3rj6c8cslzidh32glhkrhbbdxjc008y69v8a0y3zyaz2d3";
270 270 };
271 271 meta = {
272 272 license = [ pkgs.lib.licenses.bsdOriginal ];
273 273 };
274 274 };
275 275 "chardet" = super.buildPythonPackage {
276 276 name = "chardet-3.0.4";
277 277 doCheck = false;
278 278 src = fetchurl {
279 279 url = "https://files.pythonhosted.org/packages/fc/bb/a5768c230f9ddb03acc9ef3f0d4a3cf93462473795d18e9535498c8f929d/chardet-3.0.4.tar.gz";
280 280 sha256 = "1bpalpia6r5x1kknbk11p1fzph56fmmnp405ds8icksd3knr5aw4";
281 281 };
282 282 meta = {
283 283 license = [ { fullName = "LGPL"; } { fullName = "GNU Library or Lesser General Public License (LGPL)"; } ];
284 284 };
285 285 };
286 286 "click" = super.buildPythonPackage {
287 287 name = "click-7.0";
288 288 doCheck = false;
289 289 src = fetchurl {
290 290 url = "https://files.pythonhosted.org/packages/f8/5c/f60e9d8a1e77005f664b76ff8aeaee5bc05d0a91798afd7f53fc998dbc47/Click-7.0.tar.gz";
291 291 sha256 = "1mzjixd4vjbjvzb6vylki9w1556a9qmdh35kzmq6cign46av952v";
292 292 };
293 293 meta = {
294 294 license = [ pkgs.lib.licenses.bsdOriginal ];
295 295 };
296 296 };
297 297 "colander" = super.buildPythonPackage {
298 298 name = "colander-1.7.0";
299 299 doCheck = false;
300 300 propagatedBuildInputs = [
301 301 self."translationstring"
302 302 self."iso8601"
303 303 self."enum34"
304 304 ];
305 305 src = fetchurl {
306 306 url = "https://files.pythonhosted.org/packages/db/e4/74ab06f54211917b41865cafc987ce511e35503de48da9bfe9358a1bdc3e/colander-1.7.0.tar.gz";
307 307 sha256 = "1wl1bqab307lbbcjx81i28s3yl6dlm4rf15fxawkjb6j48x1cn6p";
308 308 };
309 309 meta = {
310 310 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
311 311 };
312 312 };
313 313 "configobj" = super.buildPythonPackage {
314 314 name = "configobj-5.0.6";
315 315 doCheck = false;
316 316 propagatedBuildInputs = [
317 317 self."six"
318 318 ];
319 319 src = fetchurl {
320 320 url = "https://code.rhodecode.com/upstream/configobj/artifacts/download/0-012de99a-b1e1-4f64-a5c0-07a98a41b324.tar.gz?md5=6a513f51fe04b2c18cf84c1395a7c626";
321 321 sha256 = "0kqfrdfr14mw8yd8qwq14dv2xghpkjmd3yjsy8dfcbvpcc17xnxp";
322 322 };
323 323 meta = {
324 324 license = [ pkgs.lib.licenses.bsdOriginal ];
325 325 };
326 326 };
327 327 "configparser" = super.buildPythonPackage {
328 328 name = "configparser-4.0.2";
329 329 doCheck = false;
330 330 src = fetchurl {
331 331 url = "https://files.pythonhosted.org/packages/16/4f/48975536bd488d3a272549eb795ac4a13a5f7fcdc8995def77fbef3532ee/configparser-4.0.2.tar.gz";
332 332 sha256 = "1priacxym85yjcf68hh38w55nqswaxp71ryjyfdk222kg9l85ln7";
333 333 };
334 334 meta = {
335 335 license = [ pkgs.lib.licenses.mit ];
336 336 };
337 337 };
338 338 "contextlib2" = super.buildPythonPackage {
339 339 name = "contextlib2-0.6.0.post1";
340 340 doCheck = false;
341 341 src = fetchurl {
342 342 url = "https://files.pythonhosted.org/packages/02/54/669207eb72e3d8ae8b38aa1f0703ee87a0e9f88f30d3c0a47bebdb6de242/contextlib2-0.6.0.post1.tar.gz";
343 343 sha256 = "0bhnr2ac7wy5l85ji909gyljyk85n92w8pdvslmrvc8qih4r1x01";
344 344 };
345 345 meta = {
346 346 license = [ pkgs.lib.licenses.psfl ];
347 347 };
348 348 };
349 349 "cov-core" = super.buildPythonPackage {
350 350 name = "cov-core-1.15.0";
351 351 doCheck = false;
352 352 propagatedBuildInputs = [
353 353 self."coverage"
354 354 ];
355 355 src = fetchurl {
356 356 url = "https://files.pythonhosted.org/packages/4b/87/13e75a47b4ba1be06f29f6d807ca99638bedc6b57fa491cd3de891ca2923/cov-core-1.15.0.tar.gz";
357 357 sha256 = "0k3np9ymh06yv1ib96sb6wfsxjkqhmik8qfsn119vnhga9ywc52a";
358 358 };
359 359 meta = {
360 360 license = [ pkgs.lib.licenses.mit ];
361 361 };
362 362 };
363 363 "coverage" = super.buildPythonPackage {
364 364 name = "coverage-4.5.4";
365 365 doCheck = false;
366 366 src = fetchurl {
367 367 url = "https://files.pythonhosted.org/packages/85/d5/818d0e603685c4a613d56f065a721013e942088047ff1027a632948bdae6/coverage-4.5.4.tar.gz";
368 368 sha256 = "0p0j4di6h8k6ica7jwwj09azdcg4ycxq60i9qsskmsg94cd9yzg0";
369 369 };
370 370 meta = {
371 371 license = [ pkgs.lib.licenses.asl20 ];
372 372 };
373 373 };
374 374 "cryptography" = super.buildPythonPackage {
375 375 name = "cryptography-2.6.1";
376 376 doCheck = false;
377 377 propagatedBuildInputs = [
378 378 self."asn1crypto"
379 379 self."six"
380 380 self."cffi"
381 381 self."enum34"
382 382 self."ipaddress"
383 383 ];
384 384 src = fetchurl {
385 385 url = "https://files.pythonhosted.org/packages/07/ca/bc827c5e55918ad223d59d299fff92f3563476c3b00d0a9157d9c0217449/cryptography-2.6.1.tar.gz";
386 386 sha256 = "19iwz5avym5zl6jrrrkym1rdaa9h61j20ph4cswsqgv8xg5j3j16";
387 387 };
388 388 meta = {
389 389 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "BSD or Apache License, Version 2.0"; } pkgs.lib.licenses.asl20 ];
390 390 };
391 391 };
392 392 "cssselect" = super.buildPythonPackage {
393 393 name = "cssselect-1.0.3";
394 394 doCheck = false;
395 395 src = fetchurl {
396 396 url = "https://files.pythonhosted.org/packages/52/ea/f31e1d2e9eb130fda2a631e22eac369dc644e8807345fbed5113f2d6f92b/cssselect-1.0.3.tar.gz";
397 397 sha256 = "011jqa2jhmydhi0iz4v1w3cr540z5zas8g2bw8brdw4s4b2qnv86";
398 398 };
399 399 meta = {
400 400 license = [ pkgs.lib.licenses.bsdOriginal ];
401 401 };
402 402 };
403 403 "cssutils" = super.buildPythonPackage {
404 404 name = "cssutils-1.0.2";
405 405 doCheck = false;
406 406 src = fetchurl {
407 407 url = "https://files.pythonhosted.org/packages/5c/0b/c5f29d29c037e97043770b5e7c740b6252993e4b57f029b3cd03c78ddfec/cssutils-1.0.2.tar.gz";
408 408 sha256 = "1bxchrbqzapwijap0yhlxdil1w9bmwvgx77aizlkhc2mcxjg1z52";
409 409 };
410 410 meta = {
411 411 license = [ { fullName = "GNU Library or Lesser General Public License (LGPL)"; } { fullName = "LGPL 2.1 or later, see also http://cthedot.de/cssutils/"; } ];
412 412 };
413 413 };
414 414 "decorator" = super.buildPythonPackage {
415 415 name = "decorator-4.1.2";
416 416 doCheck = false;
417 417 src = fetchurl {
418 418 url = "https://files.pythonhosted.org/packages/bb/e0/f6e41e9091e130bf16d4437dabbac3993908e4d6485ecbc985ef1352db94/decorator-4.1.2.tar.gz";
419 419 sha256 = "1d8npb11kxyi36mrvjdpcjij76l5zfyrz2f820brf0l0rcw4vdkw";
420 420 };
421 421 meta = {
422 422 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "new BSD License"; } ];
423 423 };
424 424 };
425 425 "deform" = super.buildPythonPackage {
426 426 name = "deform-2.0.8";
427 427 doCheck = false;
428 428 propagatedBuildInputs = [
429 429 self."chameleon"
430 430 self."colander"
431 431 self."iso8601"
432 432 self."peppercorn"
433 433 self."translationstring"
434 434 self."zope.deprecation"
435 435 ];
436 436 src = fetchurl {
437 437 url = "https://files.pythonhosted.org/packages/21/d0/45fdf891a82722c02fc2da319cf2d1ae6b5abf9e470ad3762135a895a868/deform-2.0.8.tar.gz";
438 438 sha256 = "0wbjv98sib96649aqaygzxnrkclyy50qij2rha6fn1i4c86bfdl9";
439 439 };
440 440 meta = {
441 441 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
442 442 };
443 443 };
444 444 "defusedxml" = super.buildPythonPackage {
445 445 name = "defusedxml-0.6.0";
446 446 doCheck = false;
447 447 src = fetchurl {
448 448 url = "https://files.pythonhosted.org/packages/a4/5f/f8aa58ca0cf01cbcee728abc9d88bfeb74e95e6cb4334cfd5bed5673ea77/defusedxml-0.6.0.tar.gz";
449 449 sha256 = "1xbp8fivl3wlbyg2jrvs4lalaqv1xp9a9f29p75wdx2s2d6h717n";
450 450 };
451 451 meta = {
452 452 license = [ pkgs.lib.licenses.psfl ];
453 453 };
454 454 };
455 455 "dm.xmlsec.binding" = super.buildPythonPackage {
456 456 name = "dm.xmlsec.binding-1.3.7";
457 457 doCheck = false;
458 458 propagatedBuildInputs = [
459 459 self."setuptools"
460 460 self."lxml"
461 461 ];
462 462 src = fetchurl {
463 463 url = "https://files.pythonhosted.org/packages/2c/9e/7651982d50252692991acdae614af821fd6c79bc8dcd598ad71d55be8fc7/dm.xmlsec.binding-1.3.7.tar.gz";
464 464 sha256 = "03jjjscx1pz2nc0dwiw9nia02qbz1c6f0f9zkyr8fmvys2n5jkb3";
465 465 };
466 466 meta = {
467 467 license = [ pkgs.lib.licenses.bsdOriginal ];
468 468 };
469 469 };
470 470 "docutils" = super.buildPythonPackage {
471 471 name = "docutils-0.16";
472 472 doCheck = false;
473 473 src = fetchurl {
474 474 url = "https://files.pythonhosted.org/packages/2f/e0/3d435b34abd2d62e8206171892f174b180cd37b09d57b924ca5c2ef2219d/docutils-0.16.tar.gz";
475 475 sha256 = "1z3qliszqca9m719q3qhdkh0ghh90g500avzdgi7pl77x5h3mpn2";
476 476 };
477 477 meta = {
478 478 license = [ pkgs.lib.licenses.bsdOriginal pkgs.lib.licenses.publicDomain pkgs.lib.licenses.gpl1 { fullName = "public domain, Python, 2-Clause BSD, GPL 3 (see COPYING.txt)"; } pkgs.lib.licenses.psfl ];
479 479 };
480 480 };
481 481 "dogpile.cache" = super.buildPythonPackage {
482 482 name = "dogpile.cache-0.9.0";
483 483 doCheck = false;
484 484 propagatedBuildInputs = [
485 485 self."decorator"
486 486 ];
487 487 src = fetchurl {
488 488 url = "https://files.pythonhosted.org/packages/ac/6a/9ac405686a94b7f009a20a50070a5786b0e1aedc707b88d40d0c4b51a82e/dogpile.cache-0.9.0.tar.gz";
489 489 sha256 = "0sr1fn6b4k5bh0cscd9yi8csqxvj4ngzildav58x5p694mc86j5k";
490 490 };
491 491 meta = {
492 492 license = [ pkgs.lib.licenses.bsdOriginal ];
493 493 };
494 494 };
495 495 "dogpile.core" = super.buildPythonPackage {
496 496 name = "dogpile.core-0.4.1";
497 497 doCheck = false;
498 498 src = fetchurl {
499 499 url = "https://files.pythonhosted.org/packages/0e/77/e72abc04c22aedf874301861e5c1e761231c288b5de369c18be8f4b5c9bb/dogpile.core-0.4.1.tar.gz";
500 500 sha256 = "0xpdvg4kr1isfkrh1rfsh7za4q5a5s6l2kf9wpvndbwf3aqjyrdy";
501 501 };
502 502 meta = {
503 503 license = [ pkgs.lib.licenses.bsdOriginal ];
504 504 };
505 505 };
506 506 "ecdsa" = super.buildPythonPackage {
507 507 name = "ecdsa-0.13.2";
508 508 doCheck = false;
509 509 src = fetchurl {
510 510 url = "https://files.pythonhosted.org/packages/51/76/139bf6e9b7b6684d5891212cdbd9e0739f2bfc03f380a1a6ffa700f392ac/ecdsa-0.13.2.tar.gz";
511 511 sha256 = "116qaq7bh4lcynzi613960jhsnn19v0kmsqwahiwjfj14gx4y0sw";
512 512 };
513 513 meta = {
514 514 license = [ pkgs.lib.licenses.mit ];
515 515 };
516 516 };
517 517 "elasticsearch" = super.buildPythonPackage {
518 518 name = "elasticsearch-6.3.1";
519 519 doCheck = false;
520 520 propagatedBuildInputs = [
521 521 self."urllib3"
522 522 ];
523 523 src = fetchurl {
524 524 url = "https://files.pythonhosted.org/packages/9d/ce/c4664e8380e379a9402ecfbaf158e56396da90d520daba21cfa840e0eb71/elasticsearch-6.3.1.tar.gz";
525 525 sha256 = "12y93v0yn7a4xmf969239g8gb3l4cdkclfpbk1qc8hx5qkymrnma";
526 526 };
527 527 meta = {
528 528 license = [ pkgs.lib.licenses.asl20 ];
529 529 };
530 530 };
531 531 "elasticsearch-dsl" = super.buildPythonPackage {
532 532 name = "elasticsearch-dsl-6.3.1";
533 533 doCheck = false;
534 534 propagatedBuildInputs = [
535 535 self."six"
536 536 self."python-dateutil"
537 537 self."elasticsearch"
538 538 self."ipaddress"
539 539 ];
540 540 src = fetchurl {
541 541 url = "https://files.pythonhosted.org/packages/4c/0d/1549f50c591db6bb4e66cbcc8d34a6e537c3d89aa426b167c244fd46420a/elasticsearch-dsl-6.3.1.tar.gz";
542 542 sha256 = "1gh8a0shqi105k325hgwb9avrpdjh0mc6mxwfg9ba7g6lssb702z";
543 543 };
544 544 meta = {
545 545 license = [ pkgs.lib.licenses.asl20 ];
546 546 };
547 547 };
548 548 "elasticsearch1" = super.buildPythonPackage {
549 549 name = "elasticsearch1-1.10.0";
550 550 doCheck = false;
551 551 propagatedBuildInputs = [
552 552 self."urllib3"
553 553 ];
554 554 src = fetchurl {
555 555 url = "https://files.pythonhosted.org/packages/a6/eb/73e75f9681fa71e3157b8ee878534235d57f24ee64f0e77f8d995fb57076/elasticsearch1-1.10.0.tar.gz";
556 556 sha256 = "0g89444kd5zwql4vbvyrmi2m6l6dcj6ga98j4hqxyyyz6z20aki2";
557 557 };
558 558 meta = {
559 559 license = [ pkgs.lib.licenses.asl20 ];
560 560 };
561 561 };
562 562 "elasticsearch1-dsl" = super.buildPythonPackage {
563 563 name = "elasticsearch1-dsl-0.0.12";
564 564 doCheck = false;
565 565 propagatedBuildInputs = [
566 566 self."six"
567 567 self."python-dateutil"
568 568 self."elasticsearch1"
569 569 ];
570 570 src = fetchurl {
571 571 url = "https://files.pythonhosted.org/packages/eb/9d/785342775cb10eddc9b8d7457d618a423b4f0b89d8b2b2d1bc27190d71db/elasticsearch1-dsl-0.0.12.tar.gz";
572 572 sha256 = "0ig1ly39v93hba0z975wnhbmzwj28w6w1sqlr2g7cn5spp732bhk";
573 573 };
574 574 meta = {
575 575 license = [ pkgs.lib.licenses.asl20 ];
576 576 };
577 577 };
578 578 "elasticsearch2" = super.buildPythonPackage {
579 579 name = "elasticsearch2-2.5.1";
580 580 doCheck = false;
581 581 propagatedBuildInputs = [
582 582 self."urllib3"
583 583 ];
584 584 src = fetchurl {
585 585 url = "https://files.pythonhosted.org/packages/f6/09/f9b24aa6b1120bea371cd57ef6f57c7694cf16660469456a8be6c2bdbe22/elasticsearch2-2.5.1.tar.gz";
586 586 sha256 = "19k2znpjfyp0hrq73cz7pjyj289040xpsxsm0xhh4jfh6y551g7k";
587 587 };
588 588 meta = {
589 589 license = [ pkgs.lib.licenses.asl20 ];
590 590 };
591 591 };
592 592 "entrypoints" = super.buildPythonPackage {
593 593 name = "entrypoints-0.2.2";
594 594 doCheck = false;
595 595 propagatedBuildInputs = [
596 596 self."configparser"
597 597 ];
598 598 src = fetchurl {
599 599 url = "https://code.rhodecode.com/upstream/entrypoints/artifacts/download/0-8e9ee9e4-c4db-409c-b07e-81568fd1832d.tar.gz?md5=3a027b8ff1d257b91fe257de6c43357d";
600 600 sha256 = "0qih72n2myclanplqipqxpgpj9d2yhff1pz5d02zq1cfqyd173w5";
601 601 };
602 602 meta = {
603 603 license = [ pkgs.lib.licenses.mit ];
604 604 };
605 605 };
606 606 "enum34" = super.buildPythonPackage {
607 607 name = "enum34-1.1.10";
608 608 doCheck = false;
609 609 src = fetchurl {
610 610 url = "https://files.pythonhosted.org/packages/11/c4/2da1f4952ba476677a42f25cd32ab8aaf0e1c0d0e00b89822b835c7e654c/enum34-1.1.10.tar.gz";
611 611 sha256 = "0j7ji699fwswm4vg6w1v07fkbf8dkzdm6gfh88jvs5nqgr3sgrnc";
612 612 };
613 613 meta = {
614 614 license = [ pkgs.lib.licenses.bsdOriginal ];
615 615 };
616 616 };
617 617 "formencode" = super.buildPythonPackage {
618 618 name = "formencode-1.2.4";
619 619 doCheck = false;
620 620 src = fetchurl {
621 621 url = "https://files.pythonhosted.org/packages/8e/59/0174271a6f004512e0201188593e6d319db139d14cb7490e488bbb078015/FormEncode-1.2.4.tar.gz";
622 622 sha256 = "1fgy04sdy4yry5xcjls3x3xy30dqwj58ycnkndim819jx0788w42";
623 623 };
624 624 meta = {
625 625 license = [ pkgs.lib.licenses.psfl ];
626 626 };
627 627 };
628 628 "funcsigs" = super.buildPythonPackage {
629 629 name = "funcsigs-1.0.2";
630 630 doCheck = false;
631 631 src = fetchurl {
632 632 url = "https://files.pythonhosted.org/packages/94/4a/db842e7a0545de1cdb0439bb80e6e42dfe82aaeaadd4072f2263a4fbed23/funcsigs-1.0.2.tar.gz";
633 633 sha256 = "0l4g5818ffyfmfs1a924811azhjj8ax9xd1cffr1mzd3ycn0zfx7";
634 634 };
635 635 meta = {
636 636 license = [ { fullName = "ASL"; } pkgs.lib.licenses.asl20 ];
637 637 };
638 638 };
639 639 "functools32" = super.buildPythonPackage {
640 640 name = "functools32-3.2.3.post2";
641 641 doCheck = false;
642 642 src = fetchurl {
643 643 url = "https://files.pythonhosted.org/packages/c5/60/6ac26ad05857c601308d8fb9e87fa36d0ebf889423f47c3502ef034365db/functools32-3.2.3-2.tar.gz";
644 644 sha256 = "0v8ya0b58x47wp216n1zamimv4iw57cxz3xxhzix52jkw3xks9gn";
645 645 };
646 646 meta = {
647 647 license = [ pkgs.lib.licenses.psfl ];
648 648 };
649 649 };
650 650 "future" = super.buildPythonPackage {
651 651 name = "future-0.14.3";
652 652 doCheck = false;
653 653 src = fetchurl {
654 654 url = "https://files.pythonhosted.org/packages/83/80/8ef3a11a15f8eaafafa0937b20c1b3f73527e69ab6b3fa1cf94a5a96aabb/future-0.14.3.tar.gz";
655 655 sha256 = "1savk7jx7hal032f522c5ajhh8fra6gmnadrj9adv5qxi18pv1b2";
656 656 };
657 657 meta = {
658 658 license = [ { fullName = "OSI Approved"; } pkgs.lib.licenses.mit ];
659 659 };
660 660 };
661 661 "futures" = super.buildPythonPackage {
662 662 name = "futures-3.0.2";
663 663 doCheck = false;
664 664 src = fetchurl {
665 665 url = "https://files.pythonhosted.org/packages/f8/e7/fc0fcbeb9193ba2d4de00b065e7fd5aecd0679e93ce95a07322b2b1434f4/futures-3.0.2.tar.gz";
666 666 sha256 = "0mz2pbgxbc2nbib1szifi07whjbfs4r02pv2z390z7p410awjgyw";
667 667 };
668 668 meta = {
669 669 license = [ pkgs.lib.licenses.bsdOriginal ];
670 670 };
671 671 };
672 672 "gevent" = super.buildPythonPackage {
673 673 name = "gevent-1.5.0";
674 674 doCheck = false;
675 675 propagatedBuildInputs = [
676 676 self."greenlet"
677 677 ];
678 678 src = fetchurl {
679 679 url = "https://files.pythonhosted.org/packages/5a/79/2c63d385d017b5dd7d70983a463dfd25befae70c824fedb857df6e72eff2/gevent-1.5.0.tar.gz";
680 680 sha256 = "0aac3d4vhv5n4rsb6cqzq0d1xx9immqz4fmpddw35yxkwdc450dj";
681 681 };
682 682 meta = {
683 683 license = [ pkgs.lib.licenses.mit ];
684 684 };
685 685 };
686 686 "gnureadline" = super.buildPythonPackage {
687 687 name = "gnureadline-6.3.8";
688 688 doCheck = false;
689 689 src = fetchurl {
690 690 url = "https://files.pythonhosted.org/packages/50/64/86085c823cd78f9df9d8e33dce0baa71618016f8860460b82cf6610e1eb3/gnureadline-6.3.8.tar.gz";
691 691 sha256 = "0ddhj98x2nv45iz4aadk4b9m0b1kpsn1xhcbypn5cd556knhiqjq";
692 692 };
693 693 meta = {
694 694 license = [ { fullName = "GNU General Public License v3 (GPLv3)"; } pkgs.lib.licenses.gpl1 ];
695 695 };
696 696 };
697 697 "gprof2dot" = super.buildPythonPackage {
698 698 name = "gprof2dot-2017.9.19";
699 699 doCheck = false;
700 700 src = fetchurl {
701 701 url = "https://files.pythonhosted.org/packages/9d/36/f977122502979f3dfb50704979c9ed70e6b620787942b089bf1af15f5aba/gprof2dot-2017.9.19.tar.gz";
702 702 sha256 = "17ih23ld2nzgc3xwgbay911l6lh96jp1zshmskm17n1gg2i7mg6f";
703 703 };
704 704 meta = {
705 705 license = [ { fullName = "GNU Lesser General Public License v3 or later (LGPLv3+)"; } { fullName = "LGPL"; } ];
706 706 };
707 707 };
708 708 "greenlet" = super.buildPythonPackage {
709 709 name = "greenlet-0.4.15";
710 710 doCheck = false;
711 711 src = fetchurl {
712 712 url = "https://files.pythonhosted.org/packages/f8/e8/b30ae23b45f69aa3f024b46064c0ac8e5fcb4f22ace0dca8d6f9c8bbe5e7/greenlet-0.4.15.tar.gz";
713 713 sha256 = "1g4g1wwc472ds89zmqlpyan3fbnzpa8qm48z3z1y6mlk44z485ll";
714 714 };
715 715 meta = {
716 716 license = [ pkgs.lib.licenses.mit ];
717 717 };
718 718 };
719 719 "gunicorn" = super.buildPythonPackage {
720 720 name = "gunicorn-19.9.0";
721 721 doCheck = false;
722 722 src = fetchurl {
723 723 url = "https://files.pythonhosted.org/packages/47/52/68ba8e5e8ba251e54006a49441f7ccabca83b6bef5aedacb4890596c7911/gunicorn-19.9.0.tar.gz";
724 724 sha256 = "1wzlf4xmn6qjirh5w81l6i6kqjnab1n1qqkh7zsj1yb6gh4n49ps";
725 725 };
726 726 meta = {
727 727 license = [ pkgs.lib.licenses.mit ];
728 728 };
729 729 };
730 730 "hupper" = super.buildPythonPackage {
731 731 name = "hupper-1.10.2";
732 732 doCheck = false;
733 733 src = fetchurl {
734 734 url = "https://files.pythonhosted.org/packages/41/24/ea90fef04706e54bd1635c05c50dc9cf87cda543c59303a03e7aa7dda0ce/hupper-1.10.2.tar.gz";
735 735 sha256 = "0am0p6g5cz6xmcaf04xq8q6dzdd9qz0phj6gcmpsckf2mcyza61q";
736 736 };
737 737 meta = {
738 738 license = [ pkgs.lib.licenses.mit ];
739 739 };
740 740 };
741 741 "idna" = super.buildPythonPackage {
742 742 name = "idna-2.8";
743 743 doCheck = false;
744 744 src = fetchurl {
745 745 url = "https://files.pythonhosted.org/packages/ad/13/eb56951b6f7950cadb579ca166e448ba77f9d24efc03edd7e55fa57d04b7/idna-2.8.tar.gz";
746 746 sha256 = "01rlkigdxg17sf9yar1jl8n18ls59367wqh59hnawlyg53vb6my3";
747 747 };
748 748 meta = {
749 749 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "BSD-like"; } ];
750 750 };
751 751 };
752 752 "importlib-metadata" = super.buildPythonPackage {
753 753 name = "importlib-metadata-1.6.0";
754 754 doCheck = false;
755 755 propagatedBuildInputs = [
756 756 self."zipp"
757 757 self."pathlib2"
758 758 self."contextlib2"
759 759 self."configparser"
760 760 ];
761 761 src = fetchurl {
762 762 url = "https://files.pythonhosted.org/packages/b4/1b/baab42e3cd64c9d5caac25a9d6c054f8324cdc38975a44d600569f1f7158/importlib_metadata-1.6.0.tar.gz";
763 763 sha256 = "07icyggasn38yv2swdrd8z6i0plazmc9adavsdkbqqj91j53ll9l";
764 764 };
765 765 meta = {
766 766 license = [ pkgs.lib.licenses.asl20 ];
767 767 };
768 768 };
769 769 "infrae.cache" = super.buildPythonPackage {
770 770 name = "infrae.cache-1.0.1";
771 771 doCheck = false;
772 772 propagatedBuildInputs = [
773 773 self."beaker"
774 774 self."repoze.lru"
775 775 ];
776 776 src = fetchurl {
777 777 url = "https://files.pythonhosted.org/packages/bb/f0/e7d5e984cf6592fd2807dc7bc44a93f9d18e04e6a61f87fdfb2622422d74/infrae.cache-1.0.1.tar.gz";
778 778 sha256 = "1dvqsjn8vw253wz9d1pz17j79mf4bs53dvp2qxck2qdp1am1njw4";
779 779 };
780 780 meta = {
781 781 license = [ pkgs.lib.licenses.zpl21 ];
782 782 };
783 783 };
784 784 "invoke" = super.buildPythonPackage {
785 785 name = "invoke-0.13.0";
786 786 doCheck = false;
787 787 src = fetchurl {
788 788 url = "https://files.pythonhosted.org/packages/47/bf/d07ef52fa1ac645468858bbac7cb95b246a972a045e821493d17d89c81be/invoke-0.13.0.tar.gz";
789 789 sha256 = "0794vhgxfmkh0vzkkg5cfv1w82g3jc3xr18wim29far9qpx9468s";
790 790 };
791 791 meta = {
792 792 license = [ pkgs.lib.licenses.bsdOriginal ];
793 793 };
794 794 };
795 795 "ipaddress" = super.buildPythonPackage {
796 796 name = "ipaddress-1.0.23";
797 797 doCheck = false;
798 798 src = fetchurl {
799 799 url = "https://files.pythonhosted.org/packages/b9/9a/3e9da40ea28b8210dd6504d3fe9fe7e013b62bf45902b458d1cdc3c34ed9/ipaddress-1.0.23.tar.gz";
800 800 sha256 = "1qp743h30s04m3cg3yk3fycad930jv17q7dsslj4mfw0jlvf1y5p";
801 801 };
802 802 meta = {
803 803 license = [ pkgs.lib.licenses.psfl ];
804 804 };
805 805 };
806 806 "ipdb" = super.buildPythonPackage {
807 807 name = "ipdb-0.13.2";
808 808 doCheck = false;
809 809 propagatedBuildInputs = [
810 810 self."setuptools"
811 811 self."ipython"
812 812 ];
813 813 src = fetchurl {
814 814 url = "https://files.pythonhosted.org/packages/2c/bb/a3e1a441719ebd75c6dac8170d3ddba884b7ee8a5c0f9aefa7297386627a/ipdb-0.13.2.tar.gz";
815 815 sha256 = "0jcd849rx30y3wcgzsqbn06v0yjlzvb9x3076q0yxpycdwm1ryvp";
816 816 };
817 817 meta = {
818 818 license = [ pkgs.lib.licenses.bsdOriginal ];
819 819 };
820 820 };
821 821 "ipython" = super.buildPythonPackage {
822 822 name = "ipython-5.1.0";
823 823 doCheck = false;
824 824 propagatedBuildInputs = [
825 825 self."setuptools"
826 826 self."decorator"
827 827 self."pickleshare"
828 828 self."simplegeneric"
829 829 self."traitlets"
830 830 self."prompt-toolkit"
831 831 self."pygments"
832 832 self."pexpect"
833 833 self."backports.shutil-get-terminal-size"
834 834 self."pathlib2"
835 835 self."pexpect"
836 836 ];
837 837 src = fetchurl {
838 838 url = "https://files.pythonhosted.org/packages/89/63/a9292f7cd9d0090a0f995e1167f3f17d5889dcbc9a175261719c513b9848/ipython-5.1.0.tar.gz";
839 839 sha256 = "0qdrf6aj9kvjczd5chj1my8y2iq09am9l8bb2a1334a52d76kx3y";
840 840 };
841 841 meta = {
842 842 license = [ pkgs.lib.licenses.bsdOriginal ];
843 843 };
844 844 };
845 845 "ipython-genutils" = super.buildPythonPackage {
846 846 name = "ipython-genutils-0.2.0";
847 847 doCheck = false;
848 848 src = fetchurl {
849 849 url = "https://files.pythonhosted.org/packages/e8/69/fbeffffc05236398ebfcfb512b6d2511c622871dca1746361006da310399/ipython_genutils-0.2.0.tar.gz";
850 850 sha256 = "1a4bc9y8hnvq6cp08qs4mckgm6i6ajpndp4g496rvvzcfmp12bpb";
851 851 };
852 852 meta = {
853 853 license = [ pkgs.lib.licenses.bsdOriginal ];
854 854 };
855 855 };
856 856 "iso8601" = super.buildPythonPackage {
857 857 name = "iso8601-0.1.12";
858 858 doCheck = false;
859 859 src = fetchurl {
860 860 url = "https://files.pythonhosted.org/packages/45/13/3db24895497345fb44c4248c08b16da34a9eb02643cea2754b21b5ed08b0/iso8601-0.1.12.tar.gz";
861 861 sha256 = "10nyvvnrhw2w3p09v1ica4lgj6f4g9j3kkfx17qmraiq3w7b5i29";
862 862 };
863 863 meta = {
864 864 license = [ pkgs.lib.licenses.mit ];
865 865 };
866 866 };
867 867 "isodate" = super.buildPythonPackage {
868 868 name = "isodate-0.6.0";
869 869 doCheck = false;
870 870 propagatedBuildInputs = [
871 871 self."six"
872 872 ];
873 873 src = fetchurl {
874 874 url = "https://files.pythonhosted.org/packages/b1/80/fb8c13a4cd38eb5021dc3741a9e588e4d1de88d895c1910c6fc8a08b7a70/isodate-0.6.0.tar.gz";
875 875 sha256 = "1n7jkz68kk5pwni540pr5zdh99bf6ywydk1p5pdrqisrawylldif";
876 876 };
877 877 meta = {
878 878 license = [ pkgs.lib.licenses.bsdOriginal ];
879 879 };
880 880 };
881 881 "itsdangerous" = super.buildPythonPackage {
882 882 name = "itsdangerous-1.1.0";
883 883 doCheck = false;
884 884 src = fetchurl {
885 885 url = "https://files.pythonhosted.org/packages/68/1a/f27de07a8a304ad5fa817bbe383d1238ac4396da447fa11ed937039fa04b/itsdangerous-1.1.0.tar.gz";
886 886 sha256 = "068zpbksq5q2z4dckh2k1zbcq43ay74ylqn77rni797j0wyh66rj";
887 887 };
888 888 meta = {
889 889 license = [ pkgs.lib.licenses.bsdOriginal ];
890 890 };
891 891 };
892 892 "jinja2" = super.buildPythonPackage {
893 893 name = "jinja2-2.9.6";
894 894 doCheck = false;
895 895 propagatedBuildInputs = [
896 896 self."markupsafe"
897 897 ];
898 898 src = fetchurl {
899 899 url = "https://files.pythonhosted.org/packages/90/61/f820ff0076a2599dd39406dcb858ecb239438c02ce706c8e91131ab9c7f1/Jinja2-2.9.6.tar.gz";
900 900 sha256 = "1zzrkywhziqffrzks14kzixz7nd4yh2vc0fb04a68vfd2ai03anx";
901 901 };
902 902 meta = {
903 903 license = [ pkgs.lib.licenses.bsdOriginal ];
904 904 };
905 905 };
906 906 "jsonschema" = super.buildPythonPackage {
907 907 name = "jsonschema-2.6.0";
908 908 doCheck = false;
909 909 propagatedBuildInputs = [
910 910 self."functools32"
911 911 ];
912 912 src = fetchurl {
913 913 url = "https://files.pythonhosted.org/packages/58/b9/171dbb07e18c6346090a37f03c7e74410a1a56123f847efed59af260a298/jsonschema-2.6.0.tar.gz";
914 914 sha256 = "00kf3zmpp9ya4sydffpifn0j0mzm342a2vzh82p6r0vh10cg7xbg";
915 915 };
916 916 meta = {
917 917 license = [ pkgs.lib.licenses.mit ];
918 918 };
919 919 };
920 920 "jupyter-client" = super.buildPythonPackage {
921 921 name = "jupyter-client-5.0.0";
922 922 doCheck = false;
923 923 propagatedBuildInputs = [
924 924 self."traitlets"
925 925 self."jupyter-core"
926 926 self."pyzmq"
927 927 self."python-dateutil"
928 928 ];
929 929 src = fetchurl {
930 930 url = "https://files.pythonhosted.org/packages/e5/6f/65412ed462202b90134b7e761b0b7e7f949e07a549c1755475333727b3d0/jupyter_client-5.0.0.tar.gz";
931 931 sha256 = "0nxw4rqk4wsjhc87gjqd7pv89cb9dnimcfnmcmp85bmrvv1gjri7";
932 932 };
933 933 meta = {
934 934 license = [ pkgs.lib.licenses.bsdOriginal ];
935 935 };
936 936 };
937 937 "jupyter-core" = super.buildPythonPackage {
938 938 name = "jupyter-core-4.5.0";
939 939 doCheck = false;
940 940 propagatedBuildInputs = [
941 941 self."traitlets"
942 942 ];
943 943 src = fetchurl {
944 944 url = "https://files.pythonhosted.org/packages/4a/de/ff4ca734656d17ebe0450807b59d728f45277e2e7f4b82bc9aae6cb82961/jupyter_core-4.5.0.tar.gz";
945 945 sha256 = "1xr4pbghwk5hayn5wwnhb7z95380r45p79gf5if5pi1akwg7qvic";
946 946 };
947 947 meta = {
948 948 license = [ pkgs.lib.licenses.bsdOriginal ];
949 949 };
950 950 };
951 951 "kombu" = super.buildPythonPackage {
952 952 name = "kombu-4.6.6";
953 953 doCheck = false;
954 954 propagatedBuildInputs = [
955 955 self."amqp"
956 956 self."importlib-metadata"
957 957 ];
958 958 src = fetchurl {
959 959 url = "https://files.pythonhosted.org/packages/20/e6/bc2d9affba6138a1dc143f77fef253e9e08e238fa7c0688d917c09005e96/kombu-4.6.6.tar.gz";
960 960 sha256 = "11mxpcy8mg1l35bgbhba70v29bydr2hrhdbdlb4lg98m3m5vaq0p";
961 961 };
962 962 meta = {
963 963 license = [ pkgs.lib.licenses.bsdOriginal ];
964 964 };
965 965 };
966 966 "lxml" = super.buildPythonPackage {
967 967 name = "lxml-4.2.5";
968 968 doCheck = false;
969 969 src = fetchurl {
970 970 url = "https://files.pythonhosted.org/packages/4b/20/ddf5eb3bd5c57582d2b4652b4bbcf8da301bdfe5d805cb94e805f4d7464d/lxml-4.2.5.tar.gz";
971 971 sha256 = "0zw0y9hs0nflxhl9cs6ipwwh53szi3w2x06wl0k9cylyqac0cwin";
972 972 };
973 973 meta = {
974 974 license = [ pkgs.lib.licenses.bsdOriginal ];
975 975 };
976 976 };
977 977 "mako" = super.buildPythonPackage {
978 978 name = "mako-1.1.0";
979 979 doCheck = false;
980 980 propagatedBuildInputs = [
981 981 self."markupsafe"
982 982 ];
983 983 src = fetchurl {
984 984 url = "https://files.pythonhosted.org/packages/b0/3c/8dcd6883d009f7cae0f3157fb53e9afb05a0d3d33b3db1268ec2e6f4a56b/Mako-1.1.0.tar.gz";
985 985 sha256 = "0jqa3qfpykyn4fmkn0kh6043sfls7br8i2bsdbccazcvk9cijsd3";
986 986 };
987 987 meta = {
988 988 license = [ pkgs.lib.licenses.mit ];
989 989 };
990 990 };
991 991 "markdown" = super.buildPythonPackage {
992 992 name = "markdown-2.6.11";
993 993 doCheck = false;
994 994 src = fetchurl {
995 995 url = "https://files.pythonhosted.org/packages/b3/73/fc5c850f44af5889192dff783b7b0d8f3fe8d30b65c8e3f78f8f0265fecf/Markdown-2.6.11.tar.gz";
996 996 sha256 = "108g80ryzykh8bj0i7jfp71510wrcixdi771lf2asyghgyf8cmm8";
997 997 };
998 998 meta = {
999 999 license = [ pkgs.lib.licenses.bsdOriginal ];
1000 1000 };
1001 1001 };
1002 1002 "markupsafe" = super.buildPythonPackage {
1003 1003 name = "markupsafe-1.1.1";
1004 1004 doCheck = false;
1005 1005 src = fetchurl {
1006 1006 url = "https://files.pythonhosted.org/packages/b9/2e/64db92e53b86efccfaea71321f597fa2e1b2bd3853d8ce658568f7a13094/MarkupSafe-1.1.1.tar.gz";
1007 1007 sha256 = "0sqipg4fk7xbixqd8kq6rlkxj664d157bdwbh93farcphf92x1r9";
1008 1008 };
1009 1009 meta = {
1010 1010 license = [ pkgs.lib.licenses.bsdOriginal pkgs.lib.licenses.bsd3 ];
1011 1011 };
1012 1012 };
1013 1013 "marshmallow" = super.buildPythonPackage {
1014 1014 name = "marshmallow-2.18.0";
1015 1015 doCheck = false;
1016 1016 src = fetchurl {
1017 1017 url = "https://files.pythonhosted.org/packages/ad/0b/5799965d1c6d5f608d684e2c0dce8a828e0309a3bfe8327d9418a89f591c/marshmallow-2.18.0.tar.gz";
1018 1018 sha256 = "1g0aafpjn7yaxq06yndy8c7rs9n42adxkqq1ayhlr869pr06d3lm";
1019 1019 };
1020 1020 meta = {
1021 1021 license = [ pkgs.lib.licenses.mit ];
1022 1022 };
1023 1023 };
1024 1024 "mistune" = super.buildPythonPackage {
1025 1025 name = "mistune-0.8.4";
1026 1026 doCheck = false;
1027 1027 src = fetchurl {
1028 1028 url = "https://files.pythonhosted.org/packages/2d/a4/509f6e7783ddd35482feda27bc7f72e65b5e7dc910eca4ab2164daf9c577/mistune-0.8.4.tar.gz";
1029 1029 sha256 = "0vkmsh0x480rni51lhyvigfdf06b9247z868pk3bal1wnnfl58sr";
1030 1030 };
1031 1031 meta = {
1032 1032 license = [ pkgs.lib.licenses.bsdOriginal ];
1033 1033 };
1034 1034 };
1035 1035 "mock" = super.buildPythonPackage {
1036 1036 name = "mock-3.0.5";
1037 1037 doCheck = false;
1038 1038 propagatedBuildInputs = [
1039 1039 self."six"
1040 1040 self."funcsigs"
1041 1041 ];
1042 1042 src = fetchurl {
1043 1043 url = "https://files.pythonhosted.org/packages/2e/ab/4fe657d78b270aa6a32f027849513b829b41b0f28d9d8d7f8c3d29ea559a/mock-3.0.5.tar.gz";
1044 1044 sha256 = "1hrp6j0yrx2xzylfv02qa8kph661m6yq4p0mc8fnimch9j4psrc3";
1045 1045 };
1046 1046 meta = {
1047 1047 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "OSI Approved :: BSD License"; } ];
1048 1048 };
1049 1049 };
1050 1050 "more-itertools" = super.buildPythonPackage {
1051 1051 name = "more-itertools-5.0.0";
1052 1052 doCheck = false;
1053 1053 propagatedBuildInputs = [
1054 1054 self."six"
1055 1055 ];
1056 1056 src = fetchurl {
1057 1057 url = "https://files.pythonhosted.org/packages/dd/26/30fc0d541d9fdf55faf5ba4b0fd68f81d5bd2447579224820ad525934178/more-itertools-5.0.0.tar.gz";
1058 1058 sha256 = "1r12cm6mcdwdzz7d47a6g4l437xsvapdlgyhqay3i2nrlv03da9q";
1059 1059 };
1060 1060 meta = {
1061 1061 license = [ pkgs.lib.licenses.mit ];
1062 1062 };
1063 1063 };
1064 1064 "msgpack-python" = super.buildPythonPackage {
1065 1065 name = "msgpack-python-0.5.6";
1066 1066 doCheck = false;
1067 1067 src = fetchurl {
1068 1068 url = "https://files.pythonhosted.org/packages/8a/20/6eca772d1a5830336f84aca1d8198e5a3f4715cd1c7fc36d3cc7f7185091/msgpack-python-0.5.6.tar.gz";
1069 1069 sha256 = "16wh8qgybmfh4pjp8vfv78mdlkxfmcasg78lzlnm6nslsfkci31p";
1070 1070 };
1071 1071 meta = {
1072 1072 license = [ pkgs.lib.licenses.asl20 ];
1073 1073 };
1074 1074 };
1075 1075 "mysql-python" = super.buildPythonPackage {
1076 1076 name = "mysql-python-1.2.5";
1077 1077 doCheck = false;
1078 1078 src = fetchurl {
1079 1079 url = "https://files.pythonhosted.org/packages/a5/e9/51b544da85a36a68debe7a7091f068d802fc515a3a202652828c73453cad/MySQL-python-1.2.5.zip";
1080 1080 sha256 = "0x0c2jg0bb3pp84njaqiic050qkyd7ymwhfvhipnimg58yv40441";
1081 1081 };
1082 1082 meta = {
1083 1083 license = [ pkgs.lib.licenses.gpl1 ];
1084 1084 };
1085 1085 };
1086 1086 "nbconvert" = super.buildPythonPackage {
1087 1087 name = "nbconvert-5.3.1";
1088 1088 doCheck = false;
1089 1089 propagatedBuildInputs = [
1090 1090 self."mistune"
1091 1091 self."jinja2"
1092 1092 self."pygments"
1093 1093 self."traitlets"
1094 1094 self."jupyter-core"
1095 1095 self."nbformat"
1096 1096 self."entrypoints"
1097 1097 self."bleach"
1098 1098 self."pandocfilters"
1099 1099 self."testpath"
1100 1100 ];
1101 1101 src = fetchurl {
1102 1102 url = "https://files.pythonhosted.org/packages/b9/a4/d0a0938ad6f5eeb4dea4e73d255c617ef94b0b2849d51194c9bbdb838412/nbconvert-5.3.1.tar.gz";
1103 1103 sha256 = "1f9dkvpx186xjm4xab0qbph588mncp4vqk3fmxrsnqs43mks9c8j";
1104 1104 };
1105 1105 meta = {
1106 1106 license = [ pkgs.lib.licenses.bsdOriginal ];
1107 1107 };
1108 1108 };
1109 1109 "nbformat" = super.buildPythonPackage {
1110 1110 name = "nbformat-4.4.0";
1111 1111 doCheck = false;
1112 1112 propagatedBuildInputs = [
1113 1113 self."ipython-genutils"
1114 1114 self."traitlets"
1115 1115 self."jsonschema"
1116 1116 self."jupyter-core"
1117 1117 ];
1118 1118 src = fetchurl {
1119 1119 url = "https://files.pythonhosted.org/packages/6e/0e/160754f7ae3e984863f585a3743b0ed1702043a81245907c8fae2d537155/nbformat-4.4.0.tar.gz";
1120 1120 sha256 = "00nlf08h8yc4q73nphfvfhxrcnilaqanb8z0mdy6nxk0vzq4wjgp";
1121 1121 };
1122 1122 meta = {
1123 1123 license = [ pkgs.lib.licenses.bsdOriginal ];
1124 1124 };
1125 1125 };
1126 1126 "packaging" = super.buildPythonPackage {
1127 1127 name = "packaging-20.3";
1128 1128 doCheck = false;
1129 1129 propagatedBuildInputs = [
1130 1130 self."pyparsing"
1131 1131 self."six"
1132 1132 ];
1133 1133 src = fetchurl {
1134 1134 url = "https://files.pythonhosted.org/packages/65/37/83e3f492eb52d771e2820e88105f605335553fe10422cba9d256faeb1702/packaging-20.3.tar.gz";
1135 1135 sha256 = "18xpablq278janh03bai9xd4kz9b0yfp6vflazn725ns9x3jna9w";
1136 1136 };
1137 1137 meta = {
1138 1138 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "BSD or Apache License, Version 2.0"; } pkgs.lib.licenses.asl20 ];
1139 1139 };
1140 1140 };
1141 1141 "pandocfilters" = super.buildPythonPackage {
1142 1142 name = "pandocfilters-1.4.2";
1143 1143 doCheck = false;
1144 1144 src = fetchurl {
1145 1145 url = "https://files.pythonhosted.org/packages/4c/ea/236e2584af67bb6df960832731a6e5325fd4441de001767da328c33368ce/pandocfilters-1.4.2.tar.gz";
1146 1146 sha256 = "1a8d9b7s48gmq9zj0pmbyv2sivn5i7m6mybgpkk4jm5vd7hp1pdk";
1147 1147 };
1148 1148 meta = {
1149 1149 license = [ pkgs.lib.licenses.bsdOriginal ];
1150 1150 };
1151 1151 };
1152 1152 "paste" = super.buildPythonPackage {
1153 1153 name = "paste-3.4.0";
1154 1154 doCheck = false;
1155 1155 propagatedBuildInputs = [
1156 1156 self."six"
1157 1157 ];
1158 1158 src = fetchurl {
1159 1159 url = "https://files.pythonhosted.org/packages/79/4a/45821b71dd40000507549afd1491546afad8279c0a87527c88776a794158/Paste-3.4.0.tar.gz";
1160 1160 sha256 = "16sichvhyci1gaarkjs35mai8vphh7b244qm14hj1isw38nx4c03";
1161 1161 };
1162 1162 meta = {
1163 1163 license = [ pkgs.lib.licenses.mit ];
1164 1164 };
1165 1165 };
1166 1166 "pastedeploy" = super.buildPythonPackage {
1167 1167 name = "pastedeploy-2.1.0";
1168 1168 doCheck = false;
1169 1169 src = fetchurl {
1170 1170 url = "https://files.pythonhosted.org/packages/c4/e9/972a1c20318b3ae9edcab11a6cef64308fbae5d0d45ab52c6f8b2b8f35b8/PasteDeploy-2.1.0.tar.gz";
1171 1171 sha256 = "16qsq5y6mryslmbp5pn35x4z8z3ndp5rpgl42h226879nrw9hmg7";
1172 1172 };
1173 1173 meta = {
1174 1174 license = [ pkgs.lib.licenses.mit ];
1175 1175 };
1176 1176 };
1177 1177 "pastescript" = super.buildPythonPackage {
1178 1178 name = "pastescript-3.2.0";
1179 1179 doCheck = false;
1180 1180 propagatedBuildInputs = [
1181 1181 self."paste"
1182 1182 self."pastedeploy"
1183 1183 self."six"
1184 1184 ];
1185 1185 src = fetchurl {
1186 1186 url = "https://files.pythonhosted.org/packages/ff/47/45c6f5a3cb8f5abf786fea98dbb8d02400a55768a9b623afb7df12346c61/PasteScript-3.2.0.tar.gz";
1187 1187 sha256 = "1b3jq7xh383nvrrlblk05m37345bv97xrhx77wshllba3h7mq3wv";
1188 1188 };
1189 1189 meta = {
1190 1190 license = [ pkgs.lib.licenses.mit ];
1191 1191 };
1192 1192 };
1193 1193 "pathlib2" = super.buildPythonPackage {
1194 1194 name = "pathlib2-2.3.5";
1195 1195 doCheck = false;
1196 1196 propagatedBuildInputs = [
1197 1197 self."six"
1198 1198 self."scandir"
1199 1199 ];
1200 1200 src = fetchurl {
1201 1201 url = "https://files.pythonhosted.org/packages/94/d8/65c86584e7e97ef824a1845c72bbe95d79f5b306364fa778a3c3e401b309/pathlib2-2.3.5.tar.gz";
1202 1202 sha256 = "0s4qa8c082fdkb17izh4mfgwrjd1n5pya18wvrbwqdvvb5xs9nbc";
1203 1203 };
1204 1204 meta = {
1205 1205 license = [ pkgs.lib.licenses.mit ];
1206 1206 };
1207 1207 };
1208 1208 "peppercorn" = super.buildPythonPackage {
1209 1209 name = "peppercorn-0.6";
1210 1210 doCheck = false;
1211 1211 src = fetchurl {
1212 1212 url = "https://files.pythonhosted.org/packages/e4/77/93085de7108cdf1a0b092ff443872a8f9442c736d7ddebdf2f27627935f4/peppercorn-0.6.tar.gz";
1213 1213 sha256 = "1ip4bfwcpwkq9hz2dai14k2cyabvwrnvcvrcmzxmqm04g8fnimwn";
1214 1214 };
1215 1215 meta = {
1216 1216 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1217 1217 };
1218 1218 };
1219 1219 "pexpect" = super.buildPythonPackage {
1220 1220 name = "pexpect-4.8.0";
1221 1221 doCheck = false;
1222 1222 propagatedBuildInputs = [
1223 1223 self."ptyprocess"
1224 1224 ];
1225 1225 src = fetchurl {
1226 1226 url = "https://files.pythonhosted.org/packages/e5/9b/ff402e0e930e70467a7178abb7c128709a30dfb22d8777c043e501bc1b10/pexpect-4.8.0.tar.gz";
1227 1227 sha256 = "032cg337h8awydgypz6f4wx848lw8dyrj4zy988x0lyib4ws8rgw";
1228 1228 };
1229 1229 meta = {
1230 1230 license = [ pkgs.lib.licenses.isc { fullName = "ISC License (ISCL)"; } ];
1231 1231 };
1232 1232 };
1233 1233 "pickleshare" = super.buildPythonPackage {
1234 1234 name = "pickleshare-0.7.5";
1235 1235 doCheck = false;
1236 1236 propagatedBuildInputs = [
1237 1237 self."pathlib2"
1238 1238 ];
1239 1239 src = fetchurl {
1240 1240 url = "https://files.pythonhosted.org/packages/d8/b6/df3c1c9b616e9c0edbc4fbab6ddd09df9535849c64ba51fcb6531c32d4d8/pickleshare-0.7.5.tar.gz";
1241 1241 sha256 = "1jmghg3c53yp1i8cm6pcrm280ayi8621rwyav9fac7awjr3kss47";
1242 1242 };
1243 1243 meta = {
1244 1244 license = [ pkgs.lib.licenses.mit ];
1245 1245 };
1246 1246 };
1247 1247 "plaster" = super.buildPythonPackage {
1248 1248 name = "plaster-1.0";
1249 1249 doCheck = false;
1250 1250 propagatedBuildInputs = [
1251 1251 self."setuptools"
1252 1252 ];
1253 1253 src = fetchurl {
1254 1254 url = "https://files.pythonhosted.org/packages/37/e1/56d04382d718d32751017d32f351214384e529b794084eee20bb52405563/plaster-1.0.tar.gz";
1255 1255 sha256 = "1hy8k0nv2mxq94y5aysk6hjk9ryb4bsd13g83m60hcyzxz3wflc3";
1256 1256 };
1257 1257 meta = {
1258 1258 license = [ pkgs.lib.licenses.mit ];
1259 1259 };
1260 1260 };
1261 1261 "plaster-pastedeploy" = super.buildPythonPackage {
1262 1262 name = "plaster-pastedeploy-0.7";
1263 1263 doCheck = false;
1264 1264 propagatedBuildInputs = [
1265 1265 self."pastedeploy"
1266 1266 self."plaster"
1267 1267 ];
1268 1268 src = fetchurl {
1269 1269 url = "https://files.pythonhosted.org/packages/99/69/2d3bc33091249266a1bd3cf24499e40ab31d54dffb4a7d76fe647950b98c/plaster_pastedeploy-0.7.tar.gz";
1270 1270 sha256 = "1zg7gcsvc1kzay1ry5p699rg2qavfsxqwl17mqxzr0gzw6j9679r";
1271 1271 };
1272 1272 meta = {
1273 1273 license = [ pkgs.lib.licenses.mit ];
1274 1274 };
1275 1275 };
1276 1276 "pluggy" = super.buildPythonPackage {
1277 1277 name = "pluggy-0.13.1";
1278 1278 doCheck = false;
1279 1279 propagatedBuildInputs = [
1280 1280 self."importlib-metadata"
1281 1281 ];
1282 1282 src = fetchurl {
1283 1283 url = "https://files.pythonhosted.org/packages/f8/04/7a8542bed4b16a65c2714bf76cf5a0b026157da7f75e87cc88774aa10b14/pluggy-0.13.1.tar.gz";
1284 1284 sha256 = "1c35qyhvy27q9ih9n899f3h4sdnpgq027dbiilly2qb5cvgarchm";
1285 1285 };
1286 1286 meta = {
1287 1287 license = [ pkgs.lib.licenses.mit ];
1288 1288 };
1289 1289 };
1290 1290 "premailer" = super.buildPythonPackage {
1291 1291 name = "premailer-3.6.1";
1292 1292 doCheck = false;
1293 1293 propagatedBuildInputs = [
1294 1294 self."lxml"
1295 1295 self."cssselect"
1296 1296 self."cssutils"
1297 1297 self."requests"
1298 1298 self."cachetools"
1299 1299 ];
1300 1300 src = fetchurl {
1301 1301 url = "https://files.pythonhosted.org/packages/62/da/2f43cdf9d3d79c80c4856a12389a1f257d65fe9ccc44bc6b4383c8a18e33/premailer-3.6.1.tar.gz";
1302 1302 sha256 = "08pshx7a110k4ll20x0xhpvyn3kkipkrbgxjjn7ncdxs54ihdhgw";
1303 1303 };
1304 1304 meta = {
1305 1305 license = [ pkgs.lib.licenses.psfl { fullName = "Python"; } ];
1306 1306 };
1307 1307 };
1308 1308 "prompt-toolkit" = super.buildPythonPackage {
1309 1309 name = "prompt-toolkit-1.0.18";
1310 1310 doCheck = false;
1311 1311 propagatedBuildInputs = [
1312 1312 self."six"
1313 1313 self."wcwidth"
1314 1314 ];
1315 1315 src = fetchurl {
1316 1316 url = "https://files.pythonhosted.org/packages/c5/64/c170e5b1913b540bf0c8ab7676b21fdd1d25b65ddeb10025c6ca43cccd4c/prompt_toolkit-1.0.18.tar.gz";
1317 1317 sha256 = "09h1153wgr5x2ny7ds0w2m81n3bb9j8hjb8sjfnrg506r01clkyx";
1318 1318 };
1319 1319 meta = {
1320 1320 license = [ pkgs.lib.licenses.bsdOriginal ];
1321 1321 };
1322 1322 };
1323 1323 "psutil" = super.buildPythonPackage {
1324 1324 name = "psutil-5.7.0";
1325 1325 doCheck = false;
1326 1326 src = fetchurl {
1327 1327 url = "https://files.pythonhosted.org/packages/c4/b8/3512f0e93e0db23a71d82485ba256071ebef99b227351f0f5540f744af41/psutil-5.7.0.tar.gz";
1328 1328 sha256 = "03jykdi3dgf1cdal9bv4fq9zjvzj9l9bs99gi5ar81sdl5nc2pk8";
1329 1329 };
1330 1330 meta = {
1331 1331 license = [ pkgs.lib.licenses.bsdOriginal ];
1332 1332 };
1333 1333 };
1334 1334 "psycopg2" = super.buildPythonPackage {
1335 1335 name = "psycopg2-2.8.4";
1336 1336 doCheck = false;
1337 1337 src = fetchurl {
1338 1338 url = "https://files.pythonhosted.org/packages/84/d7/6a93c99b5ba4d4d22daa3928b983cec66df4536ca50b22ce5dcac65e4e71/psycopg2-2.8.4.tar.gz";
1339 1339 sha256 = "1djvh98pi4hjd8rxbq8qzc63bg8v78k33yg6pl99wak61b6fb67q";
1340 1340 };
1341 1341 meta = {
1342 1342 license = [ pkgs.lib.licenses.zpl21 { fullName = "GNU Library or Lesser General Public License (LGPL)"; } { fullName = "LGPL with exceptions or ZPL"; } ];
1343 1343 };
1344 1344 };
1345 1345 "ptyprocess" = super.buildPythonPackage {
1346 1346 name = "ptyprocess-0.6.0";
1347 1347 doCheck = false;
1348 1348 src = fetchurl {
1349 1349 url = "https://files.pythonhosted.org/packages/7d/2d/e4b8733cf79b7309d84c9081a4ab558c89d8c89da5961bf4ddb050ca1ce0/ptyprocess-0.6.0.tar.gz";
1350 1350 sha256 = "1h4lcd3w5nrxnsk436ar7fwkiy5rfn5wj2xwy9l0r4mdqnf2jgwj";
1351 1351 };
1352 1352 meta = {
1353 1353 license = [ pkgs.lib.licenses.isc { fullName = "ISC License (ISCL)"; } ];
1354 1354 };
1355 1355 };
1356 1356 "py" = super.buildPythonPackage {
1357 1357 name = "py-1.8.0";
1358 1358 doCheck = false;
1359 1359 src = fetchurl {
1360 1360 url = "https://files.pythonhosted.org/packages/f1/5a/87ca5909f400a2de1561f1648883af74345fe96349f34f737cdfc94eba8c/py-1.8.0.tar.gz";
1361 1361 sha256 = "0lsy1gajva083pzc7csj1cvbmminb7b4l6a0prdzyb3fd829nqyw";
1362 1362 };
1363 1363 meta = {
1364 1364 license = [ pkgs.lib.licenses.mit ];
1365 1365 };
1366 1366 };
1367 1367 "py-bcrypt" = super.buildPythonPackage {
1368 1368 name = "py-bcrypt-0.4";
1369 1369 doCheck = false;
1370 1370 src = fetchurl {
1371 1371 url = "https://files.pythonhosted.org/packages/68/b1/1c3068c5c4d2e35c48b38dcc865301ebfdf45f54507086ac65ced1fd3b3d/py-bcrypt-0.4.tar.gz";
1372 1372 sha256 = "0y6smdggwi5s72v6p1nn53dg6w05hna3d264cq6kas0lap73p8az";
1373 1373 };
1374 1374 meta = {
1375 1375 license = [ pkgs.lib.licenses.bsdOriginal ];
1376 1376 };
1377 1377 };
1378 1378 "py-gfm" = super.buildPythonPackage {
1379 1379 name = "py-gfm-0.1.4";
1380 1380 doCheck = false;
1381 1381 propagatedBuildInputs = [
1382 1382 self."setuptools"
1383 1383 self."markdown"
1384 1384 ];
1385 1385 src = fetchurl {
1386 1386 url = "https://files.pythonhosted.org/packages/06/ee/004a03a1d92bb386dae44f6dd087db541bc5093374f1637d4d4ae5596cc2/py-gfm-0.1.4.tar.gz";
1387 1387 sha256 = "0zip06g2isivx8fzgqd4n9qzsa22c25jas1rsb7m2rnjg72m0rzg";
1388 1388 };
1389 1389 meta = {
1390 1390 license = [ pkgs.lib.licenses.bsdOriginal ];
1391 1391 };
1392 1392 };
1393 1393 "pyasn1" = super.buildPythonPackage {
1394 1394 name = "pyasn1-0.4.8";
1395 1395 doCheck = false;
1396 1396 src = fetchurl {
1397 1397 url = "https://files.pythonhosted.org/packages/a4/db/fffec68299e6d7bad3d504147f9094830b704527a7fc098b721d38cc7fa7/pyasn1-0.4.8.tar.gz";
1398 1398 sha256 = "1fnhbi3rmk47l9851gbik0flfr64vs5j0hbqx24cafjap6gprxxf";
1399 1399 };
1400 1400 meta = {
1401 1401 license = [ pkgs.lib.licenses.bsdOriginal ];
1402 1402 };
1403 1403 };
1404 1404 "pyasn1-modules" = super.buildPythonPackage {
1405 1405 name = "pyasn1-modules-0.2.6";
1406 1406 doCheck = false;
1407 1407 propagatedBuildInputs = [
1408 1408 self."pyasn1"
1409 1409 ];
1410 1410 src = fetchurl {
1411 1411 url = "https://files.pythonhosted.org/packages/f1/a9/a1ef72a0e43feff643cf0130a08123dea76205e7a0dda37e3efb5f054a31/pyasn1-modules-0.2.6.tar.gz";
1412 1412 sha256 = "08hph9j1r018drnrny29l7dl2q0cin78csswrhwrh8jmq61pmha3";
1413 1413 };
1414 1414 meta = {
1415 1415 license = [ pkgs.lib.licenses.bsdOriginal pkgs.lib.licenses.bsd2 ];
1416 1416 };
1417 1417 };
1418 1418 "pycparser" = super.buildPythonPackage {
1419 1419 name = "pycparser-2.20";
1420 1420 doCheck = false;
1421 1421 src = fetchurl {
1422 1422 url = "https://files.pythonhosted.org/packages/0f/86/e19659527668d70be91d0369aeaa055b4eb396b0f387a4f92293a20035bd/pycparser-2.20.tar.gz";
1423 1423 sha256 = "1w0m3xvlrzq4lkbvd1ngfm8mdw64r1yxy6n7djlw6qj5d0km6ird";
1424 1424 };
1425 1425 meta = {
1426 1426 license = [ pkgs.lib.licenses.bsdOriginal ];
1427 1427 };
1428 1428 };
1429 1429 "pycrypto" = super.buildPythonPackage {
1430 1430 name = "pycrypto-2.6.1";
1431 1431 doCheck = false;
1432 1432 src = fetchurl {
1433 1433 url = "https://files.pythonhosted.org/packages/60/db/645aa9af249f059cc3a368b118de33889219e0362141e75d4eaf6f80f163/pycrypto-2.6.1.tar.gz";
1434 1434 sha256 = "0g0ayql5b9mkjam8hym6zyg6bv77lbh66rv1fyvgqb17kfc1xkpj";
1435 1435 };
1436 1436 meta = {
1437 1437 license = [ pkgs.lib.licenses.publicDomain ];
1438 1438 };
1439 1439 };
1440 1440 "pycurl" = super.buildPythonPackage {
1441 1441 name = "pycurl-7.43.0.3";
1442 1442 doCheck = false;
1443 1443 src = fetchurl {
1444 1444 url = "https://files.pythonhosted.org/packages/ac/b3/0f3979633b7890bab6098d84c84467030b807a1e2b31f5d30103af5a71ca/pycurl-7.43.0.3.tar.gz";
1445 1445 sha256 = "13nsvqhvnmnvfk75s8iynqsgszyv06cjp4drd3psi7zpbh63623g";
1446 1446 };
1447 1447 meta = {
1448 1448 license = [ pkgs.lib.licenses.mit { fullName = "LGPL/MIT"; } { fullName = "GNU Library or Lesser General Public License (LGPL)"; } ];
1449 1449 };
1450 1450 };
1451 1451 "pygments" = super.buildPythonPackage {
1452 1452 name = "pygments-2.4.2";
1453 1453 doCheck = false;
1454 1454 src = fetchurl {
1455 1455 url = "https://files.pythonhosted.org/packages/7e/ae/26808275fc76bf2832deb10d3a3ed3107bc4de01b85dcccbe525f2cd6d1e/Pygments-2.4.2.tar.gz";
1456 1456 sha256 = "15v2sqm5g12bqa0c7wikfh9ck2nl97ayizy1hpqhmws5gqalq748";
1457 1457 };
1458 1458 meta = {
1459 1459 license = [ pkgs.lib.licenses.bsdOriginal ];
1460 1460 };
1461 1461 };
1462 1462 "pymysql" = super.buildPythonPackage {
1463 1463 name = "pymysql-0.8.1";
1464 1464 doCheck = false;
1465 1465 src = fetchurl {
1466 1466 url = "https://files.pythonhosted.org/packages/44/39/6bcb83cae0095a31b6be4511707fdf2009d3e29903a55a0494d3a9a2fac0/PyMySQL-0.8.1.tar.gz";
1467 1467 sha256 = "0a96crz55bw4h6myh833skrli7b0ck89m3x673y2z2ryy7zrpq9l";
1468 1468 };
1469 1469 meta = {
1470 1470 license = [ pkgs.lib.licenses.mit ];
1471 1471 };
1472 1472 };
1473 1473 "pyotp" = super.buildPythonPackage {
1474 1474 name = "pyotp-2.3.0";
1475 1475 doCheck = false;
1476 1476 src = fetchurl {
1477 1477 url = "https://files.pythonhosted.org/packages/f7/15/395c4945ea6bc37e8811280bb675615cb4c2b2c1cd70bdc43329da91a386/pyotp-2.3.0.tar.gz";
1478 1478 sha256 = "18d13ikra1iq0xyfqfm72zhgwxi2qi9ps6z1a6zmqp4qrn57wlzw";
1479 1479 };
1480 1480 meta = {
1481 1481 license = [ pkgs.lib.licenses.mit ];
1482 1482 };
1483 1483 };
1484 1484 "pyparsing" = super.buildPythonPackage {
1485 1485 name = "pyparsing-2.4.7";
1486 1486 doCheck = false;
1487 1487 src = fetchurl {
1488 1488 url = "https://files.pythonhosted.org/packages/c1/47/dfc9c342c9842bbe0036c7f763d2d6686bcf5eb1808ba3e170afdb282210/pyparsing-2.4.7.tar.gz";
1489 1489 sha256 = "1hgc8qrbq1ymxbwfbjghv01fm3fbpjwpjwi0bcailxxzhf3yq0y2";
1490 1490 };
1491 1491 meta = {
1492 1492 license = [ pkgs.lib.licenses.mit ];
1493 1493 };
1494 1494 };
1495 1495 "pyramid" = super.buildPythonPackage {
1496 1496 name = "pyramid-1.10.4";
1497 1497 doCheck = false;
1498 1498 propagatedBuildInputs = [
1499 1499 self."hupper"
1500 1500 self."plaster"
1501 1501 self."plaster-pastedeploy"
1502 1502 self."setuptools"
1503 1503 self."translationstring"
1504 1504 self."venusian"
1505 1505 self."webob"
1506 1506 self."zope.deprecation"
1507 1507 self."zope.interface"
1508 1508 self."repoze.lru"
1509 1509 ];
1510 1510 src = fetchurl {
1511 1511 url = "https://files.pythonhosted.org/packages/c2/43/1ae701c9c6bb3a434358e678a5e72c96e8aa55cf4cb1d2fa2041b5dd38b7/pyramid-1.10.4.tar.gz";
1512 1512 sha256 = "0rkxs1ajycg2zh1c94xlmls56mx5m161sn8112skj0amza6cn36q";
1513 1513 };
1514 1514 meta = {
1515 1515 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1516 1516 };
1517 1517 };
1518 1518 "pyramid-debugtoolbar" = super.buildPythonPackage {
1519 1519 name = "pyramid-debugtoolbar-4.6.1";
1520 1520 doCheck = false;
1521 1521 propagatedBuildInputs = [
1522 1522 self."pyramid"
1523 1523 self."pyramid-mako"
1524 1524 self."repoze.lru"
1525 1525 self."pygments"
1526 1526 self."ipaddress"
1527 1527 ];
1528 1528 src = fetchurl {
1529 1529 url = "https://files.pythonhosted.org/packages/99/f6/b8603f82c18275be293921bc3a2184205056ca505747bf64ab8a0c08e124/pyramid_debugtoolbar-4.6.1.tar.gz";
1530 1530 sha256 = "185z7q8n959ga5331iczwra2iljwkidfx4qn6bbd7vm3rm4w6llv";
1531 1531 };
1532 1532 meta = {
1533 1533 license = [ { fullName = "Repoze Public License"; } pkgs.lib.licenses.bsdOriginal ];
1534 1534 };
1535 1535 };
1536 1536 "pyramid-jinja2" = super.buildPythonPackage {
1537 1537 name = "pyramid-jinja2-2.7";
1538 1538 doCheck = false;
1539 1539 propagatedBuildInputs = [
1540 1540 self."pyramid"
1541 1541 self."zope.deprecation"
1542 1542 self."jinja2"
1543 1543 self."markupsafe"
1544 1544 ];
1545 1545 src = fetchurl {
1546 1546 url = "https://files.pythonhosted.org/packages/d8/80/d60a7233823de22ce77bd864a8a83736a1fe8b49884b08303a2e68b2c853/pyramid_jinja2-2.7.tar.gz";
1547 1547 sha256 = "1sz5s0pp5jqhf4w22w9527yz8hgdi4mhr6apd6vw1gm5clghh8aw";
1548 1548 };
1549 1549 meta = {
1550 1550 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1551 1551 };
1552 1552 };
1553 1553 "pyramid-apispec" = super.buildPythonPackage {
1554 1554 name = "pyramid-apispec-0.3.2";
1555 1555 doCheck = false;
1556 1556 propagatedBuildInputs = [
1557 1557 self."apispec"
1558 1558 ];
1559 1559 src = fetchurl {
1560 1560 url = "https://files.pythonhosted.org/packages/2a/30/1dea5d81ea635449572ba60ec3148310d75ae4530c3c695f54b0991bb8c7/pyramid_apispec-0.3.2.tar.gz";
1561 1561 sha256 = "0ffrcqp9dkykivhfcq0v9lgy6w0qhwl6x78925vfjmayly9r8da0";
1562 1562 };
1563 1563 meta = {
1564 1564 license = [ pkgs.lib.licenses.bsdOriginal ];
1565 1565 };
1566 1566 };
1567 1567 "pyramid-mailer" = super.buildPythonPackage {
1568 1568 name = "pyramid-mailer-0.15.1";
1569 1569 doCheck = false;
1570 1570 propagatedBuildInputs = [
1571 1571 self."pyramid"
1572 1572 self."repoze.sendmail"
1573 1573 self."transaction"
1574 1574 ];
1575 1575 src = fetchurl {
1576 1576 url = "https://files.pythonhosted.org/packages/a0/f2/6febf5459dff4d7e653314d575469ad2e11b9d2af2c3606360e1c67202f2/pyramid_mailer-0.15.1.tar.gz";
1577 1577 sha256 = "16vg8jb203jgb7b0hd6wllfqvp542qh2ry1gjai2m6qpv5agy2pc";
1578 1578 };
1579 1579 meta = {
1580 1580 license = [ pkgs.lib.licenses.bsdOriginal ];
1581 1581 };
1582 1582 };
1583 1583 "pyramid-mako" = super.buildPythonPackage {
1584 1584 name = "pyramid-mako-1.1.0";
1585 1585 doCheck = false;
1586 1586 propagatedBuildInputs = [
1587 1587 self."pyramid"
1588 1588 self."mako"
1589 1589 ];
1590 1590 src = fetchurl {
1591 1591 url = "https://files.pythonhosted.org/packages/63/7b/5e2af68f675071a6bad148c1c393928f0ef5fcd94e95cbf53b89d6471a83/pyramid_mako-1.1.0.tar.gz";
1592 1592 sha256 = "1qj0m091mnii86j2q1d82yir22nha361rvhclvg3s70z8iiwhrh0";
1593 1593 };
1594 1594 meta = {
1595 1595 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1596 1596 };
1597 1597 };
1598 1598 "pysqlite" = super.buildPythonPackage {
1599 1599 name = "pysqlite-2.8.3";
1600 1600 doCheck = false;
1601 1601 src = fetchurl {
1602 1602 url = "https://files.pythonhosted.org/packages/42/02/981b6703e3c83c5b25a829c6e77aad059f9481b0bbacb47e6e8ca12bd731/pysqlite-2.8.3.tar.gz";
1603 1603 sha256 = "1424gwq9sil2ffmnizk60q36vydkv8rxs6m7xs987kz8cdc37lqp";
1604 1604 };
1605 1605 meta = {
1606 1606 license = [ { fullName = "zlib/libpng License"; } { fullName = "zlib/libpng license"; } ];
1607 1607 };
1608 1608 };
1609 1609 "pytest" = super.buildPythonPackage {
1610 1610 name = "pytest-4.6.5";
1611 1611 doCheck = false;
1612 1612 propagatedBuildInputs = [
1613 1613 self."py"
1614 1614 self."six"
1615 1615 self."packaging"
1616 1616 self."attrs"
1617 1617 self."atomicwrites"
1618 1618 self."pluggy"
1619 1619 self."importlib-metadata"
1620 1620 self."wcwidth"
1621 1621 self."funcsigs"
1622 1622 self."pathlib2"
1623 1623 self."more-itertools"
1624 1624 ];
1625 1625 src = fetchurl {
1626 1626 url = "https://files.pythonhosted.org/packages/2a/c6/1d1f32f6a5009900521b12e6560fb6b7245b0d4bc3fb771acd63d10e30e1/pytest-4.6.5.tar.gz";
1627 1627 sha256 = "0iykwwfp4h181nd7rsihh2120b0rkawlw7rvbl19sgfspncr3hwg";
1628 1628 };
1629 1629 meta = {
1630 1630 license = [ pkgs.lib.licenses.mit ];
1631 1631 };
1632 1632 };
1633 1633 "pytest-cov" = super.buildPythonPackage {
1634 1634 name = "pytest-cov-2.7.1";
1635 1635 doCheck = false;
1636 1636 propagatedBuildInputs = [
1637 1637 self."pytest"
1638 1638 self."coverage"
1639 1639 ];
1640 1640 src = fetchurl {
1641 1641 url = "https://files.pythonhosted.org/packages/bb/0f/3db7ff86801883b21d5353b258c994b1b8e2abbc804e2273b8d0fd19004b/pytest-cov-2.7.1.tar.gz";
1642 1642 sha256 = "0filvmmyqm715azsl09ql8hy2x7h286n6d8z5x42a1wpvvys83p0";
1643 1643 };
1644 1644 meta = {
1645 1645 license = [ pkgs.lib.licenses.bsdOriginal pkgs.lib.licenses.mit ];
1646 1646 };
1647 1647 };
1648 1648 "pytest-profiling" = super.buildPythonPackage {
1649 1649 name = "pytest-profiling-1.7.0";
1650 1650 doCheck = false;
1651 1651 propagatedBuildInputs = [
1652 1652 self."six"
1653 1653 self."pytest"
1654 1654 self."gprof2dot"
1655 1655 ];
1656 1656 src = fetchurl {
1657 1657 url = "https://files.pythonhosted.org/packages/39/70/22a4b33739f07f1732a63e33bbfbf68e0fa58cfba9d200e76d01921eddbf/pytest-profiling-1.7.0.tar.gz";
1658 1658 sha256 = "0abz9gi26jpcfdzgsvwad91555lpgdc8kbymicmms8k2fqa8z4wk";
1659 1659 };
1660 1660 meta = {
1661 1661 license = [ pkgs.lib.licenses.mit ];
1662 1662 };
1663 1663 };
1664 1664 "pytest-runner" = super.buildPythonPackage {
1665 1665 name = "pytest-runner-5.1";
1666 1666 doCheck = false;
1667 1667 src = fetchurl {
1668 1668 url = "https://files.pythonhosted.org/packages/d9/6d/4b41a74b31720e25abd4799be72d54811da4b4d0233e38b75864dcc1f7ad/pytest-runner-5.1.tar.gz";
1669 1669 sha256 = "0ykfcnpp8c22winj63qzc07l5axwlc9ikl8vn05sc32gv3417815";
1670 1670 };
1671 1671 meta = {
1672 1672 license = [ pkgs.lib.licenses.mit ];
1673 1673 };
1674 1674 };
1675 1675 "pytest-sugar" = super.buildPythonPackage {
1676 1676 name = "pytest-sugar-0.9.2";
1677 1677 doCheck = false;
1678 1678 propagatedBuildInputs = [
1679 1679 self."pytest"
1680 1680 self."termcolor"
1681 1681 self."packaging"
1682 1682 ];
1683 1683 src = fetchurl {
1684 1684 url = "https://files.pythonhosted.org/packages/55/59/f02f78d1c80f7e03e23177f60624c8106d4f23d124c921df103f65692464/pytest-sugar-0.9.2.tar.gz";
1685 1685 sha256 = "1asq7yc4g8bx2sn7yy974mhc9ywvaihasjab4inkirdwn9s7mn7w";
1686 1686 };
1687 1687 meta = {
1688 1688 license = [ pkgs.lib.licenses.bsdOriginal ];
1689 1689 };
1690 1690 };
1691 1691 "pytest-timeout" = super.buildPythonPackage {
1692 1692 name = "pytest-timeout-1.3.3";
1693 1693 doCheck = false;
1694 1694 propagatedBuildInputs = [
1695 1695 self."pytest"
1696 1696 ];
1697 1697 src = fetchurl {
1698 1698 url = "https://files.pythonhosted.org/packages/13/48/7a166eaa29c1dca6cc253e3ba5773ff2e4aa4f567c1ea3905808e95ac5c1/pytest-timeout-1.3.3.tar.gz";
1699 1699 sha256 = "1cczcjhw4xx5sjkhxlhc5c1bkr7x6fcyx12wrnvwfckshdvblc2a";
1700 1700 };
1701 1701 meta = {
1702 1702 license = [ pkgs.lib.licenses.mit { fullName = "DFSG approved"; } ];
1703 1703 };
1704 1704 };
1705 1705 "python-dateutil" = super.buildPythonPackage {
1706 1706 name = "python-dateutil-2.8.1";
1707 1707 doCheck = false;
1708 1708 propagatedBuildInputs = [
1709 1709 self."six"
1710 1710 ];
1711 1711 src = fetchurl {
1712 1712 url = "https://files.pythonhosted.org/packages/be/ed/5bbc91f03fa4c839c4c7360375da77f9659af5f7086b7a7bdda65771c8e0/python-dateutil-2.8.1.tar.gz";
1713 1713 sha256 = "0g42w7k5007iv9dam6gnja2ry8ydwirh99mgdll35s12pyfzxsvk";
1714 1714 };
1715 1715 meta = {
1716 1716 license = [ pkgs.lib.licenses.bsdOriginal pkgs.lib.licenses.asl20 { fullName = "Dual License"; } ];
1717 1717 };
1718 1718 };
1719 1719 "python-editor" = super.buildPythonPackage {
1720 1720 name = "python-editor-1.0.4";
1721 1721 doCheck = false;
1722 1722 src = fetchurl {
1723 1723 url = "https://files.pythonhosted.org/packages/0a/85/78f4a216d28343a67b7397c99825cff336330893f00601443f7c7b2f2234/python-editor-1.0.4.tar.gz";
1724 1724 sha256 = "0yrjh8w72ivqxi4i7xsg5b1vz15x8fg51xra7c3bgfyxqnyadzai";
1725 1725 };
1726 1726 meta = {
1727 1727 license = [ pkgs.lib.licenses.asl20 { fullName = "Apache"; } ];
1728 1728 };
1729 1729 };
1730 1730 "python-ldap" = super.buildPythonPackage {
1731 1731 name = "python-ldap-3.2.0";
1732 1732 doCheck = false;
1733 1733 propagatedBuildInputs = [
1734 1734 self."pyasn1"
1735 1735 self."pyasn1-modules"
1736 1736 ];
1737 1737 src = fetchurl {
1738 1738 url = "https://files.pythonhosted.org/packages/ea/93/596f875e003c770447f4b99267820a0c769dd2dc3ae3ed19afe460fcbad0/python-ldap-3.2.0.tar.gz";
1739 1739 sha256 = "13nvrhp85yr0jyxixcjj012iw8l9wynxxlykm9j3alss6waln73x";
1740 1740 };
1741 1741 meta = {
1742 1742 license = [ pkgs.lib.licenses.psfl ];
1743 1743 };
1744 1744 };
1745 1745 "python-memcached" = super.buildPythonPackage {
1746 1746 name = "python-memcached-1.59";
1747 1747 doCheck = false;
1748 1748 propagatedBuildInputs = [
1749 1749 self."six"
1750 1750 ];
1751 1751 src = fetchurl {
1752 1752 url = "https://files.pythonhosted.org/packages/90/59/5faf6e3cd8a568dd4f737ddae4f2e54204fd8c51f90bf8df99aca6c22318/python-memcached-1.59.tar.gz";
1753 1753 sha256 = "0kvyapavbirk2x3n1jx4yb9nyigrj1s3x15nm3qhpvhkpqvqdqm2";
1754 1754 };
1755 1755 meta = {
1756 1756 license = [ pkgs.lib.licenses.psfl ];
1757 1757 };
1758 1758 };
1759 1759 "python-pam" = super.buildPythonPackage {
1760 1760 name = "python-pam-1.8.4";
1761 1761 doCheck = false;
1762 1762 src = fetchurl {
1763 1763 url = "https://files.pythonhosted.org/packages/01/16/544d01cae9f28e0292dbd092b6b8b0bf222b528f362ee768a5bed2140111/python-pam-1.8.4.tar.gz";
1764 1764 sha256 = "16whhc0vr7gxsbzvsnq65nq8fs3wwmx755cavm8kkczdkz4djmn8";
1765 1765 };
1766 1766 meta = {
1767 1767 license = [ { fullName = "License :: OSI Approved :: MIT License"; } pkgs.lib.licenses.mit ];
1768 1768 };
1769 1769 };
1770 1770 "python-saml" = super.buildPythonPackage {
1771 1771 name = "python-saml-2.4.2";
1772 1772 doCheck = false;
1773 1773 propagatedBuildInputs = [
1774 1774 self."dm.xmlsec.binding"
1775 1775 self."isodate"
1776 1776 self."defusedxml"
1777 1777 ];
1778 1778 src = fetchurl {
1779 1779 url = "https://files.pythonhosted.org/packages/79/a8/a6611017e0883102fd5e2b73c9d90691b8134e38247c04ee1531d3dc647c/python-saml-2.4.2.tar.gz";
1780 1780 sha256 = "0dls4hwvf13yg7x5yfjrghbywg8g38vn5vr0rsf70hli3ydbfm43";
1781 1781 };
1782 1782 meta = {
1783 1783 license = [ pkgs.lib.licenses.mit ];
1784 1784 };
1785 1785 };
1786 1786 "pytz" = super.buildPythonPackage {
1787 1787 name = "pytz-2019.3";
1788 1788 doCheck = false;
1789 1789 src = fetchurl {
1790 1790 url = "https://files.pythonhosted.org/packages/82/c3/534ddba230bd4fbbd3b7a3d35f3341d014cca213f369a9940925e7e5f691/pytz-2019.3.tar.gz";
1791 1791 sha256 = "1ghrk1wg45d3nymj7bf4zj03n3bh64xmczhk4pfi577hdkdhcb5h";
1792 1792 };
1793 1793 meta = {
1794 1794 license = [ pkgs.lib.licenses.mit ];
1795 1795 };
1796 1796 };
1797 1797 "pyzmq" = super.buildPythonPackage {
1798 1798 name = "pyzmq-14.6.0";
1799 1799 doCheck = false;
1800 1800 src = fetchurl {
1801 1801 url = "https://files.pythonhosted.org/packages/8a/3b/5463d5a9d712cd8bbdac335daece0d69f6a6792da4e3dd89956c0db4e4e6/pyzmq-14.6.0.tar.gz";
1802 1802 sha256 = "1frmbjykvhmdg64g7sn20c9fpamrsfxwci1nhhg8q7jgz5pq0ikp";
1803 1803 };
1804 1804 meta = {
1805 1805 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "LGPL+BSD"; } { fullName = "GNU Library or Lesser General Public License (LGPL)"; } ];
1806 1806 };
1807 1807 };
1808 1808 "PyYAML" = super.buildPythonPackage {
1809 1809 name = "PyYAML-5.3.1";
1810 1810 doCheck = false;
1811 1811 src = fetchurl {
1812 1812 url = "https://files.pythonhosted.org/packages/64/c2/b80047c7ac2478f9501676c988a5411ed5572f35d1beff9cae07d321512c/PyYAML-5.3.1.tar.gz";
1813 1813 sha256 = "0pb4zvkfxfijkpgd1b86xjsqql97ssf1knbd1v53wkg1qm9cgsmq";
1814 1814 };
1815 1815 meta = {
1816 1816 license = [ pkgs.lib.licenses.mit ];
1817 1817 };
1818 1818 };
1819 1819 "regex" = super.buildPythonPackage {
1820 1820 name = "regex-2020.9.27";
1821 1821 doCheck = false;
1822 1822 src = fetchurl {
1823 1823 url = "https://files.pythonhosted.org/packages/93/8c/17f45cdfb39b13d4b5f909e4b4c2917abcbdef9c0036919a0399769148cf/regex-2020.9.27.tar.gz";
1824 1824 sha256 = "179ngfzwbsjvn5vhyzdahvmg0f7acahkwwy9bpjy1pv08bm2mwx6";
1825 1825 };
1826 1826 meta = {
1827 1827 license = [ pkgs.lib.licenses.psfl ];
1828 1828 };
1829 1829 };
1830 1830 "redis" = super.buildPythonPackage {
1831 1831 name = "redis-3.5.3";
1832 1832 doCheck = false;
1833 1833 src = fetchurl {
1834 1834 url = "https://files.pythonhosted.org/packages/b3/17/1e567ff78c83854e16b98694411fe6e08c3426af866ad11397cddceb80d3/redis-3.5.3.tar.gz";
1835 1835 sha256 = "0e7e0cfca8660dea8b7d5cd8c4f6c5e29e11f31158c0b0ae91a397f00e5a05a2";
1836 1836 };
1837 1837 meta = {
1838 1838 license = [ pkgs.lib.licenses.mit ];
1839 1839 };
1840 1840 };
1841 1841 "repoze.lru" = super.buildPythonPackage {
1842 1842 name = "repoze.lru-0.7";
1843 1843 doCheck = false;
1844 1844 src = fetchurl {
1845 1845 url = "https://files.pythonhosted.org/packages/12/bc/595a77c4b5e204847fdf19268314ef59c85193a9dc9f83630fc459c0fee5/repoze.lru-0.7.tar.gz";
1846 1846 sha256 = "0xzz1aw2smy8hdszrq8yhnklx6w1r1mf55061kalw3iq35gafa84";
1847 1847 };
1848 1848 meta = {
1849 1849 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1850 1850 };
1851 1851 };
1852 1852 "repoze.sendmail" = super.buildPythonPackage {
1853 1853 name = "repoze.sendmail-4.4.1";
1854 1854 doCheck = false;
1855 1855 propagatedBuildInputs = [
1856 1856 self."setuptools"
1857 1857 self."zope.interface"
1858 1858 self."transaction"
1859 1859 ];
1860 1860 src = fetchurl {
1861 1861 url = "https://files.pythonhosted.org/packages/12/4e/8ef1fd5c42765d712427b9c391419a77bd48877886d2cbc5e9f23c8cad9b/repoze.sendmail-4.4.1.tar.gz";
1862 1862 sha256 = "096ln02jr2afk7ab9j2czxqv2ryqq7m86ah572nqplx52iws73ks";
1863 1863 };
1864 1864 meta = {
1865 1865 license = [ pkgs.lib.licenses.zpl21 ];
1866 1866 };
1867 1867 };
1868 1868 "requests" = super.buildPythonPackage {
1869 1869 name = "requests-2.22.0";
1870 1870 doCheck = false;
1871 1871 propagatedBuildInputs = [
1872 1872 self."chardet"
1873 1873 self."idna"
1874 1874 self."urllib3"
1875 1875 self."certifi"
1876 1876 ];
1877 1877 src = fetchurl {
1878 1878 url = "https://files.pythonhosted.org/packages/01/62/ddcf76d1d19885e8579acb1b1df26a852b03472c0e46d2b959a714c90608/requests-2.22.0.tar.gz";
1879 1879 sha256 = "1d5ybh11jr5sm7xp6mz8fyc7vrp4syifds91m7sj60xalal0gq0i";
1880 1880 };
1881 1881 meta = {
1882 1882 license = [ pkgs.lib.licenses.asl20 ];
1883 1883 };
1884 1884 };
1885 1885 "rhodecode-enterprise-ce" = super.buildPythonPackage {
1886 name = "rhodecode-enterprise-ce-4.25.2";
1886 name = "rhodecode-enterprise-ce-4.26.0";
1887 1887 buildInputs = [
1888 1888 self."pytest"
1889 1889 self."py"
1890 1890 self."pytest-cov"
1891 1891 self."pytest-sugar"
1892 1892 self."pytest-runner"
1893 1893 self."pytest-profiling"
1894 1894 self."pytest-timeout"
1895 1895 self."gprof2dot"
1896 1896 self."mock"
1897 1897 self."cov-core"
1898 1898 self."coverage"
1899 1899 self."webtest"
1900 1900 self."beautifulsoup4"
1901 1901 self."configobj"
1902 1902 ];
1903 1903 doCheck = true;
1904 1904 propagatedBuildInputs = [
1905 1905 self."amqp"
1906 1906 self."babel"
1907 1907 self."beaker"
1908 1908 self."bleach"
1909 1909 self."celery"
1910 1910 self."channelstream"
1911 1911 self."click"
1912 1912 self."colander"
1913 1913 self."configobj"
1914 1914 self."cssselect"
1915 1915 self."cryptography"
1916 1916 self."decorator"
1917 1917 self."deform"
1918 1918 self."docutils"
1919 1919 self."dogpile.cache"
1920 1920 self."dogpile.core"
1921 1921 self."formencode"
1922 1922 self."future"
1923 1923 self."futures"
1924 1924 self."infrae.cache"
1925 1925 self."iso8601"
1926 1926 self."itsdangerous"
1927 1927 self."kombu"
1928 1928 self."lxml"
1929 1929 self."mako"
1930 1930 self."markdown"
1931 1931 self."markupsafe"
1932 1932 self."msgpack-python"
1933 1933 self."pyotp"
1934 1934 self."packaging"
1935 1935 self."pathlib2"
1936 1936 self."paste"
1937 1937 self."pastedeploy"
1938 1938 self."pastescript"
1939 1939 self."peppercorn"
1940 1940 self."premailer"
1941 1941 self."psutil"
1942 1942 self."py-bcrypt"
1943 1943 self."pycurl"
1944 1944 self."pycrypto"
1945 1945 self."pygments"
1946 1946 self."pyparsing"
1947 1947 self."pyramid-debugtoolbar"
1948 1948 self."pyramid-mako"
1949 1949 self."pyramid"
1950 1950 self."pyramid-mailer"
1951 1951 self."python-dateutil"
1952 1952 self."python-ldap"
1953 1953 self."python-memcached"
1954 1954 self."python-pam"
1955 1955 self."python-saml"
1956 1956 self."pytz"
1957 1957 self."tzlocal"
1958 1958 self."pyzmq"
1959 1959 self."py-gfm"
1960 1960 self."regex"
1961 1961 self."redis"
1962 1962 self."repoze.lru"
1963 1963 self."requests"
1964 1964 self."routes"
1965 1965 self."simplejson"
1966 1966 self."six"
1967 1967 self."sqlalchemy"
1968 1968 self."sshpubkeys"
1969 1969 self."subprocess32"
1970 1970 self."supervisor"
1971 1971 self."translationstring"
1972 1972 self."urllib3"
1973 1973 self."urlobject"
1974 1974 self."venusian"
1975 1975 self."weberror"
1976 1976 self."webhelpers2"
1977 1977 self."webob"
1978 1978 self."whoosh"
1979 1979 self."wsgiref"
1980 1980 self."zope.cachedescriptors"
1981 1981 self."zope.deprecation"
1982 1982 self."zope.event"
1983 1983 self."zope.interface"
1984 1984 self."mysql-python"
1985 1985 self."pymysql"
1986 1986 self."pysqlite"
1987 1987 self."psycopg2"
1988 1988 self."nbconvert"
1989 1989 self."nbformat"
1990 1990 self."jupyter-client"
1991 1991 self."jupyter-core"
1992 1992 self."alembic"
1993 1993 self."invoke"
1994 1994 self."bumpversion"
1995 1995 self."gevent"
1996 1996 self."greenlet"
1997 1997 self."gunicorn"
1998 1998 self."waitress"
1999 1999 self."ipdb"
2000 2000 self."ipython"
2001 2001 self."rhodecode-tools"
2002 2002 self."appenlight-client"
2003 2003 self."pytest"
2004 2004 self."py"
2005 2005 self."pytest-cov"
2006 2006 self."pytest-sugar"
2007 2007 self."pytest-runner"
2008 2008 self."pytest-profiling"
2009 2009 self."pytest-timeout"
2010 2010 self."gprof2dot"
2011 2011 self."mock"
2012 2012 self."cov-core"
2013 2013 self."coverage"
2014 2014 self."webtest"
2015 2015 self."beautifulsoup4"
2016 2016 ];
2017 2017 src = ./.;
2018 2018 meta = {
2019 2019 license = [ { fullName = "Affero GNU General Public License v3 or later (AGPLv3+)"; } { fullName = "AGPLv3, and Commercial License"; } ];
2020 2020 };
2021 2021 };
2022 2022 "rhodecode-tools" = super.buildPythonPackage {
2023 2023 name = "rhodecode-tools-1.4.0";
2024 2024 doCheck = false;
2025 2025 propagatedBuildInputs = [
2026 2026 self."click"
2027 2027 self."future"
2028 2028 self."six"
2029 2029 self."mako"
2030 2030 self."markupsafe"
2031 2031 self."requests"
2032 2032 self."urllib3"
2033 2033 self."whoosh"
2034 2034 self."elasticsearch"
2035 2035 self."elasticsearch-dsl"
2036 2036 self."elasticsearch2"
2037 2037 self."elasticsearch1-dsl"
2038 2038 ];
2039 2039 src = fetchurl {
2040 2040 url = "https://code.rhodecode.com/rhodecode-tools-ce/artifacts/download/0-ed54e749-2ef5-4bc7-ae7f-7900e3c2aa15.tar.gz?sha256=76f024bad3a1e55fdb3d64f13f5b77ff21a12fee699918de2110fe21effd5a3a";
2041 2041 sha256 = "0fjszppj3zhh47g1i6b9xqps28gzfxdkzwb47pdmzrd1sfx29w3n";
2042 2042 };
2043 2043 meta = {
2044 2044 license = [ { fullName = "Apache 2.0 and Proprietary"; } ];
2045 2045 };
2046 2046 };
2047 2047 "routes" = super.buildPythonPackage {
2048 2048 name = "routes-2.4.1";
2049 2049 doCheck = false;
2050 2050 propagatedBuildInputs = [
2051 2051 self."six"
2052 2052 self."repoze.lru"
2053 2053 ];
2054 2054 src = fetchurl {
2055 2055 url = "https://files.pythonhosted.org/packages/33/38/ea827837e68d9c7dde4cff7ec122a93c319f0effc08ce92a17095576603f/Routes-2.4.1.tar.gz";
2056 2056 sha256 = "1zamff3m0kc4vyfniyhxpkkcqv1rrgnmh37ykxv34nna1ws47vi6";
2057 2057 };
2058 2058 meta = {
2059 2059 license = [ pkgs.lib.licenses.mit ];
2060 2060 };
2061 2061 };
2062 2062 "scandir" = super.buildPythonPackage {
2063 2063 name = "scandir-1.10.0";
2064 2064 doCheck = false;
2065 2065 src = fetchurl {
2066 2066 url = "https://files.pythonhosted.org/packages/df/f5/9c052db7bd54d0cbf1bc0bb6554362bba1012d03e5888950a4f5c5dadc4e/scandir-1.10.0.tar.gz";
2067 2067 sha256 = "1bkqwmf056pkchf05ywbnf659wqlp6lljcdb0y88wr9f0vv32ijd";
2068 2068 };
2069 2069 meta = {
2070 2070 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "New BSD License"; } ];
2071 2071 };
2072 2072 };
2073 2073 "setproctitle" = super.buildPythonPackage {
2074 2074 name = "setproctitle-1.1.10";
2075 2075 doCheck = false;
2076 2076 src = fetchurl {
2077 2077 url = "https://files.pythonhosted.org/packages/5a/0d/dc0d2234aacba6cf1a729964383e3452c52096dc695581248b548786f2b3/setproctitle-1.1.10.tar.gz";
2078 2078 sha256 = "163kplw9dcrw0lffq1bvli5yws3rngpnvrxrzdw89pbphjjvg0v2";
2079 2079 };
2080 2080 meta = {
2081 2081 license = [ pkgs.lib.licenses.bsdOriginal ];
2082 2082 };
2083 2083 };
2084 2084 "setuptools" = super.buildPythonPackage {
2085 2085 name = "setuptools-44.1.0";
2086 2086 doCheck = false;
2087 2087 src = fetchurl {
2088 2088 url = "https://files.pythonhosted.org/packages/ed/7b/bbf89ca71e722b7f9464ebffe4b5ee20a9e5c9a555a56e2d3914bb9119a6/setuptools-44.1.0.zip";
2089 2089 sha256 = "1jja896zvd1ppccnjbhkgagxbwchgq6vfamp6qn1hvywq6q9cjkr";
2090 2090 };
2091 2091 meta = {
2092 2092 license = [ pkgs.lib.licenses.mit ];
2093 2093 };
2094 2094 };
2095 2095 "setuptools-scm" = super.buildPythonPackage {
2096 2096 name = "setuptools-scm-3.5.0";
2097 2097 doCheck = false;
2098 2098 src = fetchurl {
2099 2099 url = "https://files.pythonhosted.org/packages/b2/f7/60a645aae001a2e06cf4b8db2fba9d9f36b8fd378f10647e3e218b61b74b/setuptools_scm-3.5.0.tar.gz";
2100 2100 sha256 = "5bdf21a05792903cafe7ae0c9501182ab52497614fa6b1750d9dbae7b60c1a87";
2101 2101 };
2102 2102 meta = {
2103 2103 license = [ pkgs.lib.licenses.psfl ];
2104 2104 };
2105 2105 };
2106 2106 "simplegeneric" = super.buildPythonPackage {
2107 2107 name = "simplegeneric-0.8.1";
2108 2108 doCheck = false;
2109 2109 src = fetchurl {
2110 2110 url = "https://files.pythonhosted.org/packages/3d/57/4d9c9e3ae9a255cd4e1106bb57e24056d3d0709fc01b2e3e345898e49d5b/simplegeneric-0.8.1.zip";
2111 2111 sha256 = "0wwi1c6md4vkbcsfsf8dklf3vr4mcdj4mpxkanwgb6jb1432x5yw";
2112 2112 };
2113 2113 meta = {
2114 2114 license = [ pkgs.lib.licenses.zpl21 ];
2115 2115 };
2116 2116 };
2117 2117 "simplejson" = super.buildPythonPackage {
2118 2118 name = "simplejson-3.16.0";
2119 2119 doCheck = false;
2120 2120 src = fetchurl {
2121 2121 url = "https://files.pythonhosted.org/packages/e3/24/c35fb1c1c315fc0fffe61ea00d3f88e85469004713dab488dee4f35b0aff/simplejson-3.16.0.tar.gz";
2122 2122 sha256 = "19cws1syk8jzq2pw43878dv6fjkb0ifvjpx0i9aajix6kc9jkwxi";
2123 2123 };
2124 2124 meta = {
2125 2125 license = [ { fullName = "Academic Free License (AFL)"; } pkgs.lib.licenses.mit ];
2126 2126 };
2127 2127 };
2128 2128 "six" = super.buildPythonPackage {
2129 2129 name = "six-1.11.0";
2130 2130 doCheck = false;
2131 2131 src = fetchurl {
2132 2132 url = "https://files.pythonhosted.org/packages/16/d8/bc6316cf98419719bd59c91742194c111b6f2e85abac88e496adefaf7afe/six-1.11.0.tar.gz";
2133 2133 sha256 = "1scqzwc51c875z23phj48gircqjgnn3af8zy2izjwmnlxrxsgs3h";
2134 2134 };
2135 2135 meta = {
2136 2136 license = [ pkgs.lib.licenses.mit ];
2137 2137 };
2138 2138 };
2139 2139 "sqlalchemy" = super.buildPythonPackage {
2140 2140 name = "sqlalchemy-1.3.15";
2141 2141 doCheck = false;
2142 2142 src = fetchurl {
2143 2143 url = "https://files.pythonhosted.org/packages/8c/30/4134e726dd5ed13728ff814fa91fc01c447ad8700504653fe99d91fdd34b/SQLAlchemy-1.3.15.tar.gz";
2144 2144 sha256 = "0iglkvymfp35zm5pxy5kzqvcv96kkas0chqdx7xpla86sspa9k64";
2145 2145 };
2146 2146 meta = {
2147 2147 license = [ pkgs.lib.licenses.mit ];
2148 2148 };
2149 2149 };
2150 2150 "sshpubkeys" = super.buildPythonPackage {
2151 2151 name = "sshpubkeys-3.1.0";
2152 2152 doCheck = false;
2153 2153 propagatedBuildInputs = [
2154 2154 self."cryptography"
2155 2155 self."ecdsa"
2156 2156 ];
2157 2157 src = fetchurl {
2158 2158 url = "https://files.pythonhosted.org/packages/00/23/f7508a12007c96861c3da811992f14283d79c819d71a217b3e12d5196649/sshpubkeys-3.1.0.tar.gz";
2159 2159 sha256 = "105g2li04nm1hb15a2y6hm9m9k7fbrkd5l3gy12w3kgcmsf3k25k";
2160 2160 };
2161 2161 meta = {
2162 2162 license = [ pkgs.lib.licenses.bsdOriginal ];
2163 2163 };
2164 2164 };
2165 2165 "subprocess32" = super.buildPythonPackage {
2166 2166 name = "subprocess32-3.5.4";
2167 2167 doCheck = false;
2168 2168 src = fetchurl {
2169 2169 url = "https://files.pythonhosted.org/packages/32/c8/564be4d12629b912ea431f1a50eb8b3b9d00f1a0b1ceff17f266be190007/subprocess32-3.5.4.tar.gz";
2170 2170 sha256 = "17f7mvwx2271s1wrl0qac3wjqqnrqag866zs3qc8v5wp0k43fagb";
2171 2171 };
2172 2172 meta = {
2173 2173 license = [ pkgs.lib.licenses.psfl ];
2174 2174 };
2175 2175 };
2176 2176 "supervisor" = super.buildPythonPackage {
2177 2177 name = "supervisor-4.1.0";
2178 2178 doCheck = false;
2179 2179 src = fetchurl {
2180 2180 url = "https://files.pythonhosted.org/packages/de/87/ee1ad8fa533a4b5f2c7623f4a2b585d3c1947af7bed8e65bc7772274320e/supervisor-4.1.0.tar.gz";
2181 2181 sha256 = "10q36sa1jqljyyyl7cif52akpygl5kmlqq9x91hmx53f8zh6zj1d";
2182 2182 };
2183 2183 meta = {
2184 2184 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
2185 2185 };
2186 2186 };
2187 2187 "tempita" = super.buildPythonPackage {
2188 2188 name = "tempita-0.5.2";
2189 2189 doCheck = false;
2190 2190 src = fetchurl {
2191 2191 url = "https://files.pythonhosted.org/packages/56/c8/8ed6eee83dbddf7b0fc64dd5d4454bc05e6ccaafff47991f73f2894d9ff4/Tempita-0.5.2.tar.gz";
2192 2192 sha256 = "177wwq45slfyajd8csy477bmdmzipyw0dm7i85k3akb7m85wzkna";
2193 2193 };
2194 2194 meta = {
2195 2195 license = [ pkgs.lib.licenses.mit ];
2196 2196 };
2197 2197 };
2198 2198 "termcolor" = super.buildPythonPackage {
2199 2199 name = "termcolor-1.1.0";
2200 2200 doCheck = false;
2201 2201 src = fetchurl {
2202 2202 url = "https://files.pythonhosted.org/packages/8a/48/a76be51647d0eb9f10e2a4511bf3ffb8cc1e6b14e9e4fab46173aa79f981/termcolor-1.1.0.tar.gz";
2203 2203 sha256 = "0fv1vq14rpqwgazxg4981904lfyp84mnammw7y046491cv76jv8x";
2204 2204 };
2205 2205 meta = {
2206 2206 license = [ pkgs.lib.licenses.mit ];
2207 2207 };
2208 2208 };
2209 2209 "testpath" = super.buildPythonPackage {
2210 2210 name = "testpath-0.4.4";
2211 2211 doCheck = false;
2212 2212 src = fetchurl {
2213 2213 url = "https://files.pythonhosted.org/packages/2c/b3/5d57205e896d8998d77ad12aa42ebce75cd97d8b9a97d00ba078c4c9ffeb/testpath-0.4.4.tar.gz";
2214 2214 sha256 = "0zpcmq22dz79ipvvsfnw1ykpjcaj6xyzy7ws77s5b5ql3hka7q30";
2215 2215 };
2216 2216 meta = {
2217 2217 license = [ ];
2218 2218 };
2219 2219 };
2220 2220 "traitlets" = super.buildPythonPackage {
2221 2221 name = "traitlets-4.3.3";
2222 2222 doCheck = false;
2223 2223 propagatedBuildInputs = [
2224 2224 self."ipython-genutils"
2225 2225 self."six"
2226 2226 self."decorator"
2227 2227 self."enum34"
2228 2228 ];
2229 2229 src = fetchurl {
2230 2230 url = "https://files.pythonhosted.org/packages/75/b0/43deb021bc943f18f07cbe3dac1d681626a48997b7ffa1e7fb14ef922b21/traitlets-4.3.3.tar.gz";
2231 2231 sha256 = "1xsrwgivpkxlbr4dfndfsi098s29yqgswgjc1qqn69yxklvfw8yh";
2232 2232 };
2233 2233 meta = {
2234 2234 license = [ pkgs.lib.licenses.bsdOriginal ];
2235 2235 };
2236 2236 };
2237 2237 "transaction" = super.buildPythonPackage {
2238 2238 name = "transaction-2.4.0";
2239 2239 doCheck = false;
2240 2240 propagatedBuildInputs = [
2241 2241 self."zope.interface"
2242 2242 ];
2243 2243 src = fetchurl {
2244 2244 url = "https://files.pythonhosted.org/packages/9d/7d/0e8af0d059e052b9dcf2bb5a08aad20ae3e238746bdd3f8701a60969b363/transaction-2.4.0.tar.gz";
2245 2245 sha256 = "17wz1y524ca07vr03yddy8dv0gbscs06dbdywmllxv5rc725jq3j";
2246 2246 };
2247 2247 meta = {
2248 2248 license = [ pkgs.lib.licenses.zpl21 ];
2249 2249 };
2250 2250 };
2251 2251 "translationstring" = super.buildPythonPackage {
2252 2252 name = "translationstring-1.3";
2253 2253 doCheck = false;
2254 2254 src = fetchurl {
2255 2255 url = "https://files.pythonhosted.org/packages/5e/eb/bee578cc150b44c653b63f5ebe258b5d0d812ddac12497e5f80fcad5d0b4/translationstring-1.3.tar.gz";
2256 2256 sha256 = "0bdpcnd9pv0131dl08h4zbcwmgc45lyvq3pa224xwan5b3x4rr2f";
2257 2257 };
2258 2258 meta = {
2259 2259 license = [ { fullName = "BSD-like (http://repoze.org/license.html)"; } ];
2260 2260 };
2261 2261 };
2262 2262 "tzlocal" = super.buildPythonPackage {
2263 2263 name = "tzlocal-1.5.1";
2264 2264 doCheck = false;
2265 2265 propagatedBuildInputs = [
2266 2266 self."pytz"
2267 2267 ];
2268 2268 src = fetchurl {
2269 2269 url = "https://files.pythonhosted.org/packages/cb/89/e3687d3ed99bc882793f82634e9824e62499fdfdc4b1ae39e211c5b05017/tzlocal-1.5.1.tar.gz";
2270 2270 sha256 = "0kiciwiqx0bv0fbc913idxibc4ygg4cb7f8rcpd9ij2shi4bigjf";
2271 2271 };
2272 2272 meta = {
2273 2273 license = [ pkgs.lib.licenses.mit ];
2274 2274 };
2275 2275 };
2276 2276 "urllib3" = super.buildPythonPackage {
2277 2277 name = "urllib3-1.25.2";
2278 2278 doCheck = false;
2279 2279 src = fetchurl {
2280 2280 url = "https://files.pythonhosted.org/packages/9a/8b/ea6d2beb2da6e331e9857d0a60b79ed4f72dcbc4e2c7f2d2521b0480fda2/urllib3-1.25.2.tar.gz";
2281 2281 sha256 = "1nq2k4pss1ihsjh02r41sqpjpm5rfqkjfysyq7g7n2i1p7c66c55";
2282 2282 };
2283 2283 meta = {
2284 2284 license = [ pkgs.lib.licenses.mit ];
2285 2285 };
2286 2286 };
2287 2287 "urlobject" = super.buildPythonPackage {
2288 2288 name = "urlobject-2.4.3";
2289 2289 doCheck = false;
2290 2290 src = fetchurl {
2291 2291 url = "https://files.pythonhosted.org/packages/e2/b8/1d0a916f4b34c4618846e6da0e4eeaa8fcb4a2f39e006434fe38acb74b34/URLObject-2.4.3.tar.gz";
2292 2292 sha256 = "1ahc8ficzfvr2avln71immfh4ls0zyv6cdaa5xmkdj5rd87f5cj7";
2293 2293 };
2294 2294 meta = {
2295 2295 license = [ pkgs.lib.licenses.publicDomain ];
2296 2296 };
2297 2297 };
2298 2298 "venusian" = super.buildPythonPackage {
2299 2299 name = "venusian-1.2.0";
2300 2300 doCheck = false;
2301 2301 src = fetchurl {
2302 2302 url = "https://files.pythonhosted.org/packages/7e/6f/40a9d43ac77cb51cb62be5b5662d170f43f8037bdc4eab56336c4ca92bb7/venusian-1.2.0.tar.gz";
2303 2303 sha256 = "0ghyx66g8ikx9nx1mnwqvdcqm11i1vlq0hnvwl50s48bp22q5v34";
2304 2304 };
2305 2305 meta = {
2306 2306 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
2307 2307 };
2308 2308 };
2309 2309 "vine" = super.buildPythonPackage {
2310 2310 name = "vine-1.3.0";
2311 2311 doCheck = false;
2312 2312 src = fetchurl {
2313 2313 url = "https://files.pythonhosted.org/packages/1c/e1/79fb8046e607dd6c2ad05c9b8ebac9d0bd31d086a08f02699e96fc5b3046/vine-1.3.0.tar.gz";
2314 2314 sha256 = "11ydsbhl1vabndc2r979dv61s6j2b0giq6dgvryifvq1m7bycghk";
2315 2315 };
2316 2316 meta = {
2317 2317 license = [ pkgs.lib.licenses.bsdOriginal ];
2318 2318 };
2319 2319 };
2320 2320 "waitress" = super.buildPythonPackage {
2321 2321 name = "waitress-1.3.1";
2322 2322 doCheck = false;
2323 2323 src = fetchurl {
2324 2324 url = "https://files.pythonhosted.org/packages/a6/e6/708da7bba65898e5d759ade8391b1077e49d07be0b0223c39f5be04def56/waitress-1.3.1.tar.gz";
2325 2325 sha256 = "1iysl8ka3l4cdrr0r19fh1cv28q41mwpvgsb81ji7k4shkb0k3i7";
2326 2326 };
2327 2327 meta = {
2328 2328 license = [ pkgs.lib.licenses.zpl21 ];
2329 2329 };
2330 2330 };
2331 2331 "wcwidth" = super.buildPythonPackage {
2332 2332 name = "wcwidth-0.1.9";
2333 2333 doCheck = false;
2334 2334 src = fetchurl {
2335 2335 url = "https://files.pythonhosted.org/packages/25/9d/0acbed6e4a4be4fc99148f275488580968f44ddb5e69b8ceb53fc9df55a0/wcwidth-0.1.9.tar.gz";
2336 2336 sha256 = "1wf5ycjx8s066rdvr0fgz4xds9a8zhs91c4jzxvvymm1c8l8cwzf";
2337 2337 };
2338 2338 meta = {
2339 2339 license = [ pkgs.lib.licenses.mit ];
2340 2340 };
2341 2341 };
2342 2342 "webencodings" = super.buildPythonPackage {
2343 2343 name = "webencodings-0.5.1";
2344 2344 doCheck = false;
2345 2345 src = fetchurl {
2346 2346 url = "https://files.pythonhosted.org/packages/0b/02/ae6ceac1baeda530866a85075641cec12989bd8d31af6d5ab4a3e8c92f47/webencodings-0.5.1.tar.gz";
2347 2347 sha256 = "08qrgrc4hrximb2gqnl69g01s93rhf2842jfxdjljc1dbwj1qsmk";
2348 2348 };
2349 2349 meta = {
2350 2350 license = [ pkgs.lib.licenses.bsdOriginal ];
2351 2351 };
2352 2352 };
2353 2353 "weberror" = super.buildPythonPackage {
2354 2354 name = "weberror-0.13.1";
2355 2355 doCheck = false;
2356 2356 propagatedBuildInputs = [
2357 2357 self."webob"
2358 2358 self."tempita"
2359 2359 self."pygments"
2360 2360 self."paste"
2361 2361 ];
2362 2362 src = fetchurl {
2363 2363 url = "https://files.pythonhosted.org/packages/07/0a/09ca5eb0fab5c0d17b380026babe81c96ecebb13f2b06c3203432dd7be72/WebError-0.13.1.tar.gz";
2364 2364 sha256 = "0r4qvnf2r92gfnpa1kwygh4j2x6j3axg2i4an6hyxwg2gpaqp7y1";
2365 2365 };
2366 2366 meta = {
2367 2367 license = [ pkgs.lib.licenses.mit ];
2368 2368 };
2369 2369 };
2370 2370 "webhelpers2" = super.buildPythonPackage {
2371 2371 name = "webhelpers2-2.0";
2372 2372 doCheck = false;
2373 2373 propagatedBuildInputs = [
2374 2374 self."markupsafe"
2375 2375 self."six"
2376 2376 ];
2377 2377 src = fetchurl {
2378 2378 url = "https://files.pythonhosted.org/packages/ff/30/56342c6ea522439e3662427c8d7b5e5b390dff4ff2dc92d8afcb8ab68b75/WebHelpers2-2.0.tar.gz";
2379 2379 sha256 = "0aphva1qmxh83n01p53f5fd43m4srzbnfbz5ajvbx9aj2aipwmcs";
2380 2380 };
2381 2381 meta = {
2382 2382 license = [ pkgs.lib.licenses.mit ];
2383 2383 };
2384 2384 };
2385 2385 "webob" = super.buildPythonPackage {
2386 2386 name = "webob-1.8.5";
2387 2387 doCheck = false;
2388 2388 src = fetchurl {
2389 2389 url = "https://files.pythonhosted.org/packages/9d/1a/0c89c070ee2829c934cb6c7082287c822e28236a4fcf90063e6be7c35532/WebOb-1.8.5.tar.gz";
2390 2390 sha256 = "11khpzaxc88q31v25ic330gsf56fwmbdc9b30br8mvp0fmwspah5";
2391 2391 };
2392 2392 meta = {
2393 2393 license = [ pkgs.lib.licenses.mit ];
2394 2394 };
2395 2395 };
2396 2396 "webtest" = super.buildPythonPackage {
2397 2397 name = "webtest-2.0.34";
2398 2398 doCheck = false;
2399 2399 propagatedBuildInputs = [
2400 2400 self."six"
2401 2401 self."webob"
2402 2402 self."waitress"
2403 2403 self."beautifulsoup4"
2404 2404 ];
2405 2405 src = fetchurl {
2406 2406 url = "https://files.pythonhosted.org/packages/2c/74/a0e63feee438735d628631e2b70d82280276a930637ac535479e5fad9427/WebTest-2.0.34.tar.gz";
2407 2407 sha256 = "0x1y2c8z4fmpsny4hbp6ka37si2g10r5r2jwxhvv5mx7g3blq4bi";
2408 2408 };
2409 2409 meta = {
2410 2410 license = [ pkgs.lib.licenses.mit ];
2411 2411 };
2412 2412 };
2413 2413 "whoosh" = super.buildPythonPackage {
2414 2414 name = "whoosh-2.7.4";
2415 2415 doCheck = false;
2416 2416 src = fetchurl {
2417 2417 url = "https://files.pythonhosted.org/packages/25/2b/6beed2107b148edc1321da0d489afc4617b9ed317ef7b72d4993cad9b684/Whoosh-2.7.4.tar.gz";
2418 2418 sha256 = "10qsqdjpbc85fykc1vgcs8xwbgn4l2l52c8d83xf1q59pwyn79bw";
2419 2419 };
2420 2420 meta = {
2421 2421 license = [ pkgs.lib.licenses.bsdOriginal pkgs.lib.licenses.bsd2 ];
2422 2422 };
2423 2423 };
2424 2424 "ws4py" = super.buildPythonPackage {
2425 2425 name = "ws4py-0.5.1";
2426 2426 doCheck = false;
2427 2427 src = fetchurl {
2428 2428 url = "https://files.pythonhosted.org/packages/53/20/4019a739b2eefe9282d3822ef6a225250af964b117356971bd55e274193c/ws4py-0.5.1.tar.gz";
2429 2429 sha256 = "10slbbf2jm4hpr92jx7kh7mhf48sjl01v2w4d8z3f1p0ybbp7l19";
2430 2430 };
2431 2431 meta = {
2432 2432 license = [ pkgs.lib.licenses.bsdOriginal ];
2433 2433 };
2434 2434 };
2435 2435 "wsgiref" = super.buildPythonPackage {
2436 2436 name = "wsgiref-0.1.2";
2437 2437 doCheck = false;
2438 2438 src = fetchurl {
2439 2439 url = "https://files.pythonhosted.org/packages/41/9e/309259ce8dff8c596e8c26df86dbc4e848b9249fd36797fd60be456f03fc/wsgiref-0.1.2.zip";
2440 2440 sha256 = "0y8fyjmpq7vwwm4x732w97qbkw78rjwal5409k04cw4m03411rn7";
2441 2441 };
2442 2442 meta = {
2443 2443 license = [ { fullName = "PSF or ZPL"; } ];
2444 2444 };
2445 2445 };
2446 2446 "zipp" = super.buildPythonPackage {
2447 2447 name = "zipp-1.2.0";
2448 2448 doCheck = false;
2449 2449 propagatedBuildInputs = [
2450 2450 self."contextlib2"
2451 2451 ];
2452 2452 src = fetchurl {
2453 2453 url = "https://files.pythonhosted.org/packages/78/08/d52f0ea643bc1068d6dc98b412f4966a9b63255d20911a23ac3220c033c4/zipp-1.2.0.tar.gz";
2454 2454 sha256 = "1c91lnv1bxjimh8as27hz7bghsjkkbxn1d37xq7in9c82iai0167";
2455 2455 };
2456 2456 meta = {
2457 2457 license = [ pkgs.lib.licenses.mit ];
2458 2458 };
2459 2459 };
2460 2460 "zope.cachedescriptors" = super.buildPythonPackage {
2461 2461 name = "zope.cachedescriptors-4.3.1";
2462 2462 doCheck = false;
2463 2463 propagatedBuildInputs = [
2464 2464 self."setuptools"
2465 2465 ];
2466 2466 src = fetchurl {
2467 2467 url = "https://files.pythonhosted.org/packages/2f/89/ebe1890cc6d3291ebc935558fa764d5fffe571018dbbee200e9db78762cb/zope.cachedescriptors-4.3.1.tar.gz";
2468 2468 sha256 = "0jhr3m5p74c6r7k8iv0005b8bfsialih9d7zl5vx38rf5xq1lk8z";
2469 2469 };
2470 2470 meta = {
2471 2471 license = [ pkgs.lib.licenses.zpl21 ];
2472 2472 };
2473 2473 };
2474 2474 "zope.deprecation" = super.buildPythonPackage {
2475 2475 name = "zope.deprecation-4.4.0";
2476 2476 doCheck = false;
2477 2477 propagatedBuildInputs = [
2478 2478 self."setuptools"
2479 2479 ];
2480 2480 src = fetchurl {
2481 2481 url = "https://files.pythonhosted.org/packages/34/da/46e92d32d545dd067b9436279d84c339e8b16de2ca393d7b892bc1e1e9fd/zope.deprecation-4.4.0.tar.gz";
2482 2482 sha256 = "1pz2cv7gv9y1r3m0bdv7ks1alagmrn5msm5spwdzkb2by0w36i8d";
2483 2483 };
2484 2484 meta = {
2485 2485 license = [ pkgs.lib.licenses.zpl21 ];
2486 2486 };
2487 2487 };
2488 2488 "zope.event" = super.buildPythonPackage {
2489 2489 name = "zope.event-4.4";
2490 2490 doCheck = false;
2491 2491 propagatedBuildInputs = [
2492 2492 self."setuptools"
2493 2493 ];
2494 2494 src = fetchurl {
2495 2495 url = "https://files.pythonhosted.org/packages/4c/b2/51c0369adcf5be2334280eed230192ab3b03f81f8efda9ddea6f65cc7b32/zope.event-4.4.tar.gz";
2496 2496 sha256 = "1ksbc726av9xacml6jhcfyn828hlhb9xlddpx6fcvnlvmpmpvhk9";
2497 2497 };
2498 2498 meta = {
2499 2499 license = [ pkgs.lib.licenses.zpl21 ];
2500 2500 };
2501 2501 };
2502 2502 "zope.interface" = super.buildPythonPackage {
2503 2503 name = "zope.interface-4.6.0";
2504 2504 doCheck = false;
2505 2505 propagatedBuildInputs = [
2506 2506 self."setuptools"
2507 2507 ];
2508 2508 src = fetchurl {
2509 2509 url = "https://files.pythonhosted.org/packages/4e/d0/c9d16bd5b38de44a20c6dc5d5ed80a49626fafcb3db9f9efdc2a19026db6/zope.interface-4.6.0.tar.gz";
2510 2510 sha256 = "1rgh2x3rcl9r0v0499kf78xy86rnmanajf4ywmqb943wpk50sg8v";
2511 2511 };
2512 2512 meta = {
2513 2513 license = [ pkgs.lib.licenses.zpl21 ];
2514 2514 };
2515 2515 };
2516 2516
2517 2517 ### Test requirements
2518 2518
2519 2519
2520 2520 }
@@ -1,1 +1,1 b''
1 4.25.2 No newline at end of file
1 4.26.0 No newline at end of file
@@ -1,55 +1,56 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2016-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import logging
22 22
23 23 from rhodecode import events
24 24 from rhodecode.lib import rc_cache
25 25
26 26 log = logging.getLogger(__name__)
27 27
28 28 # names of namespaces used for the different permission-related caches;
29 29 # during a flush operation we need to take care of all of them
30 30 cache_namespaces = [
31 31 'cache_user_auth.{}',
32 32 'cache_user_repo_acl_ids.{}',
33 33 'cache_user_user_group_acl_ids.{}',
34 34 'cache_user_repo_group_acl_ids.{}'
35 35 ]
36 36
37 37
38 38 def trigger_user_permission_flush(event):
39 39 """
40 40 Subscriber to the `UserPermissionsChange` event. This triggers the
41 41 automatic flush of permission caches, so the affected users receive their
42 42 new permissions right away.
43 43 """
44
44 invalidate = True
45 45 affected_user_ids = set(event.user_ids)
46 46 for user_id in affected_user_ids:
47 47 for cache_namespace_uid_tmpl in cache_namespaces:
48 48 cache_namespace_uid = cache_namespace_uid_tmpl.format(user_id)
49 del_keys = rc_cache.clear_cache_namespace('cache_perms', cache_namespace_uid)
50 log.debug('Deleted %s cache keys for user_id: %s and namespace %s',
49 del_keys = rc_cache.clear_cache_namespace(
50 'cache_perms', cache_namespace_uid, invalidate=invalidate)
51 log.debug('Invalidated %s cache keys for user_id: %s and namespace %s',
51 52 del_keys, user_id, cache_namespace_uid)
52 53
53 54
54 55 def includeme(config):
55 56 config.add_subscriber(trigger_user_permission_flush, events.UserPermissionsChange)
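
The hunk above switches the flush from hard key deletion to invalidation. As a rough illustration of the difference, here is a minimal self-contained sketch, not the actual rc_cache implementation; the internal store, the timestamp marker, and the lazy-miss behaviour are assumptions made only to contrast the two strategies:

    import time

    class NamespaceCache(object):
        """Toy cache contrasting hard deletion with invalidation."""

        def __init__(self):
            self._store = {}            # key -> (value, written_at)
            self._invalidated_at = 0.0  # namespace-wide invalidation marker

        def set(self, key, value):
            self._store[key] = (value, time.time())

        def get(self, key):
            entry = self._store.get(key)
            if entry is None:
                return None
            value, written_at = entry
            # entries older than the marker act as misses and get recomputed lazily
            if written_at <= self._invalidated_at:
                return None
            return value

        def delete_all(self):
            # hard delete: can race with a concurrent writer re-inserting stale data
            count = len(self._store)
            self._store.clear()
            return count

        def invalidate_all(self):
            # invalidation: keys stay put, readers simply stop trusting them
            self._invalidated_at = time.time()
            return len(self._store)

Invalidation leaves the keys in place and only marks them untrusted, which avoids the locking and race problems of deleting keys that another request may be writing; that is why the subscriber now passes `invalidate=invalidate` instead of hard-clearing the namespace.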
@@ -1,254 +1,253 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2016-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import logging
22 22
23 23 import formencode
24 24 import formencode.htmlfill
25 25
26 26 from pyramid.httpexceptions import HTTPFound
27 27
28 28 from pyramid.response import Response
29 29 from pyramid.renderers import render
30 30
31 31 from rhodecode import events
32 32 from rhodecode.apps._base import BaseAppView, DataGridAppView
33 33 from rhodecode.lib.auth import (
34 34 LoginRequired, NotAnonymous, CSRFRequired, HasPermissionAnyDecorator)
35 35 from rhodecode.lib import helpers as h, audit_logger
36 36 from rhodecode.lib.utils2 import safe_unicode
37 37
38 38 from rhodecode.model.forms import UserGroupForm
39 39 from rhodecode.model.permission import PermissionModel
40 40 from rhodecode.model.scm import UserGroupList
41 41 from rhodecode.model.db import (
42 42 or_, count, User, UserGroup, UserGroupMember, in_filter_generator)
43 43 from rhodecode.model.meta import Session
44 44 from rhodecode.model.user_group import UserGroupModel
45 45 from rhodecode.model.db import true
46 46
47 47 log = logging.getLogger(__name__)
48 48
49 49
50 50 class AdminUserGroupsView(BaseAppView, DataGridAppView):
51 51
52 52 def load_default_context(self):
53 53 c = self._get_local_tmpl_context()
54 54 PermissionModel().set_global_permission_choices(
55 55 c, gettext_translator=self.request.translate)
56 56 return c
57 57
58 58 # the permission check happens in the data loading of
59 59 # `user_groups_list_data` via UserGroupList
60 60 @LoginRequired()
61 61 @NotAnonymous()
62 62 def user_groups_list(self):
63 63 c = self.load_default_context()
64 64 return self._get_template_context(c)
65 65
66 66 # permission check inside
67 67 @LoginRequired()
68 68 @NotAnonymous()
69 69 def user_groups_list_data(self):
70 70 self.load_default_context()
71 71 column_map = {
72 72 'active': 'users_group_active',
73 73 'description': 'user_group_description',
74 74 'members': 'members_total',
75 75 'owner': 'user_username',
76 76 'sync': 'group_data'
77 77 }
78 78 draw, start, limit = self._extract_chunk(self.request)
79 79 search_q, order_by, order_dir = self._extract_ordering(
80 80 self.request, column_map=column_map)
81 81
82 82 _render = self.request.get_partial_renderer(
83 83 'rhodecode:templates/data_table/_dt_elements.mako')
84 84
85 85 def user_group_name(user_group_name):
86 86 return _render("user_group_name", user_group_name)
87 87
88 88 def user_group_actions(user_group_id, user_group_name):
89 89 return _render("user_group_actions", user_group_id, user_group_name)
90 90
91 91 def user_profile(username):
92 92 return _render('user_profile', username)
93 93
94 94 _perms = ['usergroup.admin']
95 95 allowed_ids = [-1] + self._rhodecode_user.user_group_acl_ids_from_stack(_perms)
96 96
97 97 user_groups_data_total_count = UserGroup.query()\
98 98 .filter(or_(
99 99 # generate multiple IN clauses to work around size limits on IN lists
100 100 *in_filter_generator(UserGroup.users_group_id, allowed_ids)
101 101 ))\
102 102 .count()
103 103
104 104 user_groups_data_total_inactive_count = UserGroup.query()\
105 105 .filter(or_(
106 106 # generate multiple IN clauses to work around size limits on IN lists
107 107 *in_filter_generator(UserGroup.users_group_id, allowed_ids)
108 108 ))\
109 109 .filter(UserGroup.users_group_active != true()).count()
110 110
111 111 member_count = count(UserGroupMember.user_id)
112 112 base_q = Session.query(
113 113 UserGroup.users_group_name,
114 114 UserGroup.user_group_description,
115 115 UserGroup.users_group_active,
116 116 UserGroup.users_group_id,
117 117 UserGroup.group_data,
118 118 User,
119 119 member_count.label('member_count')
120 120 ) \
121 121 .filter(or_(
122 122 # generate multiple IN clauses to work around size limits on IN lists
123 123 *in_filter_generator(UserGroup.users_group_id, allowed_ids)
124 124 )) \
125 125 .outerjoin(UserGroupMember, UserGroupMember.users_group_id == UserGroup.users_group_id) \
126 126 .join(User, User.user_id == UserGroup.user_id) \
127 127 .group_by(UserGroup, User)
128 128
129 129 base_q_inactive = base_q.filter(UserGroup.users_group_active != true())
130 130
131 131 if search_q:
132 132 like_expression = u'%{}%'.format(safe_unicode(search_q))
133 133 base_q = base_q.filter(or_(
134 134 UserGroup.users_group_name.ilike(like_expression),
135 135 ))
136 136 base_q_inactive = base_q.filter(UserGroup.users_group_active != true())
137 137
138 138 user_groups_data_total_filtered_count = base_q.count()
139 139 user_groups_data_total_filtered_inactive_count = base_q_inactive.count()
140 140
141 141 sort_defined = False
142 142 if order_by == 'members_total':
143 143 sort_col = member_count
144 144 sort_defined = True
145 145 elif order_by == 'user_username':
146 146 sort_col = User.username
147 147 else:
148 148 sort_col = getattr(UserGroup, order_by, None)
149 149
150 150 if sort_defined or sort_col:
151 151 if order_dir == 'asc':
152 152 sort_col = sort_col.asc()
153 153 else:
154 154 sort_col = sort_col.desc()
155 155
156 156 base_q = base_q.order_by(sort_col)
157 157 base_q = base_q.offset(start).limit(limit)
158 158
159 159 # authenticated access to user groups
160 160 auth_user_group_list = base_q.all()
161 161
162 162 user_groups_data = []
163 163 for user_gr in auth_user_group_list:
164 164 row = {
165 165 "users_group_name": user_group_name(user_gr.users_group_name),
166 166 "description": h.escape(user_gr.user_group_description),
167 167 "members": user_gr.member_count,
168 168 # NOTE(marcink): because of the advanced query we
169 169 # need to load it like this
170 170 "sync": UserGroup._load_sync(
171 171 UserGroup._load_group_data(user_gr.group_data)),
172 172 "active": h.bool2icon(user_gr.users_group_active),
173 173 "owner": user_profile(user_gr.User.username),
174 174 "action": user_group_actions(
175 175 user_gr.users_group_id, user_gr.users_group_name)
176 176 }
177 177 user_groups_data.append(row)
178 178
179 179 data = ({
180 180 'draw': draw,
181 181 'data': user_groups_data,
182 182 'recordsTotal': user_groups_data_total_count,
183 183 'recordsTotalInactive': user_groups_data_total_inactive_count,
184 184 'recordsFiltered': user_groups_data_total_filtered_count,
185 185 'recordsFilteredInactive': user_groups_data_total_filtered_inactive_count,
186 186 })
187 187
188 188 return data
189 189
190 190 @LoginRequired()
191 191 @HasPermissionAnyDecorator('hg.admin', 'hg.usergroup.create.true')
192 192 def user_groups_new(self):
193 193 c = self.load_default_context()
194 194 return self._get_template_context(c)
195 195
196 196 @LoginRequired()
197 197 @HasPermissionAnyDecorator('hg.admin', 'hg.usergroup.create.true')
198 198 @CSRFRequired()
199 199 def user_groups_create(self):
200 200 _ = self.request.translate
201 201 c = self.load_default_context()
202 202 users_group_form = UserGroupForm(self.request.translate)()
203 203
204 204 user_group_name = self.request.POST.get('users_group_name')
205 205 try:
206 206 form_result = users_group_form.to_python(dict(self.request.POST))
207 207 user_group = UserGroupModel().create(
208 208 name=form_result['users_group_name'],
209 209 description=form_result['user_group_description'],
210 210 owner=self._rhodecode_user.user_id,
211 211 active=form_result['users_group_active'])
212 212 Session().flush()
213 213 creation_data = user_group.get_api_data()
214 214 user_group_name = form_result['users_group_name']
215 215
216 216 audit_logger.store_web(
217 217 'user_group.create', action_data={'data': creation_data},
218 218 user=self._rhodecode_user)
219 219
220 220 user_group_link = h.link_to(
221 221 h.escape(user_group_name),
222 222 h.route_path(
223 223 'edit_user_group', user_group_id=user_group.users_group_id))
224 224 h.flash(h.literal(_('Created user group %(user_group_link)s')
225 225 % {'user_group_link': user_group_link}),
226 226 category='success')
227 227 Session().commit()
228 228 user_group_id = user_group.users_group_id
229 229 except formencode.Invalid as errors:
230 230
231 231 data = render(
232 232 'rhodecode:templates/admin/user_groups/user_group_add.mako',
233 233 self._get_template_context(c), self.request)
234 234 html = formencode.htmlfill.render(
235 235 data,
236 236 defaults=errors.value,
237 237 errors=errors.error_dict or {},
238 238 prefix_error=False,
239 239 encoding="UTF-8",
240 240 force_defaults=False
241 241 )
242 242 return Response(html)
243 243
244 244 except Exception:
245 245 log.exception("Exception creating user group")
246 246 h.flash(_('Error occurred during creation of user group %s') \
247 247 % user_group_name, category='error')
248 248 raise HTTPFound(h.route_path('user_groups_new'))
249 249
250 affected_user_ids = [self._rhodecode_user.user_id]
251 PermissionModel().trigger_permission_flush(affected_user_ids)
250 PermissionModel().trigger_permission_flush()
252 251
253 252 raise HTTPFound(
254 253 h.route_path('edit_user_group', user_group_id=user_group_id))
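
The hunk above widens the flush from just the acting user to all users, since a new user group can change effective permissions well beyond its creator. A hypothetical sketch of what a no-argument `trigger_permission_flush` could reduce to, reusing the namespace templates from the subscriber module shown earlier; the `flush_permission_caches_for` helper and the idea that "no argument" means "every user id" are assumptions for illustration, not the actual PermissionModel internals:

    from rhodecode.lib import rc_cache

    # same namespace templates the event subscriber iterates over
    CACHE_NAMESPACES = [
        'cache_user_auth.{}',
        'cache_user_repo_acl_ids.{}',
        'cache_user_user_group_acl_ids.{}',
        'cache_user_repo_group_acl_ids.{}',
    ]

    def flush_permission_caches_for(user_ids):
        for user_id in user_ids:
            for tmpl in CACHE_NAMESPACES:
                rc_cache.clear_cache_namespace(
                    'cache_perms', tmpl.format(user_id), invalidate=True)

Because the flush is invalidation-based rather than delete-based, widening it to all users is cheap: each namespace is only marked stale and recomputed on the next access.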
@@ -1,1227 +1,1227 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2016-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20 from rhodecode.apps._base import add_route_with_slash
21 21
22 22
23 23 def includeme(config):
24 24 from rhodecode.apps.repository.views.repo_artifacts import RepoArtifactsView
25 25 from rhodecode.apps.repository.views.repo_audit_logs import AuditLogsView
26 26 from rhodecode.apps.repository.views.repo_automation import RepoAutomationView
27 27 from rhodecode.apps.repository.views.repo_bookmarks import RepoBookmarksView
28 28 from rhodecode.apps.repository.views.repo_branch_permissions import RepoSettingsBranchPermissionsView
29 29 from rhodecode.apps.repository.views.repo_branches import RepoBranchesView
30 30 from rhodecode.apps.repository.views.repo_caches import RepoCachesView
31 31 from rhodecode.apps.repository.views.repo_changelog import RepoChangelogView
32 32 from rhodecode.apps.repository.views.repo_checks import RepoChecksView
33 33 from rhodecode.apps.repository.views.repo_commits import RepoCommitsView
34 34 from rhodecode.apps.repository.views.repo_compare import RepoCompareView
35 35 from rhodecode.apps.repository.views.repo_feed import RepoFeedView
36 36 from rhodecode.apps.repository.views.repo_files import RepoFilesView
37 37 from rhodecode.apps.repository.views.repo_forks import RepoForksView
38 38 from rhodecode.apps.repository.views.repo_maintainance import RepoMaintenanceView
39 39 from rhodecode.apps.repository.views.repo_permissions import RepoSettingsPermissionsView
40 40 from rhodecode.apps.repository.views.repo_pull_requests import RepoPullRequestsView
41 41 from rhodecode.apps.repository.views.repo_review_rules import RepoReviewRulesView
42 42 from rhodecode.apps.repository.views.repo_settings import RepoSettingsView
43 43 from rhodecode.apps.repository.views.repo_settings_advanced import RepoSettingsAdvancedView
44 44 from rhodecode.apps.repository.views.repo_settings_fields import RepoSettingsFieldsView
45 45 from rhodecode.apps.repository.views.repo_settings_issue_trackers import RepoSettingsIssueTrackersView
46 46 from rhodecode.apps.repository.views.repo_settings_remote import RepoSettingsRemoteView
47 47 from rhodecode.apps.repository.views.repo_settings_vcs import RepoSettingsVcsView
48 48 from rhodecode.apps.repository.views.repo_strip import RepoStripView
49 49 from rhodecode.apps.repository.views.repo_summary import RepoSummaryView
50 50 from rhodecode.apps.repository.views.repo_tags import RepoTagsView
51 51
52 52 # repo creating checks, special cases that aren't repo routes
53 53 config.add_route(
54 54 name='repo_creating',
55 55 pattern='/{repo_name:.*?[^/]}/repo_creating')
56 56 config.add_view(
57 57 RepoChecksView,
58 58 attr='repo_creating',
59 59 route_name='repo_creating', request_method='GET',
60 60 renderer='rhodecode:templates/admin/repos/repo_creating.mako')
61 61
62 62 config.add_route(
63 63 name='repo_creating_check',
64 64 pattern='/{repo_name:.*?[^/]}/repo_creating_check')
65 65 config.add_view(
66 66 RepoChecksView,
67 67 attr='repo_creating_check',
68 68 route_name='repo_creating_check', request_method='GET',
69 69 renderer='json_ext')
70 70
71 71 # Summary
72 72 # NOTE(marcink): one additional route is defined at the very bottom, a
73 73 # catch-all pattern
74 74 config.add_route(
75 75 name='repo_summary_explicit',
76 76 pattern='/{repo_name:.*?[^/]}/summary', repo_route=True)
77 77 config.add_view(
78 78 RepoSummaryView,
79 79 attr='summary',
80 80 route_name='repo_summary_explicit', request_method='GET',
81 81 renderer='rhodecode:templates/summary/summary.mako')
82 82
83 83 config.add_route(
84 84 name='repo_summary_commits',
85 85 pattern='/{repo_name:.*?[^/]}/summary-commits', repo_route=True)
86 86 config.add_view(
87 87 RepoSummaryView,
88 88 attr='summary_commits',
89 89 route_name='repo_summary_commits', request_method='GET',
90 90 renderer='rhodecode:templates/summary/summary_commits.mako')
91 91
92 92 # Commits
93 93 config.add_route(
94 94 name='repo_commit',
95 95 pattern='/{repo_name:.*?[^/]}/changeset/{commit_id}', repo_route=True)
96 96 config.add_view(
97 97 RepoCommitsView,
98 98 attr='repo_commit_show',
99 99 route_name='repo_commit', request_method='GET',
100 100 renderer=None)
101 101
102 102 config.add_route(
103 103 name='repo_commit_children',
104 104 pattern='/{repo_name:.*?[^/]}/changeset_children/{commit_id}', repo_route=True)
105 105 config.add_view(
106 106 RepoCommitsView,
107 107 attr='repo_commit_children',
108 108 route_name='repo_commit_children', request_method='GET',
109 109 renderer='json_ext', xhr=True)
110 110
111 111 config.add_route(
112 112 name='repo_commit_parents',
113 113 pattern='/{repo_name:.*?[^/]}/changeset_parents/{commit_id}', repo_route=True)
114 114 config.add_view(
115 115 RepoCommitsView,
116 116 attr='repo_commit_parents',
117 117 route_name='repo_commit_parents', request_method='GET',
118 118 renderer='json_ext')
119 119
120 120 config.add_route(
121 121 name='repo_commit_raw',
122 122 pattern='/{repo_name:.*?[^/]}/changeset-diff/{commit_id}', repo_route=True)
123 123 config.add_view(
124 124 RepoCommitsView,
125 125 attr='repo_commit_raw',
126 126 route_name='repo_commit_raw', request_method='GET',
127 127 renderer=None)
128 128
129 129 config.add_route(
130 130 name='repo_commit_patch',
131 131 pattern='/{repo_name:.*?[^/]}/changeset-patch/{commit_id}', repo_route=True)
132 132 config.add_view(
133 133 RepoCommitsView,
134 134 attr='repo_commit_patch',
135 135 route_name='repo_commit_patch', request_method='GET',
136 136 renderer=None)
137 137
138 138 config.add_route(
139 139 name='repo_commit_download',
140 140 pattern='/{repo_name:.*?[^/]}/changeset-download/{commit_id}', repo_route=True)
141 141 config.add_view(
142 142 RepoCommitsView,
143 143 attr='repo_commit_download',
144 144 route_name='repo_commit_download', request_method='GET',
145 145 renderer=None)
146 146
147 147 config.add_route(
148 148 name='repo_commit_data',
149 149 pattern='/{repo_name:.*?[^/]}/changeset-data/{commit_id}', repo_route=True)
150 150 config.add_view(
151 151 RepoCommitsView,
152 152 attr='repo_commit_data',
153 153 route_name='repo_commit_data', request_method='GET',
154 154 renderer='json_ext', xhr=True)
155 155
156 156 config.add_route(
157 157 name='repo_commit_comment_create',
158 158 pattern='/{repo_name:.*?[^/]}/changeset/{commit_id}/comment/create', repo_route=True)
159 159 config.add_view(
160 160 RepoCommitsView,
161 161 attr='repo_commit_comment_create',
162 162 route_name='repo_commit_comment_create', request_method='POST',
163 163 renderer='json_ext')
164 164
165 165 config.add_route(
166 166 name='repo_commit_comment_preview',
167 167 pattern='/{repo_name:.*?[^/]}/changeset/{commit_id}/comment/preview', repo_route=True)
168 168 config.add_view(
169 169 RepoCommitsView,
170 170 attr='repo_commit_comment_preview',
171 171 route_name='repo_commit_comment_preview', request_method='POST',
172 172 renderer='string', xhr=True)
173 173
174 174 config.add_route(
175 175 name='repo_commit_comment_history_view',
176 pattern='/{repo_name:.*?[^/]}/changeset/{commit_id}/comment/{comment_history_id}/history_view', repo_route=True)
176 pattern='/{repo_name:.*?[^/]}/changeset/{commit_id}/comment/{comment_id}/history_view/{comment_history_id}', repo_route=True)
177 177 config.add_view(
178 178 RepoCommitsView,
179 179 attr='repo_commit_comment_history_view',
180 180 route_name='repo_commit_comment_history_view', request_method='POST',
181 181 renderer='string', xhr=True)
182 182
183 183 config.add_route(
184 184 name='repo_commit_comment_attachment_upload',
185 185 pattern='/{repo_name:.*?[^/]}/changeset/{commit_id}/comment/attachment_upload', repo_route=True)
186 186 config.add_view(
187 187 RepoCommitsView,
188 188 attr='repo_commit_comment_attachment_upload',
189 189 route_name='repo_commit_comment_attachment_upload', request_method='POST',
190 190 renderer='json_ext', xhr=True)
191 191
192 192 config.add_route(
193 193 name='repo_commit_comment_delete',
194 194 pattern='/{repo_name:.*?[^/]}/changeset/{commit_id}/comment/{comment_id}/delete', repo_route=True)
195 195 config.add_view(
196 196 RepoCommitsView,
197 197 attr='repo_commit_comment_delete',
198 198 route_name='repo_commit_comment_delete', request_method='POST',
199 199 renderer='json_ext')
200 200
201 201 config.add_route(
202 202 name='repo_commit_comment_edit',
203 203 pattern='/{repo_name:.*?[^/]}/changeset/{commit_id}/comment/{comment_id}/edit', repo_route=True)
204 204 config.add_view(
205 205 RepoCommitsView,
206 206 attr='repo_commit_comment_edit',
207 207 route_name='repo_commit_comment_edit', request_method='POST',
208 208 renderer='json_ext')
209 209
210 210 # still working URL kept for backward compatibility
211 211 config.add_route(
212 212 name='repo_commit_raw_deprecated',
213 213 pattern='/{repo_name:.*?[^/]}/raw-changeset/{commit_id}', repo_route=True)
214 214 config.add_view(
215 215 RepoCommitsView,
216 216 attr='repo_commit_raw',
217 217 route_name='repo_commit_raw_deprecated', request_method='GET',
218 218 renderer=None)
219 219
220 220 # Files
221 221 config.add_route(
222 222 name='repo_archivefile',
223 223 pattern='/{repo_name:.*?[^/]}/archive/{fname:.*}', repo_route=True)
224 224 config.add_view(
225 225 RepoFilesView,
226 226 attr='repo_archivefile',
227 227 route_name='repo_archivefile', request_method='GET',
228 228 renderer=None)
229 229
230 230 config.add_route(
231 231 name='repo_files_diff',
232 232 pattern='/{repo_name:.*?[^/]}/diff/{f_path:.*}', repo_route=True)
233 233 config.add_view(
234 234 RepoFilesView,
235 235 attr='repo_files_diff',
236 236 route_name='repo_files_diff', request_method='GET',
237 237 renderer=None)
238 238
239 239 config.add_route( # legacy route to make old links work
240 240 name='repo_files_diff_2way_redirect',
241 241 pattern='/{repo_name:.*?[^/]}/diff-2way/{f_path:.*}', repo_route=True)
242 242 config.add_view(
243 243 RepoFilesView,
244 244 attr='repo_files_diff_2way_redirect',
245 245 route_name='repo_files_diff_2way_redirect', request_method='GET',
246 246 renderer=None)
247 247
248 248 config.add_route(
249 249 name='repo_files',
250 250 pattern='/{repo_name:.*?[^/]}/files/{commit_id}/{f_path:.*}', repo_route=True)
251 251 config.add_view(
252 252 RepoFilesView,
253 253 attr='repo_files',
254 254 route_name='repo_files', request_method='GET',
255 255 renderer=None)
256 256
257 257 config.add_route(
258 258 name='repo_files:default_path',
259 259 pattern='/{repo_name:.*?[^/]}/files/{commit_id}/', repo_route=True)
260 260 config.add_view(
261 261 RepoFilesView,
262 262 attr='repo_files',
263 263 route_name='repo_files:default_path', request_method='GET',
264 264 renderer=None)
265 265
266 266 config.add_route(
267 267 name='repo_files:default_commit',
268 268 pattern='/{repo_name:.*?[^/]}/files', repo_route=True)
269 269 config.add_view(
270 270 RepoFilesView,
271 271 attr='repo_files',
272 272 route_name='repo_files:default_commit', request_method='GET',
273 273 renderer=None)
274 274
275 275 config.add_route(
276 276 name='repo_files:rendered',
277 277 pattern='/{repo_name:.*?[^/]}/render/{commit_id}/{f_path:.*}', repo_route=True)
278 278 config.add_view(
279 279 RepoFilesView,
280 280 attr='repo_files',
281 281 route_name='repo_files:rendered', request_method='GET',
282 282 renderer=None)
283 283
284 284 config.add_route(
285 285 name='repo_files:annotated',
286 286 pattern='/{repo_name:.*?[^/]}/annotate/{commit_id}/{f_path:.*}', repo_route=True)
287 287 config.add_view(
288 288 RepoFilesView,
289 289 attr='repo_files',
290 290 route_name='repo_files:annotated', request_method='GET',
291 291 renderer=None)
292 292
293 293 config.add_route(
294 294 name='repo_files:annotated_previous',
295 295 pattern='/{repo_name:.*?[^/]}/annotate-previous/{commit_id}/{f_path:.*}', repo_route=True)
296 296 config.add_view(
297 297 RepoFilesView,
298 298 attr='repo_files_annotated_previous',
299 299 route_name='repo_files:annotated_previous', request_method='GET',
300 300 renderer=None)
301 301
302 302 config.add_route(
303 303 name='repo_nodetree_full',
304 304 pattern='/{repo_name:.*?[^/]}/nodetree_full/{commit_id}/{f_path:.*}', repo_route=True)
305 305 config.add_view(
306 306 RepoFilesView,
307 307 attr='repo_nodetree_full',
308 308 route_name='repo_nodetree_full', request_method='GET',
309 309 renderer=None, xhr=True)
310 310
311 311 config.add_route(
312 312 name='repo_nodetree_full:default_path',
313 313 pattern='/{repo_name:.*?[^/]}/nodetree_full/{commit_id}/', repo_route=True)
314 314 config.add_view(
315 315 RepoFilesView,
316 316 attr='repo_nodetree_full',
317 317 route_name='repo_nodetree_full:default_path', request_method='GET',
318 318 renderer=None, xhr=True)
319 319
320 320 config.add_route(
321 321 name='repo_files_nodelist',
322 322 pattern='/{repo_name:.*?[^/]}/nodelist/{commit_id}/{f_path:.*}', repo_route=True)
323 323 config.add_view(
324 324 RepoFilesView,
325 325 attr='repo_nodelist',
326 326 route_name='repo_files_nodelist', request_method='GET',
327 327 renderer='json_ext', xhr=True)
328 328
329 329 config.add_route(
330 330 name='repo_file_raw',
331 331 pattern='/{repo_name:.*?[^/]}/raw/{commit_id}/{f_path:.*}', repo_route=True)
332 332 config.add_view(
333 333 RepoFilesView,
334 334 attr='repo_file_raw',
335 335 route_name='repo_file_raw', request_method='GET',
336 336 renderer=None)
337 337
338 338 config.add_route(
339 339 name='repo_file_download',
340 340 pattern='/{repo_name:.*?[^/]}/download/{commit_id}/{f_path:.*}', repo_route=True)
341 341 config.add_view(
342 342 RepoFilesView,
343 343 attr='repo_file_download',
344 344 route_name='repo_file_download', request_method='GET',
345 345 renderer=None)
346 346
347 347 config.add_route( # backward compat to keep old links working
348 348 name='repo_file_download:legacy',
349 349 pattern='/{repo_name:.*?[^/]}/rawfile/{commit_id}/{f_path:.*}',
350 350 repo_route=True)
351 351 config.add_view(
352 352 RepoFilesView,
353 353 attr='repo_file_download',
354 354 route_name='repo_file_download:legacy', request_method='GET',
355 355 renderer=None)
356 356
357 357 config.add_route(
358 358 name='repo_file_history',
359 359 pattern='/{repo_name:.*?[^/]}/history/{commit_id}/{f_path:.*}', repo_route=True)
360 360 config.add_view(
361 361 RepoFilesView,
362 362 attr='repo_file_history',
363 363 route_name='repo_file_history', request_method='GET',
364 364 renderer='json_ext')
365 365
366 366 config.add_route(
367 367 name='repo_file_authors',
368 368 pattern='/{repo_name:.*?[^/]}/authors/{commit_id}/{f_path:.*}', repo_route=True)
369 369 config.add_view(
370 370 RepoFilesView,
371 371 attr='repo_file_authors',
372 372 route_name='repo_file_authors', request_method='GET',
373 373 renderer='rhodecode:templates/files/file_authors_box.mako')
374 374
375 375 config.add_route(
376 376 name='repo_files_check_head',
377 377 pattern='/{repo_name:.*?[^/]}/check_head/{commit_id}/{f_path:.*}',
378 378 repo_route=True)
379 379 config.add_view(
380 380 RepoFilesView,
381 381 attr='repo_files_check_head',
382 382 route_name='repo_files_check_head', request_method='POST',
383 383 renderer='json_ext', xhr=True)
384 384
385 385 config.add_route(
386 386 name='repo_files_remove_file',
387 387 pattern='/{repo_name:.*?[^/]}/remove_file/{commit_id}/{f_path:.*}',
388 388 repo_route=True)
389 389 config.add_view(
390 390 RepoFilesView,
391 391 attr='repo_files_remove_file',
392 392 route_name='repo_files_remove_file', request_method='GET',
393 393 renderer='rhodecode:templates/files/files_delete.mako')
394 394
395 395 config.add_route(
396 396 name='repo_files_delete_file',
397 397 pattern='/{repo_name:.*?[^/]}/delete_file/{commit_id}/{f_path:.*}',
398 398 repo_route=True)
399 399 config.add_view(
400 400 RepoFilesView,
401 401 attr='repo_files_delete_file',
402 402 route_name='repo_files_delete_file', request_method='POST',
403 403 renderer=None)
404 404
405 405 config.add_route(
406 406 name='repo_files_edit_file',
407 407 pattern='/{repo_name:.*?[^/]}/edit_file/{commit_id}/{f_path:.*}',
408 408 repo_route=True)
409 409 config.add_view(
410 410 RepoFilesView,
411 411 attr='repo_files_edit_file',
412 412 route_name='repo_files_edit_file', request_method='GET',
413 413 renderer='rhodecode:templates/files/files_edit.mako')
414 414
415 415 config.add_route(
416 416 name='repo_files_update_file',
417 417 pattern='/{repo_name:.*?[^/]}/update_file/{commit_id}/{f_path:.*}',
418 418 repo_route=True)
419 419 config.add_view(
420 420 RepoFilesView,
421 421 attr='repo_files_update_file',
422 422 route_name='repo_files_update_file', request_method='POST',
423 423 renderer=None)
424 424
425 425 config.add_route(
426 426 name='repo_files_add_file',
427 427 pattern='/{repo_name:.*?[^/]}/add_file/{commit_id}/{f_path:.*}',
428 428 repo_route=True)
429 429 config.add_view(
430 430 RepoFilesView,
431 431 attr='repo_files_add_file',
432 432 route_name='repo_files_add_file', request_method='GET',
433 433 renderer='rhodecode:templates/files/files_add.mako')
434 434
435 435 config.add_route(
436 436 name='repo_files_upload_file',
437 437 pattern='/{repo_name:.*?[^/]}/upload_file/{commit_id}/{f_path:.*}',
438 438 repo_route=True)
439 439 config.add_view(
440 440 RepoFilesView,
441 441 attr='repo_files_add_file',
442 442 route_name='repo_files_upload_file', request_method='GET',
443 443 renderer='rhodecode:templates/files/files_upload.mako')
444 444 config.add_view( # POST creates
445 445 RepoFilesView,
446 446 attr='repo_files_upload_file',
447 447 route_name='repo_files_upload_file', request_method='POST',
448 448 renderer='json_ext')
449 449
450 450 config.add_route(
451 451 name='repo_files_create_file',
452 452 pattern='/{repo_name:.*?[^/]}/create_file/{commit_id}/{f_path:.*}',
453 453 repo_route=True)
454 454 config.add_view( # POST creates
455 455 RepoFilesView,
456 456 attr='repo_files_create_file',
457 457 route_name='repo_files_create_file', request_method='POST',
458 458 renderer=None)
459 459
460 460 # Refs data
461 461 config.add_route(
462 462 name='repo_refs_data',
463 463 pattern='/{repo_name:.*?[^/]}/refs-data', repo_route=True)
464 464 config.add_view(
465 465 RepoSummaryView,
466 466 attr='repo_refs_data',
467 467 route_name='repo_refs_data', request_method='GET',
468 468 renderer='json_ext')
469 469
470 470 config.add_route(
471 471 name='repo_refs_changelog_data',
472 472 pattern='/{repo_name:.*?[^/]}/refs-data-changelog', repo_route=True)
473 473 config.add_view(
474 474 RepoSummaryView,
475 475 attr='repo_refs_changelog_data',
476 476 route_name='repo_refs_changelog_data', request_method='GET',
477 477 renderer='json_ext')
478 478
479 479 config.add_route(
480 480 name='repo_stats',
481 481 pattern='/{repo_name:.*?[^/]}/repo_stats/{commit_id}', repo_route=True)
482 482 config.add_view(
483 483 RepoSummaryView,
484 484 attr='repo_stats',
485 485 route_name='repo_stats', request_method='GET',
486 486 renderer='json_ext')
487 487
488 488 # Commits
489 489 config.add_route(
490 490 name='repo_commits',
491 491 pattern='/{repo_name:.*?[^/]}/commits', repo_route=True)
492 492 config.add_view(
493 493 RepoChangelogView,
494 494 attr='repo_changelog',
495 495 route_name='repo_commits', request_method='GET',
496 496 renderer='rhodecode:templates/commits/changelog.mako')
497 497 # old routes for backward compat
498 498 config.add_view(
499 499 RepoChangelogView,
500 500 attr='repo_changelog',
501 501 route_name='repo_changelog', request_method='GET',
502 502 renderer='rhodecode:templates/commits/changelog.mako')
503 503
504 504 config.add_route(
505 505 name='repo_commits_elements',
506 506 pattern='/{repo_name:.*?[^/]}/commits_elements', repo_route=True)
507 507 config.add_view(
508 508 RepoChangelogView,
509 509 attr='repo_commits_elements',
510 510 route_name='repo_commits_elements', request_method=('GET', 'POST'),
511 511 renderer='rhodecode:templates/commits/changelog_elements.mako',
512 512 xhr=True)
513 513
514 514 config.add_route(
515 515 name='repo_commits_elements_file',
516 516 pattern='/{repo_name:.*?[^/]}/commits_elements/{commit_id}/{f_path:.*}', repo_route=True)
517 517 config.add_view(
518 518 RepoChangelogView,
519 519 attr='repo_commits_elements',
520 520 route_name='repo_commits_elements_file', request_method=('GET', 'POST'),
521 521 renderer='rhodecode:templates/commits/changelog_elements.mako',
522 522 xhr=True)
523 523
524 524 config.add_route(
525 525 name='repo_commits_file',
526 526 pattern='/{repo_name:.*?[^/]}/commits/{commit_id}/{f_path:.*}', repo_route=True)
527 527 config.add_view(
528 528 RepoChangelogView,
529 529 attr='repo_changelog',
530 530 route_name='repo_commits_file', request_method='GET',
531 531 renderer='rhodecode:templates/commits/changelog.mako')
532 532 # old routes for backward compat
533 533 config.add_view(
534 534 RepoChangelogView,
535 535 attr='repo_changelog',
536 536 route_name='repo_changelog_file', request_method='GET',
537 537 renderer='rhodecode:templates/commits/changelog.mako')
538 538
539 539 # Changelog (old deprecated name for commits page)
540 540 config.add_route(
541 541 name='repo_changelog',
542 542 pattern='/{repo_name:.*?[^/]}/changelog', repo_route=True)
543 543 config.add_route(
544 544 name='repo_changelog_file',
545 545 pattern='/{repo_name:.*?[^/]}/changelog/{commit_id}/{f_path:.*}', repo_route=True)
546 546
547 547 # Compare
548 548 config.add_route(
549 549 name='repo_compare_select',
550 550 pattern='/{repo_name:.*?[^/]}/compare', repo_route=True)
551 551 config.add_view(
552 552 RepoCompareView,
553 553 attr='compare_select',
554 554 route_name='repo_compare_select', request_method='GET',
555 555 renderer='rhodecode:templates/compare/compare_diff.mako')
556 556
557 557 config.add_route(
558 558 name='repo_compare',
559 559 pattern='/{repo_name:.*?[^/]}/compare/{source_ref_type}@{source_ref:.*?}...{target_ref_type}@{target_ref:.*?}', repo_route=True)
560 560 config.add_view(
561 561 RepoCompareView,
562 562 attr='compare',
563 563 route_name='repo_compare', request_method='GET',
564 564 renderer=None)
565 565
566 566 # Tags
567 567 config.add_route(
568 568 name='tags_home',
569 569 pattern='/{repo_name:.*?[^/]}/tags', repo_route=True)
570 570 config.add_view(
571 571 RepoTagsView,
572 572 attr='tags',
573 573 route_name='tags_home', request_method='GET',
574 574 renderer='rhodecode:templates/tags/tags.mako')
575 575
576 576 # Branches
577 577 config.add_route(
578 578 name='branches_home',
579 579 pattern='/{repo_name:.*?[^/]}/branches', repo_route=True)
580 580 config.add_view(
581 581 RepoBranchesView,
582 582 attr='branches',
583 583 route_name='branches_home', request_method='GET',
584 584 renderer='rhodecode:templates/branches/branches.mako')
585 585
586 586 # Bookmarks
587 587 config.add_route(
588 588 name='bookmarks_home',
589 589 pattern='/{repo_name:.*?[^/]}/bookmarks', repo_route=True)
590 590 config.add_view(
591 591 RepoBookmarksView,
592 592 attr='bookmarks',
593 593 route_name='bookmarks_home', request_method='GET',
594 594 renderer='rhodecode:templates/bookmarks/bookmarks.mako')
595 595
596 596 # Forks
597 597 config.add_route(
598 598 name='repo_fork_new',
599 599 pattern='/{repo_name:.*?[^/]}/fork', repo_route=True,
600 600 repo_forbid_when_archived=True,
601 601 repo_accepted_types=['hg', 'git'])
602 602 config.add_view(
603 603 RepoForksView,
604 604 attr='repo_fork_new',
605 605 route_name='repo_fork_new', request_method='GET',
606 606 renderer='rhodecode:templates/forks/forks.mako')
607 607
608 608 config.add_route(
609 609 name='repo_fork_create',
610 610 pattern='/{repo_name:.*?[^/]}/fork/create', repo_route=True,
611 611 repo_forbid_when_archived=True,
612 612 repo_accepted_types=['hg', 'git'])
613 613 config.add_view(
614 614 RepoForksView,
615 615 attr='repo_fork_create',
616 616 route_name='repo_fork_create', request_method='POST',
617 617 renderer='rhodecode:templates/forks/fork.mako')
618 618
619 619 config.add_route(
620 620 name='repo_forks_show_all',
621 621 pattern='/{repo_name:.*?[^/]}/forks', repo_route=True,
622 622 repo_accepted_types=['hg', 'git'])
623 623 config.add_view(
624 624 RepoForksView,
625 625 attr='repo_forks_show_all',
626 626 route_name='repo_forks_show_all', request_method='GET',
627 627 renderer='rhodecode:templates/forks/forks.mako')
628 628
629 629 config.add_route(
630 630 name='repo_forks_data',
631 631 pattern='/{repo_name:.*?[^/]}/forks/data', repo_route=True,
632 632 repo_accepted_types=['hg', 'git'])
633 633 config.add_view(
634 634 RepoForksView,
635 635 attr='repo_forks_data',
636 636 route_name='repo_forks_data', request_method='GET',
637 637 renderer='json_ext', xhr=True)
638 638
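
The fork and pull-request routes that follow narrow matching with custom keywords (repo_route, repo_accepted_types, repo_forbid_when_archived) layered on top of Pyramid's pattern matching. A minimal sketch of how such a keyword can be registered as a route predicate; the class and attribute names below are illustrative, not RhodeCode's actual implementation:

    # Illustrative custom route predicate; the real RhodeCode
    # predicates live elsewhere and carry more logic.
    class AcceptedRepoTypesPredicate(object):
        def __init__(self, val, config):
            # val is what add_route() passed, e.g. ['hg', 'git']
            self.val = tuple(val)

        def text(self):
            return 'repo_accepted_types = %s' % (self.val,)

        phash = text

        def __call__(self, info, request):
            # hypothetical: assume the repo type was resolved earlier
            # in the request pipeline and stashed on the request
            repo_type = getattr(request, 'matched_repo_type', None)
            return repo_type in self.val

    def includeme(config):
        # after this, repo_accepted_types=... is a legal keyword
        # argument for config.add_route()
        config.add_route_predicate(
            'repo_accepted_types', AcceptedRepoTypesPredicate)

When a predicate returns False the route simply does not match and Pyramid keeps trying later routes, which is how type-restricted and EE-only endpoints can coexist with the catch-all summary route at the end of this file.
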
639 639 # Pull Requests
640 640 config.add_route(
641 641 name='pullrequest_show',
642 642 pattern='/{repo_name:.*?[^/]}/pull-request/{pull_request_id:\d+}',
643 643 repo_route=True)
644 644 config.add_view(
645 645 RepoPullRequestsView,
646 646 attr='pull_request_show',
647 647 route_name='pullrequest_show', request_method='GET',
648 648 renderer='rhodecode:templates/pullrequests/pullrequest_show.mako')
649 649
650 650 config.add_route(
651 651 name='pullrequest_show_all',
652 652 pattern='/{repo_name:.*?[^/]}/pull-request',
653 653 repo_route=True, repo_accepted_types=['hg', 'git'])
654 654 config.add_view(
655 655 RepoPullRequestsView,
656 656 attr='pull_request_list',
657 657 route_name='pullrequest_show_all', request_method='GET',
658 658 renderer='rhodecode:templates/pullrequests/pullrequests.mako')
659 659
660 660 config.add_route(
661 661 name='pullrequest_show_all_data',
662 662 pattern='/{repo_name:.*?[^/]}/pull-request-data',
663 663 repo_route=True, repo_accepted_types=['hg', 'git'])
664 664 config.add_view(
665 665 RepoPullRequestsView,
666 666 attr='pull_request_list_data',
667 667 route_name='pullrequest_show_all_data', request_method='GET',
668 668 renderer='json_ext', xhr=True)
669 669
670 670 config.add_route(
671 671 name='pullrequest_repo_refs',
672 672 pattern='/{repo_name:.*?[^/]}/pull-request/refs/{target_repo_name:.*?[^/]}',
673 673 repo_route=True)
674 674 config.add_view(
675 675 RepoPullRequestsView,
676 676 attr='pull_request_repo_refs',
677 677 route_name='pullrequest_repo_refs', request_method='GET',
678 678 renderer='json_ext', xhr=True)
679 679
680 680 config.add_route(
681 681 name='pullrequest_repo_targets',
682 682 pattern='/{repo_name:.*?[^/]}/pull-request/repo-targets',
683 683 repo_route=True)
684 684 config.add_view(
685 685 RepoPullRequestsView,
686 686 attr='pullrequest_repo_targets',
687 687 route_name='pullrequest_repo_targets', request_method='GET',
688 688 renderer='json_ext', xhr=True)
689 689
690 690 config.add_route(
691 691 name='pullrequest_new',
692 692 pattern='/{repo_name:.*?[^/]}/pull-request/new',
693 693 repo_route=True, repo_accepted_types=['hg', 'git'],
694 694 repo_forbid_when_archived=True)
695 695 config.add_view(
696 696 RepoPullRequestsView,
697 697 attr='pull_request_new',
698 698 route_name='pullrequest_new', request_method='GET',
699 699 renderer='rhodecode:templates/pullrequests/pullrequest.mako')
700 700
701 701 config.add_route(
702 702 name='pullrequest_create',
703 703 pattern='/{repo_name:.*?[^/]}/pull-request/create',
704 704 repo_route=True, repo_accepted_types=['hg', 'git'],
705 705 repo_forbid_when_archived=True)
706 706 config.add_view(
707 707 RepoPullRequestsView,
708 708 attr='pull_request_create',
709 709 route_name='pullrequest_create', request_method='POST',
710 710 renderer=None)
711 711
712 712 config.add_route(
713 713 name='pullrequest_update',
714 714 pattern='/{repo_name:.*?[^/]}/pull-request/{pull_request_id:\d+}/update',
715 715 repo_route=True, repo_forbid_when_archived=True)
716 716 config.add_view(
717 717 RepoPullRequestsView,
718 718 attr='pull_request_update',
719 719 route_name='pullrequest_update', request_method='POST',
720 720 renderer='json_ext')
721 721
722 722 config.add_route(
723 723 name='pullrequest_merge',
724 724 pattern='/{repo_name:.*?[^/]}/pull-request/{pull_request_id:\d+}/merge',
725 725 repo_route=True, repo_forbid_when_archived=True)
726 726 config.add_view(
727 727 RepoPullRequestsView,
728 728 attr='pull_request_merge',
729 729 route_name='pullrequest_merge', request_method='POST',
730 730 renderer='json_ext')
731 731
732 732 config.add_route(
733 733 name='pullrequest_delete',
734 734 pattern='/{repo_name:.*?[^/]}/pull-request/{pull_request_id:\d+}/delete',
735 735 repo_route=True, repo_forbid_when_archived=True)
736 736 config.add_view(
737 737 RepoPullRequestsView,
738 738 attr='pull_request_delete',
739 739 route_name='pullrequest_delete', request_method='POST',
740 740 renderer='json_ext')
741 741
742 742 config.add_route(
743 743 name='pullrequest_comment_create',
744 744 pattern='/{repo_name:.*?[^/]}/pull-request/{pull_request_id:\d+}/comment',
745 745 repo_route=True)
746 746 config.add_view(
747 747 RepoPullRequestsView,
748 748 attr='pull_request_comment_create',
749 749 route_name='pullrequest_comment_create', request_method='POST',
750 750 renderer='json_ext')
751 751
752 752 config.add_route(
753 753 name='pullrequest_comment_edit',
754 754 pattern='/{repo_name:.*?[^/]}/pull-request/{pull_request_id:\d+}/comment/{comment_id}/edit',
755 755 repo_route=True, repo_accepted_types=['hg', 'git'])
756 756 config.add_view(
757 757 RepoPullRequestsView,
758 758 attr='pull_request_comment_edit',
759 759 route_name='pullrequest_comment_edit', request_method='POST',
760 760 renderer='json_ext')
761 761
762 762 config.add_route(
763 763 name='pullrequest_comment_delete',
764 764 pattern='/{repo_name:.*?[^/]}/pull-request/{pull_request_id:\d+}/comment/{comment_id}/delete',
765 765 repo_route=True, repo_accepted_types=['hg', 'git'])
766 766 config.add_view(
767 767 RepoPullRequestsView,
768 768 attr='pull_request_comment_delete',
769 769 route_name='pullrequest_comment_delete', request_method='POST',
770 770 renderer='json_ext')
771 771
772 772 config.add_route(
773 773 name='pullrequest_comments',
774 774 pattern='/{repo_name:.*?[^/]}/pull-request/{pull_request_id:\d+}/comments',
775 775 repo_route=True)
776 776 config.add_view(
777 777 RepoPullRequestsView,
778 778 attr='pullrequest_comments',
779 779 route_name='pullrequest_comments', request_method='POST',
780 780 renderer='string_html', xhr=True)
781 781
782 782 config.add_route(
783 783 name='pullrequest_todos',
784 784 pattern='/{repo_name:.*?[^/]}/pull-request/{pull_request_id:\d+}/todos',
785 785 repo_route=True)
786 786 config.add_view(
787 787 RepoPullRequestsView,
788 788 attr='pullrequest_todos',
789 789 route_name='pullrequest_todos', request_method='POST',
790 790 renderer='string_html', xhr=True)
791 791
792 792 config.add_route(
793 793 name='pullrequest_drafts',
794 794 pattern='/{repo_name:.*?[^/]}/pull-request/{pull_request_id:\d+}/drafts',
795 795 repo_route=True)
796 796 config.add_view(
797 797 RepoPullRequestsView,
798 798 attr='pullrequest_drafts',
799 799 route_name='pullrequest_drafts', request_method='POST',
800 800 renderer='string_html', xhr=True)
801 801
802 802 # Artifacts, (EE feature)
803 803 config.add_route(
804 804 name='repo_artifacts_list',
805 805 pattern='/{repo_name:.*?[^/]}/artifacts', repo_route=True)
806 806 config.add_view(
807 807 RepoArtifactsView,
808 808 attr='repo_artifacts',
809 809 route_name='repo_artifacts_list', request_method='GET',
810 810 renderer='rhodecode:templates/artifacts/artifact_list.mako')
811 811
812 812 # Settings
813 813 config.add_route(
814 814 name='edit_repo',
815 815 pattern='/{repo_name:.*?[^/]}/settings', repo_route=True)
816 816 config.add_view(
817 817 RepoSettingsView,
818 818 attr='edit_settings',
819 819 route_name='edit_repo', request_method='GET',
820 820 renderer='rhodecode:templates/admin/repos/repo_edit.mako')
821 821 # update is POST on edit_repo
822 822 config.add_view(
823 823 RepoSettingsView,
824 824 attr='edit_settings_update',
825 825 route_name='edit_repo', request_method='POST',
826 826 renderer='rhodecode:templates/admin/repos/repo_edit.mako')
827 827
828 828 # Settings advanced
829 829 config.add_route(
830 830 name='edit_repo_advanced',
831 831 pattern='/{repo_name:.*?[^/]}/settings/advanced', repo_route=True)
832 832 config.add_view(
833 833 RepoSettingsAdvancedView,
834 834 attr='edit_advanced',
835 835 route_name='edit_repo_advanced', request_method='GET',
836 836 renderer='rhodecode:templates/admin/repos/repo_edit.mako')
837 837
838 838 config.add_route(
839 839 name='edit_repo_advanced_archive',
840 840 pattern='/{repo_name:.*?[^/]}/settings/advanced/archive', repo_route=True)
841 841 config.add_view(
842 842 RepoSettingsAdvancedView,
843 843 attr='edit_advanced_archive',
844 844 route_name='edit_repo_advanced_archive', request_method='POST',
845 845 renderer='rhodecode:templates/admin/repos/repo_edit.mako')
846 846
847 847 config.add_route(
848 848 name='edit_repo_advanced_delete',
849 849 pattern='/{repo_name:.*?[^/]}/settings/advanced/delete', repo_route=True)
850 850 config.add_view(
851 851 RepoSettingsAdvancedView,
852 852 attr='edit_advanced_delete',
853 853 route_name='edit_repo_advanced_delete', request_method='POST',
854 854 renderer='rhodecode:templates/admin/repos/repo_edit.mako')
855 855
856 856 config.add_route(
857 857 name='edit_repo_advanced_locking',
858 858 pattern='/{repo_name:.*?[^/]}/settings/advanced/locking', repo_route=True)
859 859 config.add_view(
860 860 RepoSettingsAdvancedView,
861 861 attr='edit_advanced_toggle_locking',
862 862 route_name='edit_repo_advanced_locking', request_method='POST',
863 863 renderer='rhodecode:templates/admin/repos/repo_edit.mako')
864 864
865 865 config.add_route(
866 866 name='edit_repo_advanced_journal',
867 867 pattern='/{repo_name:.*?[^/]}/settings/advanced/journal', repo_route=True)
868 868 config.add_view(
869 869 RepoSettingsAdvancedView,
870 870 attr='edit_advanced_journal',
871 871 route_name='edit_repo_advanced_journal', request_method='POST',
872 872 renderer='rhodecode:templates/admin/repos/repo_edit.mako')
873 873
874 874 config.add_route(
875 875 name='edit_repo_advanced_fork',
876 876 pattern='/{repo_name:.*?[^/]}/settings/advanced/fork', repo_route=True)
877 877 config.add_view(
878 878 RepoSettingsAdvancedView,
879 879 attr='edit_advanced_fork',
880 880 route_name='edit_repo_advanced_fork', request_method='POST',
881 881 renderer='rhodecode:templates/admin/repos/repo_edit.mako')
882 882
883 883 config.add_route(
884 884 name='edit_repo_advanced_hooks',
885 885 pattern='/{repo_name:.*?[^/]}/settings/advanced/hooks', repo_route=True)
886 886 config.add_view(
887 887 RepoSettingsAdvancedView,
888 888 attr='edit_advanced_install_hooks',
889 889 route_name='edit_repo_advanced_hooks', request_method='GET',
890 890 renderer='rhodecode:templates/admin/repos/repo_edit.mako')
891 891
892 892 # Caches
893 893 config.add_route(
894 894 name='edit_repo_caches',
895 895 pattern='/{repo_name:.*?[^/]}/settings/caches', repo_route=True)
896 896 config.add_view(
897 897 RepoCachesView,
898 898 attr='repo_caches',
899 899 route_name='edit_repo_caches', request_method='GET',
900 900 renderer='rhodecode:templates/admin/repos/repo_edit.mako')
901 901 config.add_view(
902 902 RepoCachesView,
903 903 attr='repo_caches_purge',
904 904 route_name='edit_repo_caches', request_method='POST')
905 905
906 906 # Permissions
907 907 config.add_route(
908 908 name='edit_repo_perms',
909 909 pattern='/{repo_name:.*?[^/]}/settings/permissions', repo_route=True)
910 910 config.add_view(
911 911 RepoSettingsPermissionsView,
912 912 attr='edit_permissions',
913 913 route_name='edit_repo_perms', request_method='GET',
914 914 renderer='rhodecode:templates/admin/repos/repo_edit.mako')
915 915 config.add_view(
916 916 RepoSettingsPermissionsView,
917 917 attr='edit_permissions_update',
918 918 route_name='edit_repo_perms', request_method='POST',
919 919 renderer='rhodecode:templates/admin/repos/repo_edit.mako')
920 920
921 921 config.add_route(
922 922 name='edit_repo_perms_set_private',
923 923 pattern='/{repo_name:.*?[^/]}/settings/permissions/set_private', repo_route=True)
924 924 config.add_view(
925 925 RepoSettingsPermissionsView,
926 926 attr='edit_permissions_set_private_repo',
927 927 route_name='edit_repo_perms_set_private', request_method='POST',
928 928 renderer='json_ext')
929 929
930 930 # Permissions Branch (EE feature)
931 931 config.add_route(
932 932 name='edit_repo_perms_branch',
933 933 pattern='/{repo_name:.*?[^/]}/settings/branch_permissions', repo_route=True)
934 934 config.add_view(
935 935 RepoSettingsBranchPermissionsView,
936 936 attr='branch_permissions',
937 937 route_name='edit_repo_perms_branch', request_method='GET',
938 938 renderer='rhodecode:templates/admin/repos/repo_edit.mako')
939 939
940 940 config.add_route(
941 941 name='edit_repo_perms_branch_delete',
942 942 pattern='/{repo_name:.*?[^/]}/settings/branch_permissions/{rule_id}/delete',
943 943 repo_route=True)
944 944 ## Only implemented in EE
945 945
946 946 # Maintenance
947 947 config.add_route(
948 948 name='edit_repo_maintenance',
949 949 pattern='/{repo_name:.*?[^/]}/settings/maintenance', repo_route=True)
950 950 config.add_view(
951 951 RepoMaintenanceView,
952 952 attr='repo_maintenance',
953 953 route_name='edit_repo_maintenance', request_method='GET',
954 954 renderer='rhodecode:templates/admin/repos/repo_edit.mako')
955 955
956 956 config.add_route(
957 957 name='edit_repo_maintenance_execute',
958 958 pattern='/{repo_name:.*?[^/]}/settings/maintenance/execute', repo_route=True)
959 959 config.add_view(
960 960 RepoMaintenanceView,
961 961 attr='repo_maintenance_execute',
962 962 route_name='edit_repo_maintenance_execute', request_method='GET',
963 963 renderer='json', xhr=True)
964 964
965 965 # Fields
966 966 config.add_route(
967 967 name='edit_repo_fields',
968 968 pattern='/{repo_name:.*?[^/]}/settings/fields', repo_route=True)
969 969 config.add_view(
970 970 RepoSettingsFieldsView,
971 971 attr='repo_field_edit',
972 972 route_name='edit_repo_fields', request_method='GET',
973 973 renderer='rhodecode:templates/admin/repos/repo_edit.mako')
974 974
975 975 config.add_route(
976 976 name='edit_repo_fields_create',
977 977 pattern='/{repo_name:.*?[^/]}/settings/fields/create', repo_route=True)
978 978 config.add_view(
979 979 RepoSettingsFieldsView,
980 980 attr='repo_field_create',
981 981 route_name='edit_repo_fields_create', request_method='POST',
982 982 renderer='rhodecode:templates/admin/repos/repo_edit.mako')
983 983
984 984 config.add_route(
985 985 name='edit_repo_fields_delete',
986 986 pattern='/{repo_name:.*?[^/]}/settings/fields/{field_id}/delete', repo_route=True)
987 987 config.add_view(
988 988 RepoSettingsFieldsView,
989 989 attr='repo_field_delete',
990 990 route_name='edit_repo_fields_delete', request_method='POST',
991 991 renderer='rhodecode:templates/admin/repos/repo_edit.mako')
992 992
993 993 # Locking
994 994 config.add_route(
995 995 name='repo_edit_toggle_locking',
996 996 pattern='/{repo_name:.*?[^/]}/settings/toggle_locking', repo_route=True)
997 997 config.add_view(
998 998 RepoSettingsView,
999 999 attr='edit_advanced_toggle_locking',
1000 1000 route_name='repo_edit_toggle_locking', request_method='GET',
1001 1001 renderer='rhodecode:templates/admin/repos/repo_edit.mako')
1002 1002
1003 1003 # Remote
1004 1004 config.add_route(
1005 1005 name='edit_repo_remote',
1006 1006 pattern='/{repo_name:.*?[^/]}/settings/remote', repo_route=True)
1007 1007 config.add_view(
1008 1008 RepoSettingsRemoteView,
1009 1009 attr='repo_remote_edit_form',
1010 1010 route_name='edit_repo_remote', request_method='GET',
1011 1011 renderer='rhodecode:templates/admin/repos/repo_edit.mako')
1012 1012
1013 1013 config.add_route(
1014 1014 name='edit_repo_remote_pull',
1015 1015 pattern='/{repo_name:.*?[^/]}/settings/remote/pull', repo_route=True)
1016 1016 config.add_view(
1017 1017 RepoSettingsRemoteView,
1018 1018 attr='repo_remote_pull_changes',
1019 1019 route_name='edit_repo_remote_pull', request_method='POST',
1020 1020 renderer=None)
1021 1021
1022 1022 config.add_route(
1023 1023 name='edit_repo_remote_push',
1024 1024 pattern='/{repo_name:.*?[^/]}/settings/remote/push', repo_route=True)
1025 1025
1026 1026 # Statistics
1027 1027 config.add_route(
1028 1028 name='edit_repo_statistics',
1029 1029 pattern='/{repo_name:.*?[^/]}/settings/statistics', repo_route=True)
1030 1030 config.add_view(
1031 1031 RepoSettingsView,
1032 1032 attr='edit_statistics_form',
1033 1033 route_name='edit_repo_statistics', request_method='GET',
1034 1034 renderer='rhodecode:templates/admin/repos/repo_edit.mako')
1035 1035
1036 1036 config.add_route(
1037 1037 name='edit_repo_statistics_reset',
1038 1038 pattern='/{repo_name:.*?[^/]}/settings/statistics/update', repo_route=True)
1039 1039 config.add_view(
1040 1040 RepoSettingsView,
1041 1041 attr='repo_statistics_reset',
1042 1042 route_name='edit_repo_statistics_reset', request_method='POST',
1043 1043 renderer='rhodecode:templates/admin/repos/repo_edit.mako')
1044 1044
1045 1045 # Issue trackers
1046 1046 config.add_route(
1047 1047 name='edit_repo_issuetracker',
1048 1048 pattern='/{repo_name:.*?[^/]}/settings/issue_trackers', repo_route=True)
1049 1049 config.add_view(
1050 1050 RepoSettingsIssueTrackersView,
1051 1051 attr='repo_issuetracker',
1052 1052 route_name='edit_repo_issuetracker', request_method='GET',
1053 1053 renderer='rhodecode:templates/admin/repos/repo_edit.mako')
1054 1054
1055 1055 config.add_route(
1056 1056 name='edit_repo_issuetracker_test',
1057 1057 pattern='/{repo_name:.*?[^/]}/settings/issue_trackers/test', repo_route=True)
1058 1058 config.add_view(
1059 1059 RepoSettingsIssueTrackersView,
1060 1060 attr='repo_issuetracker_test',
1061 1061 route_name='edit_repo_issuetracker_test', request_method='POST',
1062 1062 renderer='string', xhr=True)
1063 1063
1064 1064 config.add_route(
1065 1065 name='edit_repo_issuetracker_delete',
1066 1066 pattern='/{repo_name:.*?[^/]}/settings/issue_trackers/delete', repo_route=True)
1067 1067 config.add_view(
1068 1068 RepoSettingsIssueTrackersView,
1069 1069 attr='repo_issuetracker_delete',
1070 1070 route_name='edit_repo_issuetracker_delete', request_method='POST',
1071 1071 renderer='json_ext', xhr=True)
1072 1072
1073 1073 config.add_route(
1074 1074 name='edit_repo_issuetracker_update',
1075 1075 pattern='/{repo_name:.*?[^/]}/settings/issue_trackers/update', repo_route=True)
1076 1076 config.add_view(
1077 1077 RepoSettingsIssueTrackersView,
1078 1078 attr='repo_issuetracker_update',
1079 1079 route_name='edit_repo_issuetracker_update', request_method='POST',
1080 1080 renderer='rhodecode:templates/admin/repos/repo_edit.mako')
1081 1081
1082 1082 # VCS Settings
1083 1083 config.add_route(
1084 1084 name='edit_repo_vcs',
1085 1085 pattern='/{repo_name:.*?[^/]}/settings/vcs', repo_route=True)
1086 1086 config.add_view(
1087 1087 RepoSettingsVcsView,
1088 1088 attr='repo_vcs_settings',
1089 1089 route_name='edit_repo_vcs', request_method='GET',
1090 1090 renderer='rhodecode:templates/admin/repos/repo_edit.mako')
1091 1091
1092 1092 config.add_route(
1093 1093 name='edit_repo_vcs_update',
1094 1094 pattern='/{repo_name:.*?[^/]}/settings/vcs/update', repo_route=True)
1095 1095 config.add_view(
1096 1096 RepoSettingsVcsView,
1097 1097 attr='repo_settings_vcs_update',
1098 1098 route_name='edit_repo_vcs_update', request_method='POST',
1099 1099 renderer='rhodecode:templates/admin/repos/repo_edit.mako')
1100 1100
1101 1101 # svn pattern
1102 1102 config.add_route(
1103 1103 name='edit_repo_vcs_svn_pattern_delete',
1104 1104 pattern='/{repo_name:.*?[^/]}/settings/vcs/svn_pattern/delete', repo_route=True)
1105 1105 config.add_view(
1106 1106 RepoSettingsVcsView,
1107 1107 attr='repo_settings_delete_svn_pattern',
1108 1108 route_name='edit_repo_vcs_svn_pattern_delete', request_method='POST',
1109 1109 renderer='json_ext', xhr=True)
1110 1110
1111 1111 # Repo Review Rules (EE feature)
1112 1112 config.add_route(
1113 1113 name='repo_reviewers',
1114 1114 pattern='/{repo_name:.*?[^/]}/settings/review/rules', repo_route=True)
1115 1115 config.add_view(
1116 1116 RepoReviewRulesView,
1117 1117 attr='repo_review_rules',
1118 1118 route_name='repo_reviewers', request_method='GET',
1119 1119 renderer='rhodecode:templates/admin/repos/repo_edit.mako')
1120 1120
1121 1121 config.add_route(
1122 1122 name='repo_default_reviewers_data',
1123 1123 pattern='/{repo_name:.*?[^/]}/settings/review/default-reviewers', repo_route=True)
1124 1124 config.add_view(
1125 1125 RepoReviewRulesView,
1126 1126 attr='repo_default_reviewers_data',
1127 1127 route_name='repo_default_reviewers_data', request_method='GET',
1128 1128 renderer='json_ext')
1129 1129
1130 1130 # Repo Automation (EE feature)
1131 1131 config.add_route(
1132 1132 name='repo_automation',
1133 1133 pattern='/{repo_name:.*?[^/]}/settings/automation', repo_route=True)
1134 1134 config.add_view(
1135 1135 RepoAutomationView,
1136 1136 attr='repo_automation',
1137 1137 route_name='repo_automation', request_method='GET',
1138 1138 renderer='rhodecode:templates/admin/repos/repo_edit.mako')
1139 1139
1140 1140 # Strip
1141 1141 config.add_route(
1142 1142 name='edit_repo_strip',
1143 1143 pattern='/{repo_name:.*?[^/]}/settings/strip', repo_route=True)
1144 1144 config.add_view(
1145 1145 RepoStripView,
1146 1146 attr='strip',
1147 1147 route_name='edit_repo_strip', request_method='GET',
1148 1148 renderer='rhodecode:templates/admin/repos/repo_edit.mako')
1149 1149
1150 1150 config.add_route(
1151 1151 name='strip_check',
1152 1152 pattern='/{repo_name:.*?[^/]}/settings/strip_check', repo_route=True)
1153 1153 config.add_view(
1154 1154 RepoStripView,
1155 1155 attr='strip_check',
1156 1156 route_name='strip_check', request_method='POST',
1157 1157 renderer='json', xhr=True)
1158 1158
1159 1159 config.add_route(
1160 1160 name='strip_execute',
1161 1161 pattern='/{repo_name:.*?[^/]}/settings/strip_execute', repo_route=True)
1162 1162 config.add_view(
1163 1163 RepoStripView,
1164 1164 attr='strip_execute',
1165 1165 route_name='strip_execute', request_method='POST',
1166 1166 renderer='json', xhr=True)
1167 1167
1168 1168 # Audit logs
1169 1169 config.add_route(
1170 1170 name='edit_repo_audit_logs',
1171 1171 pattern='/{repo_name:.*?[^/]}/settings/audit_logs', repo_route=True)
1172 1172 config.add_view(
1173 1173 AuditLogsView,
1174 1174 attr='repo_audit_logs',
1175 1175 route_name='edit_repo_audit_logs', request_method='GET',
1176 1176 renderer='rhodecode:templates/admin/repos/repo_edit.mako')
1177 1177
1178 1178 # ATOM/RSS Feed, shouldn't contain slashes for outlook compatibility
1179 1179 config.add_route(
1180 1180 name='rss_feed_home',
1181 1181 pattern='/{repo_name:.*?[^/]}/feed-rss', repo_route=True)
1182 1182 config.add_view(
1183 1183 RepoFeedView,
1184 1184 attr='rss',
1185 1185 route_name='rss_feed_home', request_method='GET', renderer=None)
1186 1186
1187 1187 config.add_route(
1188 1188 name='rss_feed_home_old',
1189 1189 pattern='/{repo_name:.*?[^/]}/feed/rss', repo_route=True)
1190 1190 config.add_view(
1191 1191 RepoFeedView,
1192 1192 attr='rss',
1193 1193 route_name='rss_feed_home_old', request_method='GET', renderer=None)
1194 1194
1195 1195 config.add_route(
1196 1196 name='atom_feed_home',
1197 1197 pattern='/{repo_name:.*?[^/]}/feed-atom', repo_route=True)
1198 1198 config.add_view(
1199 1199 RepoFeedView,
1200 1200 attr='atom',
1201 1201 route_name='atom_feed_home', request_method='GET', renderer=None)
1202 1202
1203 1203 config.add_route(
1204 1204 name='atom_feed_home_old',
1205 1205 pattern='/{repo_name:.*?[^/]}/feed/atom', repo_route=True)
1206 1206 config.add_view(
1207 1207 RepoFeedView,
1208 1208 attr='atom',
1209 1209 route_name='atom_feed_home_old', request_method='GET', renderer=None)
1210 1210
1211 1211 # NOTE(marcink): needs to be at the end for catch-all
1212 1212 add_route_with_slash(
1213 1213 config,
1214 1214 name='repo_summary',
1215 1215 pattern='/{repo_name:.*?[^/]}', repo_route=True)
1216 1216 config.add_view(
1217 1217 RepoSummaryView,
1218 1218 attr='summary',
1219 1219 route_name='repo_summary', request_method='GET',
1220 1220 renderer='rhodecode:templates/summary/summary.mako')
1221 1221
1222 1222 # TODO(marcink): there's no such route??
1223 1223 config.add_view(
1224 1224 RepoSummaryView,
1225 1225 attr='summary',
1226 1226 route_name='repo_summary_slash', request_method='GET',
1220 1220 renderer='rhodecode:templates/summary/summary.mako')
\ No newline at end of file
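
Every endpoint in this file follows the same two-step registration: config.add_route() binds a URL pattern to a route name, then one or more config.add_view() calls attach a method of a view class to that name, optionally split by request_method or xhr. A reduced sketch of the pattern in plain Pyramid (the view class, route and template names here are placeholders):

    from pyramid.config import Configurator
    from pyramid.response import Response

    class DemoView(object):
        # Pyramid instantiates the class with the request when
        # add_view() is given a class plus attr=...
        def __init__(self, request):
            self.request = request

        def hello(self):
            # renderer=None means the view builds the Response itself
            name = self.request.matchdict['name']
            return Response('hello %s' % name)

    def main():
        config = Configurator()
        # step 1: bind the URL pattern to a route name
        config.add_route('demo_hello', '/hello/{name}')
        # step 2: attach the view method to that route name
        config.add_view(
            DemoView, attr='hello',
            route_name='demo_hello', request_method='GET',
            renderer=None)
        return config.make_wsgi_app()

Splitting GET and POST into separate add_view() calls on a single route, as edit_repo does above, keeps a form view and its update handler on one URL.
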
@@ -1,358 +1,358 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21
22 22 import logging
23 23
24 24 from pyramid.httpexceptions import HTTPNotFound, HTTPFound
25 25
26 26 from pyramid.renderers import render
27 27 from pyramid.response import Response
28 28
29 29 from rhodecode.apps._base import RepoAppView
30 30 import rhodecode.lib.helpers as h
31 31 from rhodecode.lib.auth import (
32 32 LoginRequired, HasRepoPermissionAnyDecorator)
33 33
34 34 from rhodecode.lib.ext_json import json
35 35 from rhodecode.lib.graphmod import _colored, _dagwalker
36 36 from rhodecode.lib.helpers import RepoPage
37 37 from rhodecode.lib.utils2 import safe_int, safe_str, str2bool, safe_unicode
38 38 from rhodecode.lib.vcs.exceptions import (
39 39 RepositoryError, CommitDoesNotExistError,
40 40 CommitError, NodeDoesNotExistError, EmptyRepositoryError)
41 41
42 42 log = logging.getLogger(__name__)
43 43
44 44 DEFAULT_CHANGELOG_SIZE = 20
45 45
46 46
47 47 class RepoChangelogView(RepoAppView):
48 48
49 49 def _get_commit_or_redirect(self, commit_id, redirect_after=True):
50 50 """
51 51 This is a safe way to get a commit. If an error occurs, it
52 52 redirects to tip with a proper message
53 53
54 54 :param commit_id: id of commit to fetch
55 55 :param redirect_after: toggle redirection
56 56 """
57 57 _ = self.request.translate
58 58
59 59 try:
60 60 return self.rhodecode_vcs_repo.get_commit(commit_id)
61 61 except EmptyRepositoryError:
62 62 if not redirect_after:
63 63 return None
64 64
65 65 h.flash(h.literal(
66 66 _('There are no commits yet')), category='warning')
67 67 raise HTTPFound(
68 68 h.route_path('repo_summary', repo_name=self.db_repo_name))
69 69
70 70 except (CommitDoesNotExistError, LookupError):
71 71 msg = _('No such commit exists for this repository')
72 72 h.flash(msg, category='error')
73 73 raise HTTPNotFound()
74 74 except RepositoryError as e:
75 h.flash(safe_str(h.escape(e)), category='error')
75 h.flash(h.escape(safe_str(e)), category='error')
76 76 raise HTTPNotFound()
77 77
78 78 def _graph(self, repo, commits, prev_data=None, next_data=None):
79 79 """
80 80 Generates a DAG graph for the repo
81 81
82 82 :param repo: repo instance
83 83 :param commits: list of commits
84 84 """
85 85 if not commits:
86 86 return json.dumps([]), json.dumps([])
87 87
88 88 def serialize(commit, parents=True):
89 89 data = dict(
90 90 raw_id=commit.raw_id,
91 91 idx=commit.idx,
92 92 branch=None,
93 93 )
94 94 if parents:
95 95 data['parents'] = [
96 96 serialize(x, parents=False) for x in commit.parents]
97 97 return data
98 98
99 99 prev_data = prev_data or []
100 100 next_data = next_data or []
101 101
102 102 current = [serialize(x) for x in commits]
103 103 commits = prev_data + current + next_data
104 104
105 105 dag = _dagwalker(repo, commits)
106 106
107 107 data = [[commit_id, vtx, edges, branch]
108 108 for commit_id, vtx, edges, branch in _colored(dag)]
109 109 return json.dumps(data), json.dumps(current)
110 110
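
_graph() prepends and appends the neighbouring chunks (prev_data + current + next_data) before walking the DAG, so edges that cross a page boundary still resolve when commits are loaded incrementally. A toy illustration of the serialize-and-stitch step, using a stub commit type (all names are illustrative):

    import json
    from collections import namedtuple

    # stub standing in for a vcs commit object
    Commit = namedtuple('Commit', ['raw_id', 'idx', 'parents'])

    def serialize(commit, parents=True):
        data = dict(raw_id=commit.raw_id, idx=commit.idx, branch=None)
        if parents:
            # one level deep is enough for the dag walker
            data['parents'] = [
                serialize(p, parents=False) for p in commit.parents]
        return data

    root = Commit('a' * 40, 0, [])
    child = Commit('b' * 40, 1, [root])

    current = [serialize(child)]       # the page being rendered
    prev_data = [serialize(root)]      # chunk already on screen
    stitched = prev_data + current     # what the dag walker receives
    print(json.dumps(stitched))
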
111 111 def _check_if_valid_branch(self, branch_name, repo_name, f_path):
112 112 if branch_name not in self.rhodecode_vcs_repo.branches_all:
113 113 h.flash(u'Branch {} was not found.'.format(h.escape(safe_unicode(branch_name))),
114 114 category='warning')
115 115 redirect_url = h.route_path(
116 116 'repo_commits_file', repo_name=repo_name,
117 117 commit_id=branch_name, f_path=f_path or '')
118 118 raise HTTPFound(redirect_url)
119 119
120 120 def _load_changelog_data(
121 121 self, c, collection, page, chunk_size, branch_name=None,
122 122 dynamic=False, f_path=None, commit_id=None):
123 123
124 124 def url_generator(page_num):
125 125 query_params = {
126 126 'page': page_num
127 127 }
128 128
129 129 if branch_name:
130 130 query_params.update({
131 131 'branch': branch_name
132 132 })
133 133
134 134 if f_path:
135 135 # changelog for file
136 136 return h.route_path(
137 137 'repo_commits_file',
138 138 repo_name=c.rhodecode_db_repo.repo_name,
139 139 commit_id=commit_id, f_path=f_path,
140 140 _query=query_params)
141 141 else:
142 142 return h.route_path(
143 143 'repo_commits',
144 144 repo_name=c.rhodecode_db_repo.repo_name, _query=query_params)
145 145
146 146 c.total_cs = len(collection)
147 147 c.showing_commits = min(chunk_size, c.total_cs)
148 148 c.pagination = RepoPage(collection, page=page, item_count=c.total_cs,
149 149 items_per_page=chunk_size, url_maker=url_generator)
150 150
151 151 c.next_page = c.pagination.next_page
152 152 c.prev_page = c.pagination.previous_page
153 153
154 154 if dynamic:
155 155 if self.request.GET.get('chunk') != 'next':
156 156 c.next_page = None
157 157 if self.request.GET.get('chunk') != 'prev':
158 158 c.prev_page = None
159 159
160 160 page_commit_ids = [x.raw_id for x in c.pagination]
161 161 c.comments = c.rhodecode_db_repo.get_comments(page_commit_ids)
162 162 c.statuses = c.rhodecode_db_repo.statuses(page_commit_ids)
163 163
164 164 def load_default_context(self):
165 165 c = self._get_local_tmpl_context(include_app_defaults=True)
166 166
167 167 c.rhodecode_repo = self.rhodecode_vcs_repo
168 168
169 169 return c
170 170
171 171 def _get_preload_attrs(self):
172 172 pre_load = ['author', 'branch', 'date', 'message', 'parents',
173 173 'obsolete', 'phase', 'hidden']
174 174 return pre_load
175 175
176 176 @LoginRequired()
177 177 @HasRepoPermissionAnyDecorator(
178 178 'repository.read', 'repository.write', 'repository.admin')
179 179 def repo_changelog(self):
180 180 c = self.load_default_context()
181 181
182 182 commit_id = self.request.matchdict.get('commit_id')
183 183 f_path = self._get_f_path(self.request.matchdict)
184 184 show_hidden = str2bool(self.request.GET.get('evolve'))
185 185
186 186 chunk_size = 20
187 187
188 188 c.branch_name = branch_name = self.request.GET.get('branch') or ''
189 189 c.book_name = book_name = self.request.GET.get('bookmark') or ''
190 190 c.f_path = f_path
191 191 c.commit_id = commit_id
192 192 c.show_hidden = show_hidden
193 193
194 194 hist_limit = safe_int(self.request.GET.get('limit')) or None
195 195
196 196 p = safe_int(self.request.GET.get('page', 1), 1)
197 197
198 198 c.selected_name = branch_name or book_name
199 199 if not commit_id and branch_name:
200 200 self._check_if_valid_branch(branch_name, self.db_repo_name, f_path)
201 201
202 202 c.changelog_for_path = f_path
203 203 pre_load = self._get_preload_attrs()
204 204
205 205 partial_xhr = self.request.environ.get('HTTP_X_PARTIAL_XHR')
206 206
207 207 try:
208 208 if f_path:
209 209 log.debug('generating changelog for path %s', f_path)
210 210 # get the history for the file!
211 211 base_commit = self.rhodecode_vcs_repo.get_commit(commit_id)
212 212
213 213 try:
214 214 collection = base_commit.get_path_history(
215 215 f_path, limit=hist_limit, pre_load=pre_load)
216 216 if collection and partial_xhr:
217 217 # for an ajax call we remove the first one, since we're
218 218 # looking at it right now in the context of a file commit
219 219 collection.pop(0)
220 220 except (NodeDoesNotExistError, CommitError):
221 221 # this node is not present at tip!
222 222 try:
223 223 commit = self._get_commit_or_redirect(commit_id)
224 224 collection = commit.get_path_history(f_path)
225 225 except RepositoryError as e:
226 226 h.flash(safe_str(e), category='warning')
227 227 redirect_url = h.route_path(
228 228 'repo_commits', repo_name=self.db_repo_name)
229 229 raise HTTPFound(redirect_url)
230 230 collection = list(reversed(collection))
231 231 else:
232 232 collection = self.rhodecode_vcs_repo.get_commits(
233 233 branch_name=branch_name, show_hidden=show_hidden,
234 234 pre_load=pre_load, translate_tags=False)
235 235
236 236 self._load_changelog_data(
237 237 c, collection, p, chunk_size, c.branch_name,
238 238 f_path=f_path, commit_id=commit_id)
239 239
240 240 except EmptyRepositoryError as e:
241 h.flash(safe_str(h.escape(e)), category='warning')
241 h.flash(h.escape(safe_str(e)), category='warning')
242 242 raise HTTPFound(
243 243 h.route_path('repo_summary', repo_name=self.db_repo_name))
244 244 except HTTPFound:
245 245 raise
246 246 except (RepositoryError, CommitDoesNotExistError, Exception) as e:
247 247 log.exception(safe_str(e))
248 h.flash(safe_str(h.escape(e)), category='error')
248 h.flash(h.escape(safe_str(e)), category='error')
249 249
250 250 if commit_id:
251 251 # from single commit page, we redirect to main commits
252 252 raise HTTPFound(
253 253 h.route_path('repo_commits', repo_name=self.db_repo_name))
254 254 else:
255 255 # otherwise we redirect to summary
256 256 raise HTTPFound(
257 257 h.route_path('repo_summary', repo_name=self.db_repo_name))
258 258
259 259 if partial_xhr or self.request.environ.get('HTTP_X_PJAX'):
260 260 # case when loading dynamic file history in file view
261 261 # loading from ajax, we don't want the first result, it was
262 262 # popped off in the code above
263 263 html = render(
264 264 'rhodecode:templates/commits/changelog_file_history.mako',
265 265 self._get_template_context(c), self.request)
266 266 return Response(html)
267 267
268 268 commit_ids = []
269 269 if not f_path:
270 270 # only load graph data when not in file history mode
271 271 commit_ids = c.pagination
272 272
273 273 c.graph_data, c.graph_commits = self._graph(
274 274 self.rhodecode_vcs_repo, commit_ids)
275 275
276 276 return self._get_template_context(c)
277 277
278 278 @LoginRequired()
279 279 @HasRepoPermissionAnyDecorator(
280 280 'repository.read', 'repository.write', 'repository.admin')
281 281 def repo_commits_elements(self):
282 282 c = self.load_default_context()
283 283 commit_id = self.request.matchdict.get('commit_id')
284 284 f_path = self._get_f_path(self.request.matchdict)
285 285 show_hidden = str2bool(self.request.GET.get('evolve'))
286 286
287 287 chunk_size = 20
288 288 hist_limit = safe_int(self.request.GET.get('limit')) or None
289 289
290 290 def wrap_for_error(err):
291 291 html = '<tr>' \
292 292 '<td colspan="9" class="alert alert-error">ERROR: {}</td>' \
293 293 '</tr>'.format(err)
294 294 return Response(html)
295 295
296 296 c.branch_name = branch_name = self.request.GET.get('branch') or ''
297 297 c.book_name = book_name = self.request.GET.get('bookmark') or ''
298 298 c.f_path = f_path
299 299 c.commit_id = commit_id
300 300 c.show_hidden = show_hidden
301 301
302 302 c.selected_name = branch_name or book_name
303 303 if branch_name and branch_name not in self.rhodecode_vcs_repo.branches_all:
304 304 return wrap_for_error(
305 305 safe_str('Branch: {} is not valid'.format(branch_name)))
306 306
307 307 pre_load = self._get_preload_attrs()
308 308
309 309 if f_path:
310 310 try:
311 311 base_commit = self.rhodecode_vcs_repo.get_commit(commit_id)
312 312 except (RepositoryError, CommitDoesNotExistError, Exception) as e:
313 313 log.exception(safe_str(e))
314 314 raise HTTPFound(
315 315 h.route_path('repo_commits', repo_name=self.db_repo_name))
316 316
317 317 collection = base_commit.get_path_history(
318 318 f_path, limit=hist_limit, pre_load=pre_load)
319 319 collection = list(reversed(collection))
320 320 else:
321 321 collection = self.rhodecode_vcs_repo.get_commits(
322 322 branch_name=branch_name, show_hidden=show_hidden, pre_load=pre_load,
323 323 translate_tags=False)
324 324
325 325 p = safe_int(self.request.GET.get('page', 1), 1)
326 326 try:
327 327 self._load_changelog_data(
328 328 c, collection, p, chunk_size, dynamic=True,
329 329 f_path=f_path, commit_id=commit_id)
330 330 except EmptyRepositoryError as e:
331 331 return wrap_for_error(safe_str(e))
332 332 except (RepositoryError, CommitDoesNotExistError, Exception) as e:
333 333 log.exception('Failed to fetch commits')
334 334 return wrap_for_error(safe_str(e))
335 335
336 336 prev_data = None
337 337 next_data = None
338 338
339 339 try:
340 340 prev_graph = json.loads(self.request.POST.get('graph') or '{}')
341 341 except json.JSONDecodeError:
342 342 prev_graph = {}
343 343
344 344 if self.request.GET.get('chunk') == 'prev':
345 345 next_data = prev_graph
346 346 elif self.request.GET.get('chunk') == 'next':
347 347 prev_data = prev_graph
348 348
349 349 commit_ids = []
350 350 if not f_path:
351 351 # only load graph data when not in file history mode
352 352 commit_ids = c.pagination
353 353
354 354 c.graph_data, c.graph_commits = self._graph(
355 355 self.rhodecode_vcs_repo, commit_ids,
356 356 prev_data=prev_data, next_data=next_data)
357 357
358 358 return self._get_template_context(c)
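
The only functional change in this file is the argument order in three h.flash() calls: the exception is now coerced with safe_str() first and HTML-escaped last, so the escaped value is what actually reaches the flash queue. A small sketch of the distinction, assuming h.escape behaves like markupsafe.escape (the Boom exception is hypothetical):

    from markupsafe import escape

    class Boom(Exception):
        pass

    err = Boom('<script>alert(1)</script>')

    # escape last: the coerced text is escaped and the result is a
    # Markup object, which templates know not to escape again
    safe = escape(str(err))
    assert '&lt;script&gt;' in str(safe)

    # escape first, stringify after: str() strips the Markup wrapper,
    # so downstream code can no longer tell the value was escaped
    risky = str(escape(err))
    assert not hasattr(risky, '__html__')
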
@@ -1,818 +1,819 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import logging
22 22 import collections
23 23
24 24 from pyramid.httpexceptions import (
25 25 HTTPNotFound, HTTPBadRequest, HTTPFound, HTTPForbidden, HTTPConflict)
26 26 from pyramid.renderers import render
27 27 from pyramid.response import Response
28 28
29 29 from rhodecode.apps._base import RepoAppView
30 30 from rhodecode.apps.file_store import utils as store_utils
31 31 from rhodecode.apps.file_store.exceptions import FileNotAllowedException, FileOverSizeException
32 32
33 33 from rhodecode.lib import diffs, codeblocks, channelstream
34 34 from rhodecode.lib.auth import (
35 35 LoginRequired, HasRepoPermissionAnyDecorator, NotAnonymous, CSRFRequired)
36 36 from rhodecode.lib.ext_json import json
37 37 from rhodecode.lib.compat import OrderedDict
38 38 from rhodecode.lib.diffs import (
39 39 cache_diff, load_cached_diff, diff_cache_exist, get_diff_context,
40 40 get_diff_whitespace_flag)
41 41 from rhodecode.lib.exceptions import StatusChangeOnClosedPullRequestError, CommentVersionMismatch
42 42 import rhodecode.lib.helpers as h
43 43 from rhodecode.lib.utils2 import safe_unicode, str2bool, StrictAttributeDict, safe_str
44 44 from rhodecode.lib.vcs.backends.base import EmptyCommit
45 45 from rhodecode.lib.vcs.exceptions import (
46 46 RepositoryError, CommitDoesNotExistError)
47 47 from rhodecode.model.db import ChangesetComment, ChangesetStatus, FileStore, \
48 48 ChangesetCommentHistory
49 49 from rhodecode.model.changeset_status import ChangesetStatusModel
50 50 from rhodecode.model.comment import CommentsModel
51 51 from rhodecode.model.meta import Session
52 52 from rhodecode.model.settings import VcsSettingsModel
53 53
54 54 log = logging.getLogger(__name__)
55 55
56 56
57 57 def _update_with_GET(params, request):
58 58 for k in ['diff1', 'diff2', 'diff']:
59 59 params[k] += request.GET.getall(k)
60 60
61 61
62 62 class RepoCommitsView(RepoAppView):
63 63 def load_default_context(self):
64 64 c = self._get_local_tmpl_context(include_app_defaults=True)
65 65 c.rhodecode_repo = self.rhodecode_vcs_repo
66 66
67 67 return c
68 68
69 69 def _is_diff_cache_enabled(self, target_repo):
70 70 caching_enabled = self._get_general_setting(
71 71 target_repo, 'rhodecode_diff_cache')
72 72 log.debug('Diff caching enabled: %s', caching_enabled)
73 73 return caching_enabled
74 74
75 75 def _commit(self, commit_id_range, method):
76 76 _ = self.request.translate
77 77 c = self.load_default_context()
78 78 c.fulldiff = self.request.GET.get('fulldiff')
79 79 redirect_to_combined = str2bool(self.request.GET.get('redirect_combined'))
80 80
81 81 # fetch global flags of ignore ws or context lines
82 82 diff_context = get_diff_context(self.request)
83 83 hide_whitespace_changes = get_diff_whitespace_flag(self.request)
84 84
85 85 # diff_limit will cut off the whole diff if the limit is applied
86 86 # otherwise it will just hide the big files from the front-end
87 87 diff_limit = c.visual.cut_off_limit_diff
88 88 file_limit = c.visual.cut_off_limit_file
89 89
90 90 # get ranges of commit ids if preset
91 91 commit_range = commit_id_range.split('...')[:2]
92 92
93 93 try:
94 94 pre_load = ['affected_files', 'author', 'branch', 'date',
95 95 'message', 'parents']
96 96 if self.rhodecode_vcs_repo.alias == 'hg':
97 97 pre_load += ['hidden', 'obsolete', 'phase']
98 98
99 99 if len(commit_range) == 2:
100 100 commits = self.rhodecode_vcs_repo.get_commits(
101 101 start_id=commit_range[0], end_id=commit_range[1],
102 102 pre_load=pre_load, translate_tags=False)
103 103 commits = list(commits)
104 104 else:
105 105 commits = [self.rhodecode_vcs_repo.get_commit(
106 106 commit_id=commit_id_range, pre_load=pre_load)]
107 107
108 108 c.commit_ranges = commits
109 109 if not c.commit_ranges:
110 110 raise RepositoryError('The commit range returned an empty result')
111 111 except CommitDoesNotExistError as e:
112 112 msg = _('No such commit exists. Original exception: `{}`').format(safe_str(e))
113 113 h.flash(msg, category='error')
114 114 raise HTTPNotFound()
115 115 except Exception:
116 116 log.exception("General failure")
117 117 raise HTTPNotFound()
118 118 single_commit = len(c.commit_ranges) == 1
119 119
120 120 if redirect_to_combined and not single_commit:
121 121 source_ref = getattr(c.commit_ranges[0].parents[0]
122 122 if c.commit_ranges[0].parents else h.EmptyCommit(), 'raw_id')
123 123 target_ref = c.commit_ranges[-1].raw_id
124 124 next_url = h.route_path(
125 125 'repo_compare',
126 126 repo_name=c.repo_name,
127 127 source_ref_type='rev',
128 128 source_ref=source_ref,
129 129 target_ref_type='rev',
130 130 target_ref=target_ref)
131 131 raise HTTPFound(next_url)
132 132
133 133 c.changes = OrderedDict()
134 134 c.lines_added = 0
135 135 c.lines_deleted = 0
136 136
137 137 # auto collapse if we have more than limit
138 138 collapse_limit = diffs.DiffProcessor._collapse_commits_over
139 139 c.collapse_all_commits = len(c.commit_ranges) > collapse_limit
140 140
141 141 c.commit_statuses = ChangesetStatus.STATUSES
142 142 c.inline_comments = []
143 143 c.files = []
144 144
145 145 c.comments = []
146 146 c.unresolved_comments = []
147 147 c.resolved_comments = []
148 148
149 149 # Single commit
150 150 if single_commit:
151 151 commit = c.commit_ranges[0]
152 152 c.comments = CommentsModel().get_comments(
153 153 self.db_repo.repo_id,
154 154 revision=commit.raw_id)
155 155
156 156 # comments from PR
157 157 statuses = ChangesetStatusModel().get_statuses(
158 158 self.db_repo.repo_id, commit.raw_id,
159 159 with_revisions=True)
160 160
161 161 prs = set()
162 162 reviewers = list()
163 163 reviewers_duplicates = set() # to avoid duplicates from multiple votes
164 164 for c_status in statuses:
165 165
166 166 # extract associated pull-requests from votes
167 167 if c_status.pull_request:
168 168 prs.add(c_status.pull_request)
169 169
170 170 # extract reviewers
171 171 _user_id = c_status.author.user_id
172 172 if _user_id not in reviewers_duplicates:
173 173 reviewers.append(
174 174 StrictAttributeDict({
175 175 'user': c_status.author,
176 176
177 177 # fake attributes for the commit page that we don't have,
178 178 # but we share the display with the PR page
179 179 'mandatory': False,
180 180 'reasons': [],
181 181 'rule_user_group_data': lambda: None
182 182 })
183 183 )
184 184 reviewers_duplicates.add(_user_id)
185 185
186 186 c.reviewers_count = len(reviewers)
187 187 c.observers_count = 0
188 188
189 189 # from associated statuses, check the pull requests, and
190 190 # show comments from them
191 191 for pr in prs:
192 192 c.comments.extend(pr.comments)
193 193
194 194 c.unresolved_comments = CommentsModel()\
195 195 .get_commit_unresolved_todos(commit.raw_id)
196 196 c.resolved_comments = CommentsModel()\
197 197 .get_commit_resolved_todos(commit.raw_id)
198 198
199 199 c.inline_comments_flat = CommentsModel()\
200 200 .get_commit_inline_comments(commit.raw_id)
201 201
202 202 review_statuses = ChangesetStatusModel().aggregate_votes_by_user(
203 203 statuses, reviewers)
204 204
205 205 c.commit_review_status = ChangesetStatus.STATUS_NOT_REVIEWED
206 206
207 207 c.commit_set_reviewers_data_json = collections.OrderedDict({'reviewers': []})
208 208
209 209 for review_obj, member, reasons, mandatory, status in review_statuses:
210 210 member_reviewer = h.reviewer_as_json(
211 211 member, reasons=reasons, mandatory=mandatory, role=None,
212 212 user_group=None
213 213 )
214 214
215 215 current_review_status = status[0][1].status if status else ChangesetStatus.STATUS_NOT_REVIEWED
216 216 member_reviewer['review_status'] = current_review_status
217 217 member_reviewer['review_status_label'] = h.commit_status_lbl(current_review_status)
218 218 member_reviewer['allowed_to_update'] = False
219 219 c.commit_set_reviewers_data_json['reviewers'].append(member_reviewer)
220 220
221 221 c.commit_set_reviewers_data_json = json.dumps(c.commit_set_reviewers_data_json)
222 222
223 223 # NOTE(marcink): this uses the same voting logic as in pull-requests
224 224 c.commit_review_status = ChangesetStatusModel().calculate_status(review_statuses)
225 225 c.commit_broadcast_channel = channelstream.comment_channel(c.repo_name, commit_obj=commit)
226 226
227 227 diff = None
228 228 # Iterate over ranges (default commit view is always one commit)
229 229 for commit in c.commit_ranges:
230 230 c.changes[commit.raw_id] = []
231 231
232 232 commit2 = commit
233 233 commit1 = commit.first_parent
234 234
235 235 if method == 'show':
236 236 inline_comments = CommentsModel().get_inline_comments(
237 237 self.db_repo.repo_id, revision=commit.raw_id)
238 238 c.inline_cnt = len(CommentsModel().get_inline_comments_as_list(
239 239 inline_comments))
240 240 c.inline_comments = inline_comments
241 241
242 242 cache_path = self.rhodecode_vcs_repo.get_create_shadow_cache_pr_path(
243 243 self.db_repo)
244 244 cache_file_path = diff_cache_exist(
245 245 cache_path, 'diff', commit.raw_id,
246 246 hide_whitespace_changes, diff_context, c.fulldiff)
247 247
248 248 caching_enabled = self._is_diff_cache_enabled(self.db_repo)
249 249 force_recache = str2bool(self.request.GET.get('force_recache'))
250 250
251 251 cached_diff = None
252 252 if caching_enabled:
253 253 cached_diff = load_cached_diff(cache_file_path)
254 254
255 255 has_proper_diff_cache = cached_diff and cached_diff.get('diff')
256 256 if not force_recache and has_proper_diff_cache:
257 257 diffset = cached_diff['diff']
258 258 else:
259 259 vcs_diff = self.rhodecode_vcs_repo.get_diff(
260 260 commit1, commit2,
261 261 ignore_whitespace=hide_whitespace_changes,
262 262 context=diff_context)
263 263
264 264 diff_processor = diffs.DiffProcessor(
265 265 vcs_diff, format='newdiff', diff_limit=diff_limit,
266 266 file_limit=file_limit, show_full_diff=c.fulldiff)
267 267
268 268 _parsed = diff_processor.prepare()
269 269
270 270 diffset = codeblocks.DiffSet(
271 271 repo_name=self.db_repo_name,
272 272 source_node_getter=codeblocks.diffset_node_getter(commit1),
273 273 target_node_getter=codeblocks.diffset_node_getter(commit2))
274 274
275 275 diffset = self.path_filter.render_patchset_filtered(
276 276 diffset, _parsed, commit1.raw_id, commit2.raw_id)
277 277
278 278 # save cached diff
279 279 if caching_enabled:
280 280 cache_diff(cache_file_path, diffset, None)
281 281
282 282 c.limited_diff = diffset.limited_diff
283 283 c.changes[commit.raw_id] = diffset
284 284 else:
285 285 # TODO(marcink): no cache usage here...
286 286 _diff = self.rhodecode_vcs_repo.get_diff(
287 287 commit1, commit2,
288 288 ignore_whitespace=hide_whitespace_changes, context=diff_context)
289 289 diff_processor = diffs.DiffProcessor(
290 290 _diff, format='newdiff', diff_limit=diff_limit,
291 291 file_limit=file_limit, show_full_diff=c.fulldiff)
292 292                 # for downloads/raw we only need the RAW diff, nothing else
293 293 diff = self.path_filter.get_raw_patch(diff_processor)
294 294 c.changes[commit.raw_id] = [None, None, None, None, diff, None, None]
295 295
296 296 # sort comments by how they were generated
297 297 c.comments = sorted(c.comments, key=lambda x: x.comment_id)
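        # comment ids are (presumably) assigned sequentially, so this orders
        # the comments by creation time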
298 298 c.at_version_num = None
299 299
300 300 if len(c.commit_ranges) == 1:
301 301 c.commit = c.commit_ranges[0]
302 302 c.parent_tmpl = ''.join(
303 303 '# Parent %s\n' % x.raw_id for x in c.commit.parents)
304 304
305 305 if method == 'download':
306 306 response = Response(diff)
307 307 response.content_type = 'text/plain'
308 308 response.content_disposition = (
309 309 'attachment; filename=%s.diff' % commit_id_range[:12])
310 310 return response
311 311 elif method == 'patch':
312 312 c.diff = safe_unicode(diff)
313 313 patch = render(
314 314 'rhodecode:templates/changeset/patch_changeset.mako',
315 315 self._get_template_context(c), self.request)
316 316 response = Response(patch)
317 317 response.content_type = 'text/plain'
318 318 return response
319 319 elif method == 'raw':
320 320 response = Response(diff)
321 321 response.content_type = 'text/plain'
322 322 return response
323 323 elif method == 'show':
324 324 if len(c.commit_ranges) == 1:
325 325 html = render(
326 326 'rhodecode:templates/changeset/changeset.mako',
327 327 self._get_template_context(c), self.request)
328 328 return Response(html)
329 329 else:
330 330 c.ancestor = None
331 331 c.target_repo = self.db_repo
332 332 html = render(
333 333 'rhodecode:templates/changeset/changeset_range.mako',
334 334 self._get_template_context(c), self.request)
335 335 return Response(html)
336 336
337 337 raise HTTPBadRequest()
338 338
339 339 @LoginRequired()
340 340 @HasRepoPermissionAnyDecorator(
341 341 'repository.read', 'repository.write', 'repository.admin')
342 342 def repo_commit_show(self):
343 343 commit_id = self.request.matchdict['commit_id']
344 344 return self._commit(commit_id, method='show')
345 345
346 346 @LoginRequired()
347 347 @HasRepoPermissionAnyDecorator(
348 348 'repository.read', 'repository.write', 'repository.admin')
349 349 def repo_commit_raw(self):
350 350 commit_id = self.request.matchdict['commit_id']
351 351 return self._commit(commit_id, method='raw')
352 352
353 353 @LoginRequired()
354 354 @HasRepoPermissionAnyDecorator(
355 355 'repository.read', 'repository.write', 'repository.admin')
356 356 def repo_commit_patch(self):
357 357 commit_id = self.request.matchdict['commit_id']
358 358 return self._commit(commit_id, method='patch')
359 359
360 360 @LoginRequired()
361 361 @HasRepoPermissionAnyDecorator(
362 362 'repository.read', 'repository.write', 'repository.admin')
363 363 def repo_commit_download(self):
364 364 commit_id = self.request.matchdict['commit_id']
365 365 return self._commit(commit_id, method='download')
366 366
367 367 def _commit_comments_create(self, commit_id, comments):
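        # contract sketch (inferred from the code below): each entry in
        # `comments` is a dict like the one built in repo_commit_comment_create;
        # the returned dict maps every new comment_id to its target DOM element
        # id plus the serialized comment and its rendered HTML, which the UI
        # uses to inject the comment without a page reload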
368 368 _ = self.request.translate
369 369 data = {}
370 370 if not comments:
371 371 return
372 372
373 373 commit = self.db_repo.get_commit(commit_id)
374 374
375 375 all_drafts = len([x for x in comments if str2bool(x['is_draft'])]) == len(comments)
376 376 for entry in comments:
377 377 c = self.load_default_context()
378 378 comment_type = entry['comment_type']
379 379 text = entry['text']
380 380 status = entry['status']
381 381 is_draft = str2bool(entry['is_draft'])
382 382 resolves_comment_id = entry['resolves_comment_id']
383 383 f_path = entry['f_path']
384 384 line_no = entry['line']
385 385 target_elem_id = 'file-{}'.format(h.safeid(h.safe_unicode(f_path)))
386 386
387 387 if status:
388 388 text = text or (_('Status change %(transition_icon)s %(status)s')
389 389 % {'transition_icon': '>',
390 390 'status': ChangesetStatus.get_status_lbl(status)})
391 391
392 392 comment = CommentsModel().create(
393 393 text=text,
394 394 repo=self.db_repo.repo_id,
395 395 user=self._rhodecode_db_user.user_id,
396 396 commit_id=commit_id,
397 397 f_path=f_path,
398 398 line_no=line_no,
399 399 status_change=(ChangesetStatus.get_status_lbl(status)
400 400 if status else None),
401 401 status_change_type=status,
402 402 comment_type=comment_type,
403 403 is_draft=is_draft,
404 404 resolves_comment_id=resolves_comment_id,
405 405 auth_user=self._rhodecode_user,
406 406 send_email=not is_draft, # skip notification for draft comments
407 407 )
408 408 is_inline = comment.is_inline
409 409
410 410             # apply the status change, if one was set
411 411 if status:
412 412                 # `dont_allow_on_closed_pull_request = True` means that if the
413 413                 # latest status came from a pull request which is now closed,
414 414                 # changing the status is disallowed
415 415
416 416 try:
417 417 ChangesetStatusModel().set_status(
418 418 self.db_repo.repo_id,
419 419 status,
420 420 self._rhodecode_db_user.user_id,
421 421 comment,
422 422 revision=commit_id,
423 423 dont_allow_on_closed_pull_request=True
424 424 )
425 425 except StatusChangeOnClosedPullRequestError:
426 426 msg = _('Changing the status of a commit associated with '
427 427 'a closed pull request is not allowed')
428 428 log.exception(msg)
429 429 h.flash(msg, category='warning')
430 430 raise HTTPFound(h.route_path(
431 431 'repo_commit', repo_name=self.db_repo_name,
432 432 commit_id=commit_id))
433 433
434 434 Session().flush()
435 435             # refreshing is required so that relationships lazily loaded
436 436             # on the comment become accessible
437 437 Session().refresh(comment)
438 438
439 439 # skip notifications for drafts
440 440 if not is_draft:
441 441 CommentsModel().trigger_commit_comment_hook(
442 442 self.db_repo, self._rhodecode_user, 'create',
443 443 data={'comment': comment, 'commit': commit})
444 444
445 445 comment_id = comment.comment_id
446 446 data[comment_id] = {
447 447 'target_id': target_elem_id
448 448 }
449 449 Session().flush()
450 450
451 451 c.co = comment
452 452 c.at_version_num = 0
453 453 c.is_new = True
454 454 rendered_comment = render(
455 455 'rhodecode:templates/changeset/changeset_comment_block.mako',
456 456 self._get_template_context(c), self.request)
457 457
458 458 data[comment_id].update(comment.get_dict())
459 459 data[comment_id].update({'rendered_text': rendered_comment})
460 460
461 461 # finalize, commit and redirect
462 462 Session().commit()
463 463
464 464 # skip channelstream for draft comments
465 465 if not all_drafts:
466 466 comment_broadcast_channel = channelstream.comment_channel(
467 467 self.db_repo_name, commit_obj=commit)
468 468
469 469 comment_data = data
470 470 posted_comment_type = 'inline' if is_inline else 'general'
471 471 if len(data) == 1:
472 472 msg = _('posted {} new {} comment').format(len(data), posted_comment_type)
473 473 else:
474 474 msg = _('posted {} new {} comments').format(len(data), posted_comment_type)
475 475
476 476 channelstream.comment_channelstream_push(
477 477 self.request, comment_broadcast_channel, self._rhodecode_user, msg,
478 478 comment_data=comment_data)
479 479
480 480 return data
481 481
482 482 @LoginRequired()
483 483 @NotAnonymous()
484 484 @HasRepoPermissionAnyDecorator(
485 485 'repository.read', 'repository.write', 'repository.admin')
486 486 @CSRFRequired()
487 487 def repo_commit_comment_create(self):
488 488 _ = self.request.translate
489 489 commit_id = self.request.matchdict['commit_id']
490 490
491 491 multi_commit_ids = []
492 492 for _commit_id in self.request.POST.get('commit_ids', '').split(','):
493 493 if _commit_id not in ['', None, EmptyCommit.raw_id]:
494 494 if _commit_id not in multi_commit_ids:
495 495 multi_commit_ids.append(_commit_id)
496 496
497 497 commit_ids = multi_commit_ids or [commit_id]
498 498
499 499 data = []
500 500         # create the comment separately for each passed commit id
501 501 for current_id in filter(None, commit_ids):
502 502 comment_data = {
503 503 'comment_type': self.request.POST.get('comment_type'),
504 504 'text': self.request.POST.get('text'),
505 505 'status': self.request.POST.get('changeset_status', None),
506 506 'is_draft': self.request.POST.get('draft'),
507 507 'resolves_comment_id': self.request.POST.get('resolves_comment_id', None),
508 508 'close_pull_request': self.request.POST.get('close_pull_request'),
509 509 'f_path': self.request.POST.get('f_path'),
510 510 'line': self.request.POST.get('line'),
511 511 }
512 512 comment = self._commit_comments_create(commit_id=current_id, comments=[comment_data])
513 513 data.append(comment)
514 514
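        # a single-commit post returns the comment dict directly; posting to
        # multiple commits returns a list of dicts, one per commit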
515 515 return data if len(data) > 1 else data[0]
516 516
517 517 @LoginRequired()
518 518 @NotAnonymous()
519 519 @HasRepoPermissionAnyDecorator(
520 520 'repository.read', 'repository.write', 'repository.admin')
521 521 @CSRFRequired()
522 522 def repo_commit_comment_preview(self):
523 523 # Technically a CSRF token is not needed as no state changes with this
524 524         # call. However, as this is a POST, it is better to have it, so automated
525 525 # tools don't flag it as potential CSRF.
526 526 # Post is required because the payload could be bigger than the maximum
527 527 # allowed by GET.
528 528
529 529 text = self.request.POST.get('text')
530 530 renderer = self.request.POST.get('renderer') or 'rst'
531 531 if text:
532 532 return h.render(text, renderer=renderer, mentions=True,
533 533 repo_name=self.db_repo_name)
534 534 return ''
535 535
536 536 @LoginRequired()
537 537 @HasRepoPermissionAnyDecorator(
538 538 'repository.read', 'repository.write', 'repository.admin')
539 539 @CSRFRequired()
540 540 def repo_commit_comment_history_view(self):
541 541 c = self.load_default_context()
542 comment_id = self.request.matchdict['comment_id']
542 543 comment_history_id = self.request.matchdict['comment_history_id']
543 544
544 comment = ChangesetComment.get_or_404(comment_history_id)
545 comment = ChangesetComment.get_or_404(comment_id)
545 546 comment_owner = (comment.author.user_id == self._rhodecode_db_user.user_id)
546 547 if comment.draft and not comment_owner:
547 548             # viewing the history of a draft comment is only allowed for its owner
548 549 raise HTTPNotFound()
549 550
550 551 comment_history = ChangesetCommentHistory.get_or_404(comment_history_id)
551 552 is_repo_comment = comment_history.comment.repo.repo_id == self.db_repo.repo_id
552 553
553 554 if is_repo_comment:
554 555 c.comment_history = comment_history
555 556
556 557 rendered_comment = render(
557 558 'rhodecode:templates/changeset/comment_history.mako',
558 559 self._get_template_context(c), self.request)
559 560 return rendered_comment
560 561 else:
561 562 log.warning('No permissions for user %s to show comment_history_id: %s',
562 563 self._rhodecode_db_user, comment_history_id)
563 564 raise HTTPNotFound()
564 565
565 566 @LoginRequired()
566 567 @NotAnonymous()
567 568 @HasRepoPermissionAnyDecorator(
568 569 'repository.read', 'repository.write', 'repository.admin')
569 570 @CSRFRequired()
570 571 def repo_commit_comment_attachment_upload(self):
571 572 c = self.load_default_context()
572 573 upload_key = 'attachment'
573 574
574 575 file_obj = self.request.POST.get(upload_key)
575 576
576 577 if file_obj is None:
577 578 self.request.response.status = 400
578 579 return {'store_fid': None,
579 580 'access_path': None,
580 581 'error': '{} data field is missing'.format(upload_key)}
581 582
582 583 if not hasattr(file_obj, 'filename'):
583 584 self.request.response.status = 400
584 585 return {'store_fid': None,
585 586 'access_path': None,
586 587 'error': 'filename cannot be read from the data field'}
587 588
588 589 filename = file_obj.filename
589 590 file_display_name = filename
590 591
591 592 metadata = {
592 593 'user_uploaded': {'username': self._rhodecode_user.username,
593 594 'user_id': self._rhodecode_user.user_id,
594 595 'ip': self._rhodecode_user.ip_addr}}
595 596
596 597 # TODO(marcink): allow .ini configuration for allowed_extensions, and file-size
597 598 allowed_extensions = [
598 599             '.gif', '.jpeg', '.jpg', '.png', '.docx', '.gz', '.log', '.pdf',
599 600 '.pptx', '.txt', '.xlsx', '.zip']
600 601 max_file_size = 10 * 1024 * 1024 # 10MB, also validated via dropzone.js
601 602
602 603 try:
603 604 storage = store_utils.get_file_storage(self.request.registry.settings)
604 605 store_uid, metadata = storage.save_file(
605 606 file_obj.file, filename, extra_metadata=metadata,
606 607 extensions=allowed_extensions, max_filesize=max_file_size)
607 608 except FileNotAllowedException:
608 609 self.request.response.status = 400
609 610 permitted_extensions = ', '.join(allowed_extensions)
610 611 error_msg = 'File `{}` is not allowed. ' \
611 612                         'Only the following extensions are permitted: {}'.format(
612 613 filename, permitted_extensions)
613 614 return {'store_fid': None,
614 615 'access_path': None,
615 616 'error': error_msg}
616 617 except FileOverSizeException:
617 618 self.request.response.status = 400
618 619 limit_mb = h.format_byte_size_binary(max_file_size)
619 620 return {'store_fid': None,
620 621 'access_path': None,
621 622                     'error': 'File {} exceeds the allowed limit of {}.'.format(
622 623 filename, limit_mb)}
623 624
624 625 try:
625 626 entry = FileStore.create(
626 627 file_uid=store_uid, filename=metadata["filename"],
627 628 file_hash=metadata["sha256"], file_size=metadata["size"],
628 629 file_display_name=file_display_name,
629 630 file_description=u'comment attachment `{}`'.format(safe_unicode(filename)),
630 631 hidden=True, check_acl=True, user_id=self._rhodecode_user.user_id,
631 632 scope_repo_id=self.db_repo.repo_id
632 633 )
633 634 Session().add(entry)
634 635 Session().commit()
635 636 log.debug('Stored upload in DB as %s', entry)
636 637 except Exception:
637 638 log.exception('Failed to store file %s', filename)
638 639 self.request.response.status = 400
639 640 return {'store_fid': None,
640 641 'access_path': None,
641 642                     'error': 'Failed to store file {} in the DB.'.format(filename)}
642 643
643 644 Session().commit()
644 645
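        # h.route_path builds a server-relative path while h.route_url builds
        # the fully-qualified URL, hence the fqn_* variants below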
645 646 return {
646 647 'store_fid': store_uid,
647 648 'access_path': h.route_path(
648 649 'download_file', fid=store_uid),
649 650 'fqn_access_path': h.route_url(
650 651 'download_file', fid=store_uid),
651 652 'repo_access_path': h.route_path(
652 653 'repo_artifacts_get', repo_name=self.db_repo_name, uid=store_uid),
653 654 'repo_fqn_access_path': h.route_url(
654 655 'repo_artifacts_get', repo_name=self.db_repo_name, uid=store_uid),
655 656 }
656 657
657 658 @LoginRequired()
658 659 @NotAnonymous()
659 660 @HasRepoPermissionAnyDecorator(
660 661 'repository.read', 'repository.write', 'repository.admin')
661 662 @CSRFRequired()
662 663 def repo_commit_comment_delete(self):
663 664 commit_id = self.request.matchdict['commit_id']
664 665 comment_id = self.request.matchdict['comment_id']
665 666
666 667 comment = ChangesetComment.get_or_404(comment_id)
667 668 if not comment:
668 669 log.debug('Comment with id:%s not found, skipping', comment_id)
669 670             # the comment was probably already deleted in another call
670 671 return True
671 672
672 673 if comment.immutable:
673 674 # don't allow deleting comments that are immutable
674 675 raise HTTPForbidden()
675 676
676 677 is_repo_admin = h.HasRepoPermissionAny('repository.admin')(self.db_repo_name)
677 678 super_admin = h.HasPermissionAny('hg.admin')()
678 679 comment_owner = (comment.author.user_id == self._rhodecode_db_user.user_id)
679 680 is_repo_comment = comment.repo.repo_id == self.db_repo.repo_id
680 681 comment_repo_admin = is_repo_admin and is_repo_comment
681 682
682 683 if comment.draft and not comment_owner:
683 684                 # we never allow anyone other than the owner to delete draft comments
684 685 raise HTTPNotFound()
685 686
686 687 if super_admin or comment_owner or comment_repo_admin:
687 688 CommentsModel().delete(comment=comment, auth_user=self._rhodecode_user)
688 689 Session().commit()
689 690 return True
690 691 else:
691 692 log.warning('No permissions for user %s to delete comment_id: %s',
692 693 self._rhodecode_db_user, comment_id)
693 694 raise HTTPNotFound()
694 695
695 696 @LoginRequired()
696 697 @NotAnonymous()
697 698 @HasRepoPermissionAnyDecorator(
698 699 'repository.read', 'repository.write', 'repository.admin')
699 700 @CSRFRequired()
700 701 def repo_commit_comment_edit(self):
701 702 self.load_default_context()
702 703
703 704 commit_id = self.request.matchdict['commit_id']
704 705 comment_id = self.request.matchdict['comment_id']
705 706 comment = ChangesetComment.get_or_404(comment_id)
706 707
707 708 if comment.immutable:
708 709             # don't allow editing comments that are immutable
709 710 raise HTTPForbidden()
710 711
711 712 is_repo_admin = h.HasRepoPermissionAny('repository.admin')(self.db_repo_name)
712 713 super_admin = h.HasPermissionAny('hg.admin')()
713 714 comment_owner = (comment.author.user_id == self._rhodecode_db_user.user_id)
714 715 is_repo_comment = comment.repo.repo_id == self.db_repo.repo_id
715 716 comment_repo_admin = is_repo_admin and is_repo_comment
716 717
717 718 if super_admin or comment_owner or comment_repo_admin:
718 719 text = self.request.POST.get('text')
719 720 version = self.request.POST.get('version')
720 721 if text == comment.text:
721 722 log.warning(
722 723 'Comment(repo): '
723 724 'Trying to create new version '
724 725                     'with the same comment body for comment_id: {}'.format(
725 726 comment_id,
726 727 )
727 728 )
728 729 raise HTTPNotFound()
729 730
730 731 if version.isdigit():
731 732 version = int(version)
732 733 else:
733 734 log.warning(
734 735 'Comment(repo): Wrong version type {} {} '
735 736 'for comment {}'.format(
736 737 version,
737 738 type(version),
738 739 comment_id,
739 740 )
740 741 )
741 742 raise HTTPNotFound()
742 743
743 744 try:
744 745 comment_history = CommentsModel().edit(
745 746 comment_id=comment_id,
746 747 text=text,
747 748 auth_user=self._rhodecode_user,
748 749 version=version,
749 750 )
750 751 except CommentVersionMismatch:
751 752 raise HTTPConflict()
752 753
753 754 if not comment_history:
754 755 raise HTTPNotFound()
755 756
756 757 if not comment.draft:
757 758 commit = self.db_repo.get_commit(commit_id)
758 759 CommentsModel().trigger_commit_comment_hook(
759 760 self.db_repo, self._rhodecode_user, 'edit',
760 761 data={'comment': comment, 'commit': commit})
761 762
762 763 Session().commit()
763 764 return {
764 765 'comment_history_id': comment_history.comment_history_id,
765 766 'comment_id': comment.comment_id,
766 767 'comment_version': comment_history.version,
767 768 'comment_author_username': comment_history.author.username,
768 769 'comment_author_gravatar': h.gravatar_url(comment_history.author.email, 16),
769 770 'comment_created_on': h.age_component(comment_history.created_on,
770 771 time_is_local=True),
771 772 }
772 773 else:
773 774 log.warning('No permissions for user %s to edit comment_id: %s',
774 775 self._rhodecode_db_user, comment_id)
775 776 raise HTTPNotFound()
776 777
777 778 @LoginRequired()
778 779 @HasRepoPermissionAnyDecorator(
779 780 'repository.read', 'repository.write', 'repository.admin')
780 781 def repo_commit_data(self):
781 782 commit_id = self.request.matchdict['commit_id']
782 783 self.load_default_context()
783 784
784 785 try:
785 786 return self.rhodecode_vcs_repo.get_commit(commit_id=commit_id)
786 787 except CommitDoesNotExistError as e:
787 788 return EmptyCommit(message=str(e))
788 789
789 790 @LoginRequired()
790 791 @HasRepoPermissionAnyDecorator(
791 792 'repository.read', 'repository.write', 'repository.admin')
792 793 def repo_commit_children(self):
793 794 commit_id = self.request.matchdict['commit_id']
794 795 self.load_default_context()
795 796
796 797 try:
797 798 commit = self.rhodecode_vcs_repo.get_commit(commit_id=commit_id)
798 799 children = commit.children
799 800 except CommitDoesNotExistError:
800 801 children = []
801 802
802 803 result = {"results": children}
803 804 return result
804 805
805 806 @LoginRequired()
806 807 @HasRepoPermissionAnyDecorator(
807 808 'repository.read', 'repository.write', 'repository.admin')
808 809 def repo_commit_parents(self):
809 810 commit_id = self.request.matchdict['commit_id']
810 811 self.load_default_context()
811 812
812 813 try:
813 814 commit = self.rhodecode_vcs_repo.get_commit(commit_id=commit_id)
814 815 parents = commit.parents
815 816 except CommitDoesNotExistError:
816 817 parents = []
817 818 result = {"results": parents}
818 819 return result
@@ -1,305 +1,305 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2012-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21
22 22 import logging
23 23
24 24 from pyramid.httpexceptions import HTTPBadRequest, HTTPNotFound, HTTPFound
25 25
26 26 from pyramid.renderers import render
27 27 from pyramid.response import Response
28 28
29 29 from rhodecode.apps._base import RepoAppView
30 30
31 31 from rhodecode.lib import helpers as h
32 32 from rhodecode.lib import diffs, codeblocks
33 33 from rhodecode.lib.auth import LoginRequired, HasRepoPermissionAnyDecorator
34 34 from rhodecode.lib.utils import safe_str
35 35 from rhodecode.lib.utils2 import safe_unicode, str2bool
36 36 from rhodecode.lib.view_utils import parse_path_ref, get_commit_from_ref_name
37 37 from rhodecode.lib.vcs.exceptions import (
38 38 EmptyRepositoryError, RepositoryError, RepositoryRequirementError,
39 39 NodeDoesNotExistError)
40 40 from rhodecode.model.db import Repository, ChangesetStatus
41 41
42 42 log = logging.getLogger(__name__)
43 43
44 44
45 45 class RepoCompareView(RepoAppView):
46 46 def load_default_context(self):
47 47 c = self._get_local_tmpl_context(include_app_defaults=True)
48 48 c.rhodecode_repo = self.rhodecode_vcs_repo
49 49 return c
50 50
51 51 def _get_commit_or_redirect(
52 52 self, ref, ref_type, repo, redirect_after=True, partial=False):
53 53 """
54 54 This is a safe way to get a commit. If an error occurs it
55 55 redirects to a commit with a proper message. If partial is set
56 56         then it does not redirect but raises an exception instead.
57 57 """
58 58 _ = self.request.translate
59 59 try:
60 60 return get_commit_from_ref_name(repo, safe_str(ref), ref_type)
61 61 except EmptyRepositoryError:
62 62 if not redirect_after:
63 63 return repo.scm_instance().EMPTY_COMMIT
64 64 h.flash(h.literal(_('There are no commits yet')),
65 65 category='warning')
66 66 if not partial:
67 67 raise HTTPFound(
68 68 h.route_path('repo_summary', repo_name=repo.repo_name))
69 69 raise HTTPBadRequest()
70 70
71 71 except RepositoryError as e:
72 72 log.exception(safe_str(e))
73 h.flash(safe_str(h.escape(e)), category='warning')
73 h.flash(h.escape(safe_str(e)), category='warning')
74 74 if not partial:
75 75 raise HTTPFound(
76 76 h.route_path('repo_summary', repo_name=repo.repo_name))
77 77 raise HTTPBadRequest()
78 78
79 79 @LoginRequired()
80 80 @HasRepoPermissionAnyDecorator(
81 81 'repository.read', 'repository.write', 'repository.admin')
82 82 def compare_select(self):
83 83 _ = self.request.translate
84 84 c = self.load_default_context()
85 85
86 86 source_repo = self.db_repo_name
87 87 target_repo = self.request.GET.get('target_repo', source_repo)
88 88 c.source_repo = Repository.get_by_repo_name(source_repo)
89 89 c.target_repo = Repository.get_by_repo_name(target_repo)
90 90
91 91 if c.source_repo is None or c.target_repo is None:
92 92 raise HTTPNotFound()
93 93
94 94 c.compare_home = True
95 95 c.commit_ranges = []
96 96 c.collapse_all_commits = False
97 97 c.diffset = None
98 98 c.limited_diff = False
99 99 c.source_ref = c.target_ref = _('Select commit')
100 100 c.source_ref_type = ""
101 101 c.target_ref_type = ""
102 102 c.commit_statuses = ChangesetStatus.STATUSES
103 103 c.preview_mode = False
104 104 c.file_path = None
105 105
106 106 return self._get_template_context(c)
107 107
108 108 @LoginRequired()
109 109 @HasRepoPermissionAnyDecorator(
110 110 'repository.read', 'repository.write', 'repository.admin')
111 111 def compare(self):
112 112 _ = self.request.translate
113 113 c = self.load_default_context()
114 114
115 115 source_ref_type = self.request.matchdict['source_ref_type']
116 116 source_ref = self.request.matchdict['source_ref']
117 117 target_ref_type = self.request.matchdict['target_ref_type']
118 118 target_ref = self.request.matchdict['target_ref']
119 119
120 120 # source_ref will be evaluated in source_repo
121 121 source_repo_name = self.db_repo_name
122 122 source_path, source_id = parse_path_ref(source_ref)
123 123
124 124 # target_ref will be evaluated in target_repo
125 125 target_repo_name = self.request.GET.get('target_repo', source_repo_name)
126 126 target_path, target_id = parse_path_ref(
127 127 target_ref, default_path=self.request.GET.get('f_path', ''))
128 128
129 129 # if merge is True
130 130         # show the changes the source would get if it were merged with
131 131         # target, computed from their shared ancestor commit. Only commits
132 132 # which are in target but not in source will be shown.
133 133 merge = str2bool(self.request.GET.get('merge'))
134 134 # if merge is False
135 135 # Show a raw diff of source/target refs even if no ancestor exists
136 136
137 137 # c.fulldiff disables cut_off_limit
138 138 c.fulldiff = str2bool(self.request.GET.get('fulldiff'))
139 139
140 140         # fetch the global flags for whitespace handling and diff context lines
141 141 diff_context = diffs.get_diff_context(self.request)
142 142 hide_whitespace_changes = diffs.get_diff_whitespace_flag(self.request)
143 143
144 144 c.file_path = target_path
145 145 c.commit_statuses = ChangesetStatus.STATUSES
146 146
147 147 # if partial, returns just compare_commits.html (commits log)
148 148 partial = self.request.is_xhr
149 149
150 150 # swap url for compare_diff page
151 151 c.swap_url = h.route_path(
152 152 'repo_compare',
153 153 repo_name=target_repo_name,
154 154 source_ref_type=target_ref_type,
155 155 source_ref=target_ref,
156 156 target_repo=source_repo_name,
157 157 target_ref_type=source_ref_type,
158 158 target_ref=source_ref,
159 159 _query=dict(merge=merge and '1' or '', f_path=target_path))
160 160
161 161 source_repo = Repository.get_by_repo_name(source_repo_name)
162 162 target_repo = Repository.get_by_repo_name(target_repo_name)
163 163
164 164 if source_repo is None:
165 165 log.error('Could not find the source repo: {}'
166 166 .format(source_repo_name))
167 167 h.flash(_('Could not find the source repo: `{}`')
168 168 .format(h.escape(source_repo_name)), category='error')
169 169 raise HTTPFound(
170 170 h.route_path('repo_compare_select', repo_name=self.db_repo_name))
171 171
172 172 if target_repo is None:
173 173 log.error('Could not find the target repo: {}'
174 174                       .format(target_repo_name))
175 175 h.flash(_('Could not find the target repo: `{}`')
176 176 .format(h.escape(target_repo_name)), category='error')
177 177 raise HTTPFound(
178 178 h.route_path('repo_compare_select', repo_name=self.db_repo_name))
179 179
180 180 source_scm = source_repo.scm_instance()
181 181 target_scm = target_repo.scm_instance()
182 182
183 183 source_alias = source_scm.alias
184 184 target_alias = target_scm.alias
185 185 if source_alias != target_alias:
186 186 msg = _('The comparison of two different kinds of remote repos '
187 187 'is not available')
188 188 log.error(msg)
189 189 h.flash(msg, category='error')
190 190 raise HTTPFound(
191 191 h.route_path('repo_compare_select', repo_name=self.db_repo_name))
192 192
193 193 source_commit = self._get_commit_or_redirect(
194 194 ref=source_id, ref_type=source_ref_type, repo=source_repo,
195 195 partial=partial)
196 196 target_commit = self._get_commit_or_redirect(
197 197 ref=target_id, ref_type=target_ref_type, repo=target_repo,
198 198 partial=partial)
199 199
200 200 c.compare_home = False
201 201 c.source_repo = source_repo
202 202 c.target_repo = target_repo
203 203 c.source_ref = source_ref
204 204 c.target_ref = target_ref
205 205 c.source_ref_type = source_ref_type
206 206 c.target_ref_type = target_ref_type
207 207
208 208 pre_load = ["author", "date", "message", "branch"]
209 209 c.ancestor = None
210 210
211 211 try:
212 212 c.commit_ranges = source_scm.compare(
213 213 source_commit.raw_id, target_commit.raw_id,
214 214 target_scm, merge, pre_load=pre_load) or []
215 215 if merge:
216 216 c.ancestor = source_scm.get_common_ancestor(
217 217 source_commit.raw_id, target_commit.raw_id, target_scm)
218 218 except RepositoryRequirementError:
219 219 msg = _('Could not compare repos with different '
220 220 'large file settings')
221 221 log.error(msg)
222 222 if partial:
223 223 return Response(msg)
224 224 h.flash(msg, category='error')
225 225 raise HTTPFound(
226 226 h.route_path('repo_compare_select',
227 227 repo_name=self.db_repo_name))
228 228
229 229 c.statuses = self.db_repo.statuses(
230 230 [x.raw_id for x in c.commit_ranges])
231 231
232 232         # auto collapse if we have more commits than the limit
233 233 collapse_limit = diffs.DiffProcessor._collapse_commits_over
234 234 c.collapse_all_commits = len(c.commit_ranges) > collapse_limit
235 235
236 236 if partial: # for PR ajax commits loader
237 237 if not c.ancestor:
238 238 return Response('') # cannot merge if there is no ancestor
239 239
240 240 html = render(
241 241 'rhodecode:templates/compare/compare_commits.mako',
242 242 self._get_template_context(c), self.request)
243 243 return Response(html)
244 244
245 245 if c.ancestor:
246 246             # the case where we want a simple diff without incoming commits,
247 247 # previewing what will be merged.
248 248 # Make the diff on target repo (which is known to have target_ref)
249 249 log.debug('Using ancestor %s as source_ref instead of %s',
250 250 c.ancestor, source_ref)
251 251 source_repo = target_repo
252 252 source_commit = target_repo.get_commit(commit_id=c.ancestor)
253 253
254 254 # diff_limit will cut off the whole diff if the limit is applied
255 255 # otherwise it will just hide the big files from the front-end
256 256 diff_limit = c.visual.cut_off_limit_diff
257 257 file_limit = c.visual.cut_off_limit_file
258 258
259 259 log.debug('calculating diff between '
260 260 'source_ref:%s and target_ref:%s for repo `%s`',
261 261 source_commit, target_commit,
262 262 safe_unicode(source_repo.scm_instance().path))
263 263
264 264 if source_commit.repository != target_commit.repository:
265 265 msg = _(
266 266 "Repositories unrelated. "
267 267 "Cannot compare commit %(commit1)s from repository %(repo1)s "
268 268 "with commit %(commit2)s from repository %(repo2)s.") % {
269 269 'commit1': h.show_id(source_commit),
270 270 'repo1': source_repo.repo_name,
271 271 'commit2': h.show_id(target_commit),
272 272 'repo2': target_repo.repo_name,
273 273 }
274 274 h.flash(msg, category='error')
275 275 raise HTTPFound(
276 276 h.route_path('repo_compare_select',
277 277 repo_name=self.db_repo_name))
278 278
279 279 txt_diff = source_repo.scm_instance().get_diff(
280 280 commit1=source_commit, commit2=target_commit,
281 281 path=target_path, path1=source_path,
282 282 ignore_whitespace=hide_whitespace_changes, context=diff_context)
283 283
284 284 diff_processor = diffs.DiffProcessor(
285 285 txt_diff, format='newdiff', diff_limit=diff_limit,
286 286 file_limit=file_limit, show_full_diff=c.fulldiff)
287 287 _parsed = diff_processor.prepare()
288 288
289 289 diffset = codeblocks.DiffSet(
290 290 repo_name=source_repo.repo_name,
291 291 source_node_getter=codeblocks.diffset_node_getter(source_commit),
292 292 target_repo_name=self.db_repo_name,
293 293 target_node_getter=codeblocks.diffset_node_getter(target_commit),
294 294 )
295 295 c.diffset = self.path_filter.render_patchset_filtered(
296 296 diffset, _parsed, source_ref, target_ref)
297 297
298 298 c.preview_mode = merge
299 299 c.source_commit = source_commit
300 300 c.target_commit = target_commit
301 301
302 302 html = render(
303 303 'rhodecode:templates/compare/compare_diff.mako',
304 304 self._get_template_context(c), self.request)
305 305         return Response(html)
\ No newline at end of file
@@ -1,1581 +1,1581 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2011-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import itertools
22 22 import logging
23 23 import os
24 24 import shutil
25 25 import tempfile
26 26 import collections
27 27 import urllib
28 28 import pathlib2
29 29
30 30 from pyramid.httpexceptions import HTTPNotFound, HTTPBadRequest, HTTPFound
31 31
32 32 from pyramid.renderers import render
33 33 from pyramid.response import Response
34 34
35 35 import rhodecode
36 36 from rhodecode.apps._base import RepoAppView
37 37
38 38
39 39 from rhodecode.lib import diffs, helpers as h, rc_cache
40 40 from rhodecode.lib import audit_logger
41 41 from rhodecode.lib.view_utils import parse_path_ref
42 42 from rhodecode.lib.exceptions import NonRelativePathError
43 43 from rhodecode.lib.codeblocks import (
44 44 filenode_as_lines_tokens, filenode_as_annotated_lines_tokens)
45 45 from rhodecode.lib.utils2 import (
46 46 convert_line_endings, detect_mode, safe_str, str2bool, safe_int, sha1, safe_unicode)
47 47 from rhodecode.lib.auth import (
48 48 LoginRequired, HasRepoPermissionAnyDecorator, CSRFRequired)
49 49 from rhodecode.lib.vcs import path as vcspath
50 50 from rhodecode.lib.vcs.backends.base import EmptyCommit
51 51 from rhodecode.lib.vcs.conf import settings
52 52 from rhodecode.lib.vcs.nodes import FileNode
53 53 from rhodecode.lib.vcs.exceptions import (
54 54 RepositoryError, CommitDoesNotExistError, EmptyRepositoryError,
55 55 ImproperArchiveTypeError, VCSError, NodeAlreadyExistsError,
56 56 NodeDoesNotExistError, CommitError, NodeError)
57 57
58 58 from rhodecode.model.scm import ScmModel
59 59 from rhodecode.model.db import Repository
60 60
61 61 log = logging.getLogger(__name__)
62 62
63 63
64 64 class RepoFilesView(RepoAppView):
65 65
66 66 @staticmethod
67 67 def adjust_file_path_for_svn(f_path, repo):
68 68 """
69 69 Computes the relative path of `f_path`.
70 70
71 71 This is mainly based on prefix matching of the recognized tags and
72 72 branches in the underlying repository.
73 73 """
74 74 tags_and_branches = itertools.chain(
75 75 repo.branches.iterkeys(),
76 76 repo.tags.iterkeys())
77 77 tags_and_branches = sorted(tags_and_branches, key=len, reverse=True)
78 78
79 79 for name in tags_and_branches:
80 80 if f_path.startswith('{}/'.format(name)):
81 81 f_path = vcspath.relpath(f_path, name)
82 82 break
83 83 return f_path
84 84
85 85 def load_default_context(self):
86 86 c = self._get_local_tmpl_context(include_app_defaults=True)
87 87 c.rhodecode_repo = self.rhodecode_vcs_repo
88 88 c.enable_downloads = self.db_repo.enable_downloads
89 89 return c
90 90
91 91 def _ensure_not_locked(self, commit_id='tip'):
92 92 _ = self.request.translate
93 93
94 94 repo = self.db_repo
95 95 if repo.enable_locking and repo.locked[0]:
96 96 h.flash(_('This repository has been locked by %s on %s')
97 97 % (h.person_by_id(repo.locked[0]),
98 98 h.format_date(h.time_to_datetime(repo.locked[1]))),
99 99 'warning')
100 100 files_url = h.route_path(
101 101 'repo_files:default_path',
102 102 repo_name=self.db_repo_name, commit_id=commit_id)
103 103 raise HTTPFound(files_url)
104 104
105 105 def forbid_non_head(self, is_head, f_path, commit_id='tip', json_mode=False):
106 106 _ = self.request.translate
107 107
108 108 if not is_head:
109 109 message = _('Cannot modify file. '
110 110                         'Given commit `{}` is not the head of a branch.').format(commit_id)
111 111 h.flash(message, category='warning')
112 112
113 113 if json_mode:
114 114 return message
115 115
116 116 files_url = h.route_path(
117 117 'repo_files', repo_name=self.db_repo_name, commit_id=commit_id,
118 118 f_path=f_path)
119 119 raise HTTPFound(files_url)
120 120
121 121 def check_branch_permission(self, branch_name, commit_id='tip', json_mode=False):
122 122 _ = self.request.translate
123 123
124 124 rule, branch_perm = self._rhodecode_user.get_rule_and_branch_permission(
125 125 self.db_repo_name, branch_name)
126 126 if branch_perm and branch_perm not in ['branch.push', 'branch.push_force']:
127 127 message = _('Branch `{}` changes forbidden by rule {}.').format(
128 128 h.escape(branch_name), h.escape(rule))
129 129 h.flash(message, 'warning')
130 130
131 131 if json_mode:
132 132 return message
133 133
134 134 files_url = h.route_path(
135 135 'repo_files:default_path', repo_name=self.db_repo_name, commit_id=commit_id)
136 136
137 137 raise HTTPFound(files_url)
138 138
139 139 def _get_commit_and_path(self):
140 140 default_commit_id = self.db_repo.landing_ref_name
141 141 default_f_path = '/'
142 142
143 143 commit_id = self.request.matchdict.get(
144 144 'commit_id', default_commit_id)
145 145 f_path = self._get_f_path(self.request.matchdict, default_f_path)
146 146 return commit_id, f_path
147 147
148 148 def _get_default_encoding(self, c):
149 149 enc_list = getattr(c, 'default_encodings', [])
150 150 return enc_list[0] if enc_list else 'UTF-8'
151 151
152 152 def _get_commit_or_redirect(self, commit_id, redirect_after=True):
153 153 """
154 154         This is a safe way to get a commit. If an error occurs, it redirects
155 155         to tip with a proper message.
156 156
157 157 :param commit_id: id of commit to fetch
158 158 :param redirect_after: toggle redirection
159 159 """
160 160 _ = self.request.translate
161 161
162 162 try:
163 163 return self.rhodecode_vcs_repo.get_commit(commit_id)
164 164 except EmptyRepositoryError:
165 165 if not redirect_after:
166 166 return None
167 167
168 168 _url = h.route_path(
169 169 'repo_files_add_file',
170 170 repo_name=self.db_repo_name, commit_id=0, f_path='')
171 171
172 172 if h.HasRepoPermissionAny(
173 173 'repository.write', 'repository.admin')(self.db_repo_name):
174 174 add_new = h.link_to(
175 175 _('Click here to add a new file.'), _url, class_="alert-link")
176 176 else:
177 177 add_new = ""
178 178
179 179 h.flash(h.literal(
180 180 _('There are no files yet. %s') % add_new), category='warning')
181 181 raise HTTPFound(
182 182 h.route_path('repo_summary', repo_name=self.db_repo_name))
183 183
184 184 except (CommitDoesNotExistError, LookupError) as e:
185 185 msg = _('No such commit exists for this repository. Commit: {}').format(commit_id)
186 186 h.flash(msg, category='error')
187 187 raise HTTPNotFound()
188 188 except RepositoryError as e:
189 h.flash(safe_str(h.escape(e)), category='error')
189 h.flash(h.escape(safe_str(e)), category='error')
190 190 raise HTTPNotFound()
191 191
192 192 def _get_filenode_or_redirect(self, commit_obj, path):
193 193 """
194 194         Returns file_node. If an error occurs or the given path is a
195 195         directory, it'll redirect to the top level path.
196 196 """
197 197 _ = self.request.translate
198 198
199 199 try:
200 200 file_node = commit_obj.get_node(path)
201 201 if file_node.is_dir():
202 202 raise RepositoryError('The given path is a directory')
203 203 except CommitDoesNotExistError:
204 204 log.exception('No such commit exists for this repository')
205 205 h.flash(_('No such commit exists for this repository'), category='error')
206 206 raise HTTPNotFound()
207 207 except RepositoryError as e:
208 208 log.warning('Repository error while fetching filenode `%s`. Err:%s', path, e)
209 h.flash(safe_str(h.escape(e)), category='error')
209 h.flash(h.escape(safe_str(e)), category='error')
210 210 raise HTTPNotFound()
211 211
212 212 return file_node
213 213
214 214 def _is_valid_head(self, commit_id, repo, landing_ref):
215 215 branch_name = sha_commit_id = ''
216 216 is_head = False
217 217 log.debug('Checking if commit_id `%s` is a head for %s.', commit_id, repo)
218 218
219 219 for _branch_name, branch_commit_id in repo.branches.items():
220 220 # simple case we pass in branch name, it's a HEAD
221 221 if commit_id == _branch_name:
222 222 is_head = True
223 223 branch_name = _branch_name
224 224 sha_commit_id = branch_commit_id
225 225 break
226 226 # case when we pass in full sha commit_id, which is a head
227 227 elif commit_id == branch_commit_id:
228 228 is_head = True
229 229 branch_name = _branch_name
230 230 sha_commit_id = branch_commit_id
231 231 break
232 232
233 233 if h.is_svn(repo) and not repo.is_empty():
234 234 # Note: Subversion only has one head.
235 235 if commit_id == repo.get_commit(commit_idx=-1).raw_id:
236 236 is_head = True
237 237 return branch_name, sha_commit_id, is_head
238 238
239 239         # branches were checked; now we only need to resolve the branch/commit_sha
240 240 if repo.is_empty():
241 241 is_head = True
242 242 branch_name = landing_ref
243 243 sha_commit_id = EmptyCommit().raw_id
244 244 else:
245 245 commit = repo.get_commit(commit_id=commit_id)
246 246 if commit:
247 247 branch_name = commit.branch
248 248 sha_commit_id = commit.raw_id
249 249
250 250 return branch_name, sha_commit_id, is_head
251 251
252 252 def _get_tree_at_commit(self, c, commit_id, f_path, full_load=False, at_rev=None):
253 253
254 254 repo_id = self.db_repo.repo_id
255 255 force_recache = self.get_recache_flag()
256 256
257 257 cache_seconds = safe_int(
258 258 rhodecode.CONFIG.get('rc_cache.cache_repo.expiration_time'))
259 259 cache_on = not force_recache and cache_seconds > 0
260 260 log.debug(
261 261 'Computing FILE TREE for repo_id %s commit_id `%s` and path `%s`'
262 262             ' with caching: %s [TTL: %ss]' % (
263 263 repo_id, commit_id, f_path, cache_on, cache_seconds or 0))
264 264
265 265 cache_namespace_uid = 'cache_repo.{}'.format(repo_id)
266 266 region = rc_cache.get_or_create_region('cache_repo', cache_namespace_uid)
267 267
268 268 @region.conditional_cache_on_arguments(namespace=cache_namespace_uid, condition=cache_on)
269 269 def compute_file_tree(ver, _name_hash, _repo_id, _commit_id, _f_path, _full_load, _at_rev):
270 270 log.debug('Generating cached file tree at ver:%s for repo_id: %s, %s, %s',
271 271 ver, _repo_id, _commit_id, _f_path)
272 272
273 273 c.full_load = _full_load
274 274 return render(
275 275 'rhodecode:templates/files/files_browser_tree.mako',
276 276 self._get_template_context(c), self.request, _at_rev)
277 277
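        # FILE_TREE_CACHE_VER is passed as the first argument and so becomes
        # part of the cache key; bumping it invalidates every previously
        # cached file tree at once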
278 278 return compute_file_tree(
279 279 rc_cache.FILE_TREE_CACHE_VER, self.db_repo.repo_name_hash,
280 280 self.db_repo.repo_id, commit_id, f_path, full_load, at_rev)
281 281
282 282 def _get_archive_spec(self, fname):
283 283 log.debug('Detecting archive spec for: `%s`', fname)
284 284
285 285 fileformat = None
286 286 ext = None
287 287 content_type = None
288 288 for a_type, content_type, extension in settings.ARCHIVE_SPECS:
289 289
290 290 if fname.endswith(extension):
291 291 fileformat = a_type
292 292 log.debug('archive is of type: %s', fileformat)
293 293 ext = extension
294 294 break
295 295
296 296 if not fileformat:
297 297 raise ValueError()
298 298
299 299         # the leftover part of the whole fname is the commit id
300 300 commit_id = fname[:-len(ext)]
301 301
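        # e.g. for fname 'deadbeef.tar.gz' this would return something like
        # ('deadbeef', '.tar.gz', 'tgz', 'application/x-gzip'), assuming
        # ARCHIVE_SPECS contains a matching ('tgz', ..., '.tar.gz') entry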
302 302 return commit_id, ext, fileformat, content_type
303 303
304 304 def create_pure_path(self, *parts):
305 305 # Split paths and sanitize them, removing any ../ etc
306 306 sanitized_path = [
307 307 x for x in pathlib2.PurePath(*parts).parts
308 308 if x not in ['.', '..']]
309 309
310 310 pure_path = pathlib2.PurePath(*sanitized_path)
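        # e.g. create_pure_path('a', '../b', 'c') yields PurePath('a/b/c'),
        # since the '..' segment is dropped during sanitization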
311 311 return pure_path
312 312
313 313 def _is_lf_enabled(self, target_repo):
314 314 lf_enabled = False
315 315
316 316 lf_key_for_vcs_map = {
317 317 'hg': 'extensions_largefiles',
318 318 'git': 'vcs_git_lfs_enabled'
319 319 }
320 320
321 321 lf_key_for_vcs = lf_key_for_vcs_map.get(target_repo.repo_type)
322 322
323 323 if lf_key_for_vcs:
324 324 lf_enabled = self._get_repo_setting(target_repo, lf_key_for_vcs)
325 325
326 326 return lf_enabled
327 327
328 328 def _get_archive_name(self, db_repo_name, commit_sha, ext, subrepos=False, path_sha='', with_hash=True):
329 329 # original backward compat name of archive
330 330 clean_name = safe_str(db_repo_name.replace('/', '_'))
331 331
332 332         # e.g. vcsserver.zip
333 333         # e.g. vcsserver-abcdefgh.zip
334 334         # e.g. vcsserver-abcdefgh-defghijk.zip
335 335 archive_name = '{}{}{}{}{}{}'.format(
336 336 clean_name,
337 337 '-sub' if subrepos else '',
338 338 commit_sha,
339 339 '-{}'.format('plain') if not with_hash else '',
340 340 '-{}'.format(path_sha) if path_sha else '',
341 341 ext)
342 342 return archive_name
343 343
344 344 @LoginRequired()
345 345 @HasRepoPermissionAnyDecorator(
346 346 'repository.read', 'repository.write', 'repository.admin')
347 347 def repo_archivefile(self):
348 348 # archive cache config
349 349 from rhodecode import CONFIG
350 350 _ = self.request.translate
351 351 self.load_default_context()
352 352 default_at_path = '/'
353 353 fname = self.request.matchdict['fname']
354 354 subrepos = self.request.GET.get('subrepos') == 'true'
355 355 with_hash = str2bool(self.request.GET.get('with_hash', '1'))
356 356 at_path = self.request.GET.get('at_path') or default_at_path
357 357
358 358 if not self.db_repo.enable_downloads:
359 359 return Response(_('Downloads disabled'))
360 360
361 361 try:
362 362 commit_id, ext, fileformat, content_type = \
363 363 self._get_archive_spec(fname)
364 364 except ValueError:
365 365 return Response(_('Unknown archive type for: `{}`').format(
366 366 h.escape(fname)))
367 367
368 368 try:
369 369 commit = self.rhodecode_vcs_repo.get_commit(commit_id)
370 370 except CommitDoesNotExistError:
371 371 return Response(_('Unknown commit_id {}').format(
372 372 h.escape(commit_id)))
373 373 except EmptyRepositoryError:
374 374 return Response(_('Empty repository'))
375 375
376 376         # we used a ref, or a shorter version; let's redirect the client to use the explicit hash
377 377 if commit_id != commit.raw_id:
378 378             fname = '{}{}'.format(commit.raw_id, ext)
379 379 raise HTTPFound(self.request.current_route_path(fname=fname))
380 380
381 381 try:
382 382 at_path = commit.get_node(at_path).path or default_at_path
383 383 except Exception:
384 384 return Response(_('No node at path {} for this repository').format(at_path))
385 385
386 386 # path sha is part of subdir
387 387 path_sha = ''
388 388 if at_path != default_at_path:
389 389 path_sha = sha1(at_path)[:8]
390 390 short_sha = '-{}'.format(safe_str(commit.short_id))
391 391 # used for cache etc
392 392 archive_name = self._get_archive_name(
393 393 self.db_repo_name, commit_sha=short_sha, ext=ext, subrepos=subrepos,
394 394 path_sha=path_sha, with_hash=with_hash)
395 395
396 396 if not with_hash:
397 397 short_sha = ''
398 398 path_sha = ''
399 399
400 400         # the archive name the end client gets served
401 401 response_archive_name = self._get_archive_name(
402 402 self.db_repo_name, commit_sha=short_sha, ext=ext, subrepos=subrepos,
403 403 path_sha=path_sha, with_hash=with_hash)
404 404 # remove extension from our archive directory name
405 405 archive_dir_name = response_archive_name[:-len(ext)]
406 406
407 407 use_cached_archive = False
408 408 archive_cache_dir = CONFIG.get('archive_cache_dir')
409 409 archive_cache_enabled = archive_cache_dir and not self.request.GET.get('no_cache')
410 410 cached_archive_path = None
411 411
412 412 if archive_cache_enabled:
413 413             # check if it's ok to write, and re-create the archive cache dir
414 414 if not os.path.isdir(CONFIG['archive_cache_dir']):
415 415 os.makedirs(CONFIG['archive_cache_dir'])
416 416
417 417 cached_archive_path = os.path.join(
418 418 CONFIG['archive_cache_dir'], archive_name)
419 419 if os.path.isfile(cached_archive_path):
420 420 log.debug('Found cached archive in %s', cached_archive_path)
421 421 fd, archive = None, cached_archive_path
422 422 use_cached_archive = True
423 423 else:
424 424 log.debug('Archive %s is not yet cached', archive_name)
425 425
426 426         # generate a new archive, as the previous one was not found in the cache
427 427 if not use_cached_archive:
428 428 _dir = os.path.abspath(archive_cache_dir) if archive_cache_dir else None
429 429 fd, archive = tempfile.mkstemp(dir=_dir)
430 430 log.debug('Creating new temp archive in %s', archive)
431 431 try:
432 432 commit.archive_repo(archive, archive_dir_name=archive_dir_name,
433 433 kind=fileformat, subrepos=subrepos,
434 434 archive_at_path=at_path)
435 435 except ImproperArchiveTypeError:
436 436 return _('Unknown archive type')
437 437 if archive_cache_enabled:
438 438 # if we generated the archive and we have cache enabled
439 439 # let's use this for future
440 440 log.debug('Storing new archive in %s', cached_archive_path)
441 441 shutil.move(archive, cached_archive_path)
442 442 archive = cached_archive_path
443 443
444 444 # store download action
445 445 audit_logger.store_web(
446 446 'repo.archive.download', action_data={
447 447 'user_agent': self.request.user_agent,
448 448 'archive_name': archive_name,
449 449 'archive_spec': fname,
450 450 'archive_cached': use_cached_archive},
451 451 user=self._rhodecode_user,
452 452 repo=self.db_repo,
453 453 commit=True
454 454 )
455 455
456 456 def get_chunked_archive(archive_path):
457 457 with open(archive_path, 'rb') as stream:
458 458 while True:
459 459 data = stream.read(16 * 1024)
460 460 if not data:
461 461                     if fd:  # fd means we used a temporary file
462 462 os.close(fd)
463 463 if not archive_cache_enabled:
464 464 log.debug('Destroying temp archive %s', archive_path)
465 465 os.remove(archive_path)
466 466 break
467 467 yield data
468 468
469 469 response = Response(app_iter=get_chunked_archive(archive))
470 470 response.content_disposition = str('attachment; filename=%s' % response_archive_name)
471 471 response.content_type = str(content_type)
472 472
473 473 return response
474 474
475 475 def _get_file_node(self, commit_id, f_path):
476 476 if commit_id not in ['', None, 'None', '0' * 12, '0' * 40]:
477 477 commit = self.rhodecode_vcs_repo.get_commit(commit_id=commit_id)
478 478 try:
479 479 node = commit.get_node(f_path)
480 480 if node.is_dir():
481 481                     raise NodeError('%s path is a %s, not a file'
482 482 % (node, type(node)))
483 483 except NodeDoesNotExistError:
484 484 commit = EmptyCommit(
485 485 commit_id=commit_id,
486 486 idx=commit.idx,
487 487 repo=commit.repository,
488 488 alias=commit.repository.alias,
489 489 message=commit.message,
490 490 author=commit.author,
491 491 date=commit.date)
492 492 node = FileNode(f_path, '', commit=commit)
493 493 else:
494 494 commit = EmptyCommit(
495 495 repo=self.rhodecode_vcs_repo,
496 496 alias=self.rhodecode_vcs_repo.alias)
497 497 node = FileNode(f_path, '', commit=commit)
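        # when the path is missing at the given commit (or the commit id is
        # empty), an empty FileNode bound to a synthetic commit is returned,
        # so diffs against the missing side still render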
498 498 return node
499 499
500 500 @LoginRequired()
501 501 @HasRepoPermissionAnyDecorator(
502 502 'repository.read', 'repository.write', 'repository.admin')
503 503 def repo_files_diff(self):
504 504 c = self.load_default_context()
505 505 f_path = self._get_f_path(self.request.matchdict)
506 506 diff1 = self.request.GET.get('diff1', '')
507 507 diff2 = self.request.GET.get('diff2', '')
508 508
509 509 path1, diff1 = parse_path_ref(diff1, default_path=f_path)
510 510
511 511 ignore_whitespace = str2bool(self.request.GET.get('ignorews'))
512 512 line_context = self.request.GET.get('context', 3)
513 513
514 514 if not any((diff1, diff2)):
515 515 h.flash(
516 516 'Need query parameter "diff1" or "diff2" to generate a diff.',
517 517 category='error')
518 518 raise HTTPBadRequest()
519 519
520 520 c.action = self.request.GET.get('diff')
521 521 if c.action not in ['download', 'raw']:
522 522 compare_url = h.route_path(
523 523 'repo_compare',
524 524 repo_name=self.db_repo_name,
525 525 source_ref_type='rev',
526 526 source_ref=diff1,
527 527 target_repo=self.db_repo_name,
528 528 target_ref_type='rev',
529 529 target_ref=diff2,
530 530 _query=dict(f_path=f_path))
531 531 # redirect to new view if we render diff
532 532 raise HTTPFound(compare_url)
533 533
534 534 try:
535 535 node1 = self._get_file_node(diff1, path1)
536 536 node2 = self._get_file_node(diff2, f_path)
537 537 except (RepositoryError, NodeError):
538 538 log.exception("Exception while trying to get node from repository")
539 539 raise HTTPFound(
540 540 h.route_path('repo_files', repo_name=self.db_repo_name,
541 541 commit_id='tip', f_path=f_path))
542 542
543 543 if all(isinstance(node.commit, EmptyCommit)
544 544 for node in (node1, node2)):
545 545 raise HTTPNotFound()
546 546
547 547 c.commit_1 = node1.commit
548 548 c.commit_2 = node2.commit
549 549
550 550 if c.action == 'download':
551 551 _diff = diffs.get_gitdiff(node1, node2,
552 552 ignore_whitespace=ignore_whitespace,
553 553 context=line_context)
554 554 diff = diffs.DiffProcessor(_diff, format='gitdiff')
555 555
556 556 response = Response(self.path_filter.get_raw_patch(diff))
557 557 response.content_type = 'text/plain'
558 558 response.content_disposition = (
559 559 'attachment; filename=%s_%s_vs_%s.diff' % (f_path, diff1, diff2)
560 560 )
561 561 charset = self._get_default_encoding(c)
562 562 if charset:
563 563 response.charset = charset
564 564 return response
565 565
566 566 elif c.action == 'raw':
567 567 _diff = diffs.get_gitdiff(node1, node2,
568 568 ignore_whitespace=ignore_whitespace,
569 569 context=line_context)
570 570 diff = diffs.DiffProcessor(_diff, format='gitdiff')
571 571
572 572 response = Response(self.path_filter.get_raw_patch(diff))
573 573 response.content_type = 'text/plain'
574 574 charset = self._get_default_encoding(c)
575 575 if charset:
576 576 response.charset = charset
577 577 return response
578 578
579 579 # in case we ever end up here
580 580 raise HTTPNotFound()
581 581
582 582 @LoginRequired()
583 583 @HasRepoPermissionAnyDecorator(
584 584 'repository.read', 'repository.write', 'repository.admin')
585 585 def repo_files_diff_2way_redirect(self):
586 586 """
587 587 Kept only to make OLD links work
588 588 """
589 589 f_path = self._get_f_path_unchecked(self.request.matchdict)
590 590 diff1 = self.request.GET.get('diff1', '')
591 591 diff2 = self.request.GET.get('diff2', '')
592 592
593 593 if not any((diff1, diff2)):
594 594 h.flash(
595 595 'Need query parameter "diff1" or "diff2" to generate a diff.',
596 596 category='error')
597 597 raise HTTPBadRequest()
598 598
599 599 compare_url = h.route_path(
600 600 'repo_compare',
601 601 repo_name=self.db_repo_name,
602 602 source_ref_type='rev',
603 603 source_ref=diff1,
604 604 target_ref_type='rev',
605 605 target_ref=diff2,
606 606 _query=dict(f_path=f_path, diffmode='sideside',
607 607 target_repo=self.db_repo_name,))
608 608 raise HTTPFound(compare_url)
609 609
610 610 @LoginRequired()
611 611 def repo_files_default_commit_redirect(self):
612 612 """
613 613 Special page that redirects to the files landing page, based on the
614 614 default commit of the repository
615 615 """
616 616 c = self.load_default_context()
617 617 ref_name = c.rhodecode_db_repo.landing_ref_name
618 618 landing_url = h.repo_files_by_ref_url(
619 619 c.rhodecode_db_repo.repo_name,
620 620 c.rhodecode_db_repo.repo_type,
621 621 f_path='',
622 622 ref_name=ref_name,
623 623 commit_id='tip',
624 624 query=dict(at=ref_name)
625 625 )
626 626
627 627 raise HTTPFound(landing_url)
628 628
629 629 @LoginRequired()
630 630 @HasRepoPermissionAnyDecorator(
631 631 'repository.read', 'repository.write', 'repository.admin')
632 632 def repo_files(self):
633 633 c = self.load_default_context()
634 634
635 635 view_name = getattr(self.request.matched_route, 'name', None)
636 636
637 637 c.annotate = view_name == 'repo_files:annotated'
638 638 # default is False, but .rst/.md files are auto-rendered later on; the
639 639 # auto rendering can be overridden by setting this GET flag
640 640 c.renderer = view_name == 'repo_files:rendered' or \
641 641 not self.request.GET.get('no-render', False)
642 642
643 643 commit_id, f_path = self._get_commit_and_path()
644 644
645 645 c.commit = self._get_commit_or_redirect(commit_id)
646 646 c.branch = self.request.GET.get('branch', None)
647 647 c.f_path = f_path
648 648 at_rev = self.request.GET.get('at')
649 649
650 650 # prev link
651 651 try:
652 652 prev_commit = c.commit.prev(c.branch)
653 653 c.prev_commit = prev_commit
654 654 c.url_prev = h.route_path(
655 655 'repo_files', repo_name=self.db_repo_name,
656 656 commit_id=prev_commit.raw_id, f_path=f_path)
657 657 if c.branch:
658 658 c.url_prev += '?branch=%s' % c.branch
659 659 except (CommitDoesNotExistError, VCSError):
660 660 c.url_prev = '#'
661 661 c.prev_commit = EmptyCommit()
662 662
663 663 # next link
664 664 try:
665 665 next_commit = c.commit.next(c.branch)
666 666 c.next_commit = next_commit
667 667 c.url_next = h.route_path(
668 668 'repo_files', repo_name=self.db_repo_name,
669 669 commit_id=next_commit.raw_id, f_path=f_path)
670 670 if c.branch:
671 671 c.url_next += '?branch=%s' % c.branch
672 672 except (CommitDoesNotExistError, VCSError):
673 673 c.url_next = '#'
674 674 c.next_commit = EmptyCommit()
675 675
676 676 # files or dirs
677 677 try:
678 678 c.file = c.commit.get_node(f_path)
679 679 c.file_author = True
680 680 c.file_tree = ''
681 681
682 682 # load file content
683 683 if c.file.is_file():
684 684 c.lf_node = {}
685 685
686 686 has_lf_enabled = self._is_lf_enabled(self.db_repo)
687 687 if has_lf_enabled:
688 688 c.lf_node = c.file.get_largefile_node()
689 689
690 690 c.file_source_page = 'true'
691 691 c.file_last_commit = c.file.last_commit
692 692
693 693 c.file_size_too_big = c.file.size > c.visual.cut_off_limit_file
694 694
695 695 if not (c.file_size_too_big or c.file.is_binary):
696 696 if c.annotate: # annotation has precedence over renderer
697 697 c.annotated_lines = filenode_as_annotated_lines_tokens(
698 698 c.file
699 699 )
700 700 else:
701 701 c.renderer = (
702 702 c.renderer and h.renderer_from_filename(c.file.path)
703 703 )
704 704 if not c.renderer:
705 705 c.lines = filenode_as_lines_tokens(c.file)
706 706
707 707 _branch_name, _sha_commit_id, is_head = \
708 708 self._is_valid_head(commit_id, self.rhodecode_vcs_repo,
709 709 landing_ref=self.db_repo.landing_ref_name)
710 710 c.on_branch_head = is_head
711 711
712 712 branch = c.commit.branch if (
713 713 c.commit.branch and '/' not in c.commit.branch) else None
714 714 c.branch_or_raw_id = branch or c.commit.raw_id
715 715 c.branch_name = c.commit.branch or h.short_id(c.commit.raw_id)
716 716
717 717 author = c.file_last_commit.author
718 718 c.authors = [[
719 719 h.email(author),
720 720 h.person(author, 'username_or_name_or_email'),
721 721 1
722 722 ]]
723 723
724 724 else: # load tree content at path
725 725 c.file_source_page = 'false'
726 726 c.authors = []
727 727 # this loads a simple tree without metadata to speed things up;
728 728 # later, via ajax, we call repo_nodetree_full and fetch the whole tree
729 729 c.file_tree = self._get_tree_at_commit(c, c.commit.raw_id, f_path, at_rev=at_rev)
730 730
731 731 c.readme_data, c.readme_file = \
732 732 self._get_readme_data(self.db_repo, c.visual.default_renderer,
733 733 c.commit.raw_id, f_path)
734 734
735 735 except RepositoryError as e:
736 h.flash(safe_str(h.escape(e)), category='error')
736 h.flash(h.escape(safe_str(e)), category='error')
737 737 raise HTTPNotFound()
738 738
739 739 if self.request.environ.get('HTTP_X_PJAX'):
740 740 html = render('rhodecode:templates/files/files_pjax.mako',
741 741 self._get_template_context(c), self.request)
742 742 else:
743 743 html = render('rhodecode:templates/files/files.mako',
744 744 self._get_template_context(c), self.request)
745 745 return Response(html)
746 746
747 747 @HasRepoPermissionAnyDecorator(
748 748 'repository.read', 'repository.write', 'repository.admin')
749 749 def repo_files_annotated_previous(self):
750 750 self.load_default_context()
751 751
752 752 commit_id, f_path = self._get_commit_and_path()
753 753 commit = self._get_commit_or_redirect(commit_id)
754 754 prev_commit_id = commit.raw_id
755 755 line_anchor = self.request.GET.get('line_anchor')
756 756 is_file = False
757 757 try:
758 758 _file = commit.get_node(f_path)
759 759 is_file = _file.is_file()
760 760 except (NodeDoesNotExistError, CommitDoesNotExistError, VCSError):
761 761 pass
762 762
763 763 if is_file:
764 764 history = commit.get_path_history(f_path)
765 765 prev_commit_id = history[1].raw_id \
766 766 if len(history) > 1 else prev_commit_id
767 767 prev_url = h.route_path(
768 768 'repo_files:annotated', repo_name=self.db_repo_name,
769 769 commit_id=prev_commit_id, f_path=f_path,
770 770 _anchor='L{}'.format(line_anchor))
771 771
772 772 raise HTTPFound(prev_url)
773 773
774 774 @LoginRequired()
775 775 @HasRepoPermissionAnyDecorator(
776 776 'repository.read', 'repository.write', 'repository.admin')
777 777 def repo_nodetree_full(self):
778 778 """
779 779 Returns rendered HTML of the file tree, including commit date,
780 780 author and commit_id for the specified combination of
781 781 repo, commit_id and file path
782 782 """
783 783 c = self.load_default_context()
784 784
785 785 commit_id, f_path = self._get_commit_and_path()
786 786 commit = self._get_commit_or_redirect(commit_id)
787 787 try:
788 788 dir_node = commit.get_node(f_path)
789 789 except RepositoryError as e:
790 790 return Response('error: {}'.format(h.escape(safe_str(e))))
791 791
792 792 if dir_node.is_file():
793 793 return Response('')
794 794
795 795 c.file = dir_node
796 796 c.commit = commit
797 797 at_rev = self.request.GET.get('at')
798 798
799 799 html = self._get_tree_at_commit(
800 800 c, commit.raw_id, dir_node.path, full_load=True, at_rev=at_rev)
801 801
802 802 return Response(html)
803 803
804 804 def _get_attachement_headers(self, f_path):
805 805 f_name = safe_str(f_path.split(Repository.NAME_SEP)[-1])
806 806 safe_path = f_name.replace('"', '\\"')
807 807 encoded_path = urllib.quote(f_name)
808 808
809 809 return "attachment; " \
810 810 "filename=\"{}\"; " \
811 811 "filename*=UTF-8\'\'{}".format(safe_path, encoded_path)
812 812
813 813 @LoginRequired()
814 814 @HasRepoPermissionAnyDecorator(
815 815 'repository.read', 'repository.write', 'repository.admin')
816 816 def repo_file_raw(self):
817 817 """
818 818 Action for showing a file as raw; some mimetypes are "rendered"
819 819 inline, e.g. images and icons.
820 820 """
821 821 c = self.load_default_context()
822 822
823 823 commit_id, f_path = self._get_commit_and_path()
824 824 commit = self._get_commit_or_redirect(commit_id)
825 825 file_node = self._get_filenode_or_redirect(commit, f_path)
826 826
827 827 raw_mimetype_mapping = {
828 828 # map original mimetype to a mimetype used for "show as raw"
829 829 # you can also provide a content-disposition to override the
830 830 # default "attachment" disposition.
831 831 # orig_type: (new_type, new_dispo)
832 832
833 833 # show images inline:
834 834 # Do not re-add SVG: it is unsafe and permits XSS attacks. One can
835 835 # for example render an SVG with javascript inside or even render
836 836 # HTML.
837 837 'image/x-icon': ('image/x-icon', 'inline'),
838 838 'image/png': ('image/png', 'inline'),
839 839 'image/gif': ('image/gif', 'inline'),
840 840 'image/jpeg': ('image/jpeg', 'inline'),
841 841 'application/pdf': ('application/pdf', 'inline'),
842 842 }
843 843
844 844 mimetype = file_node.mimetype
845 845 try:
846 846 mimetype, disposition = raw_mimetype_mapping[mimetype]
847 847 except KeyError:
848 848 # we don't know anything special about this, handle it safely
849 849 if file_node.is_binary:
850 850 # do same as download raw for binary files
851 851 mimetype, disposition = 'application/octet-stream', 'attachment'
852 852 else:
853 853 # do not just use the original mimetype, but force text/plain,
854 854 # otherwise it would serve text/html and that might be unsafe.
855 855 # Note: underlying vcs library fakes text/plain mimetype if the
856 856 # mimetype cannot be determined and it thinks it is not
857 857 # binary. This might lead to erroneous text display in some
858 858 # cases, but helps in other cases, like with text files
859 859 # without extension.
860 860 mimetype, disposition = 'text/plain', 'inline'
861 861
862 862 if disposition == 'attachment':
863 863 disposition = self._get_attachement_headers(f_path)
864 864
865 865 stream_content = file_node.stream_bytes()
866 866
867 867 response = Response(app_iter=stream_content)
868 868 response.content_disposition = disposition
869 869 response.content_type = mimetype
870 870
871 871 charset = self._get_default_encoding(c)
872 872 if charset:
873 873 response.charset = charset
874 874
875 875 return response
876 876
877 877 @LoginRequired()
878 878 @HasRepoPermissionAnyDecorator(
879 879 'repository.read', 'repository.write', 'repository.admin')
880 880 def repo_file_download(self):
881 881 c = self.load_default_context()
882 882
883 883 commit_id, f_path = self._get_commit_and_path()
884 884 commit = self._get_commit_or_redirect(commit_id)
885 885 file_node = self._get_filenode_or_redirect(commit, f_path)
886 886
887 887 if self.request.GET.get('lf'):
888 888 # only if the 'lf' GET flag is passed do we download this file
889 889 # as LFS/Largefile
890 890 lf_node = file_node.get_largefile_node()
891 891 if lf_node:
892 892 # overwrite our pointer with the REAL large-file
893 893 file_node = lf_node
894 894
895 895 disposition = self._get_attachement_headers(f_path)
896 896
897 897 stream_content = file_node.stream_bytes()
898 898
899 899 response = Response(app_iter=stream_content)
900 900 response.content_disposition = disposition
901 901 response.content_type = file_node.mimetype
902 902
903 903 charset = self._get_default_encoding(c)
904 904 if charset:
905 905 response.charset = charset
906 906
907 907 return response
908 908
909 909 def _get_nodelist_at_commit(self, repo_name, repo_id, commit_id, f_path):
910 910
911 911 cache_seconds = safe_int(
912 912 rhodecode.CONFIG.get('rc_cache.cache_repo.expiration_time'))
913 913 cache_on = cache_seconds > 0
914 914 log.debug(
915 915 'Computing FILE SEARCH for repo_id %s commit_id `%s` and path `%s` '
916 916 'with caching: %s [TTL: %ss]' % (
917 917 repo_id, commit_id, f_path, cache_on, cache_seconds or 0))
918 918
919 919 cache_namespace_uid = 'cache_repo.{}'.format(repo_id)
920 920 region = rc_cache.get_or_create_region('cache_repo', cache_namespace_uid)
921 921
922 922 @region.conditional_cache_on_arguments(namespace=cache_namespace_uid, condition=cache_on)
923 923 def compute_file_search(_name_hash, _repo_id, _commit_id, _f_path):
924 924 log.debug('Generating cached nodelist for repo_id:%s, %s, %s',
925 925 _repo_id, _commit_id, _f_path)
926 926 try:
927 927 _d, _f = ScmModel().get_quick_filter_nodes(repo_name, _commit_id, _f_path)
928 928 except (RepositoryError, CommitDoesNotExistError, Exception) as e:
929 929 log.exception(safe_str(e))
930 h.flash(safe_str(h.escape(e)), category='error')
930 h.flash(h.escape(safe_str(e)), category='error')
931 931 raise HTTPFound(h.route_path(
932 932 'repo_files', repo_name=self.db_repo_name,
933 933 commit_id='tip', f_path='/'))
934 934
935 935 return _d + _f
936 936
937 937 result = compute_file_search(self.db_repo.repo_name_hash, self.db_repo.repo_id,
938 938 commit_id, f_path)
939 939 return filter(lambda n: self.path_filter.path_access_allowed(n['name']), result)
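# Note on the caching above: entries live in the per-repo namespace
# 'cache_repo.<repo_id>'; with `condition=cache_on`, dogpile bypasses the
# cache entirely when the configured TTL is 0, so the node lookup then runs
# on every request.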
940 940
941 941 @LoginRequired()
942 942 @HasRepoPermissionAnyDecorator(
943 943 'repository.read', 'repository.write', 'repository.admin')
944 944 def repo_nodelist(self):
945 945 self.load_default_context()
946 946
947 947 commit_id, f_path = self._get_commit_and_path()
948 948 commit = self._get_commit_or_redirect(commit_id)
949 949
950 950 metadata = self._get_nodelist_at_commit(
951 951 self.db_repo_name, self.db_repo.repo_id, commit.raw_id, f_path)
952 952 return {'nodes': metadata}
953 953
954 954 def _create_references(self, branches_or_tags, symbolic_reference, f_path, ref_type):
955 955 items = []
956 956 for name, commit_id in branches_or_tags.items():
957 957 sym_ref = symbolic_reference(commit_id, name, f_path, ref_type)
958 958 items.append((sym_ref, name, ref_type))
959 959 return items
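# Shape of the returned items, using an invented branch map for
# illustration:
#
#   _create_references({'stable': 'abc123'}, self._symbolic_reference,
#                      'README.rst', 'branch')
#   # -> [('abc123', 'stable', 'branch')]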
960 960
961 961 def _symbolic_reference(self, commit_id, name, f_path, ref_type):
962 962 return commit_id
963 963
964 964 def _symbolic_reference_svn(self, commit_id, name, f_path, ref_type):
965 965 return commit_id
966 966
967 967 # NOTE(dan): old code we used in "diff" mode compare
968 968 new_f_path = vcspath.join(name, f_path)
969 969 return u'%s@%s' % (new_f_path, commit_id)
970 970
971 971 def _get_node_history(self, commit_obj, f_path, commits=None):
972 972 """
973 973 get commit history for given node
974 974
975 975 :param commit_obj: commit to calculate history for
976 976 :param f_path: path of the node to calculate history for
977 977 :param commits: if passed, don't calculate history and use the
978 978 commits defined in this list instead
979 979 """
980 980 _ = self.request.translate
981 981
982 982 # calculate history based on tip
983 983 tip = self.rhodecode_vcs_repo.get_commit()
984 984 if commits is None:
985 985 pre_load = ["author", "branch"]
986 986 try:
987 987 commits = tip.get_path_history(f_path, pre_load=pre_load)
988 988 except (NodeDoesNotExistError, CommitError):
989 989 # this node is not present at tip!
990 990 commits = commit_obj.get_path_history(f_path, pre_load=pre_load)
991 991
992 992 history = []
993 993 commits_group = ([], _("Changesets"))
994 994 for commit in commits:
995 995 branch = ' (%s)' % commit.branch if commit.branch else ''
996 996 n_desc = 'r%s:%s%s' % (commit.idx, commit.short_id, branch)
997 997 commits_group[0].append((commit.raw_id, n_desc, 'sha'))
998 998 history.append(commits_group)
999 999
1000 1000 symbolic_reference = self._symbolic_reference
1001 1001
1002 1002 if self.rhodecode_vcs_repo.alias == 'svn':
1003 1003 adjusted_f_path = RepoFilesView.adjust_file_path_for_svn(
1004 1004 f_path, self.rhodecode_vcs_repo)
1005 1005 if adjusted_f_path != f_path:
1006 1006 log.debug(
1007 1007 'Recognized svn tag or branch in file "%s", using svn '
1008 1008 'specific symbolic references', f_path)
1009 1009 f_path = adjusted_f_path
1010 1010 symbolic_reference = self._symbolic_reference_svn
1011 1011
1012 1012 branches = self._create_references(
1013 1013 self.rhodecode_vcs_repo.branches, symbolic_reference, f_path, 'branch')
1014 1014 branches_group = (branches, _("Branches"))
1015 1015
1016 1016 tags = self._create_references(
1017 1017 self.rhodecode_vcs_repo.tags, symbolic_reference, f_path, 'tag')
1018 1018 tags_group = (tags, _("Tags"))
1019 1019
1020 1020 history.append(branches_group)
1021 1021 history.append(tags_group)
1022 1022
1023 1023 return history, commits
1024 1024
1025 1025 @LoginRequired()
1026 1026 @HasRepoPermissionAnyDecorator(
1027 1027 'repository.read', 'repository.write', 'repository.admin')
1028 1028 def repo_file_history(self):
1029 1029 self.load_default_context()
1030 1030
1031 1031 commit_id, f_path = self._get_commit_and_path()
1032 1032 commit = self._get_commit_or_redirect(commit_id)
1033 1033 file_node = self._get_filenode_or_redirect(commit, f_path)
1034 1034
1035 1035 if file_node.is_file():
1036 1036 file_history, _hist = self._get_node_history(commit, f_path)
1037 1037
1038 1038 res = []
1039 1039 for section_items, section in file_history:
1040 1040 items = []
1041 1041 for obj_id, obj_text, obj_type in section_items:
1042 1042 at_rev = ''
1043 1043 if obj_type in ['branch', 'bookmark', 'tag']:
1044 1044 at_rev = obj_text
1045 1045 entry = {
1046 1046 'id': obj_id,
1047 1047 'text': obj_text,
1048 1048 'type': obj_type,
1049 1049 'at_rev': at_rev
1050 1050 }
1051 1051
1052 1052 items.append(entry)
1053 1053
1054 1054 res.append({
1055 1055 'text': section,
1056 1056 'children': items
1057 1057 })
1058 1058
1059 1059 data = {
1060 1060 'more': False,
1061 1061 'results': res
1062 1062 }
1063 1063 return data
1064 1064
1065 1065 log.warning('Cannot fetch history for directory')
1066 1066 raise HTTPBadRequest()
1067 1067
1068 1068 @LoginRequired()
1069 1069 @HasRepoPermissionAnyDecorator(
1070 1070 'repository.read', 'repository.write', 'repository.admin')
1071 1071 def repo_file_authors(self):
1072 1072 c = self.load_default_context()
1073 1073
1074 1074 commit_id, f_path = self._get_commit_and_path()
1075 1075 commit = self._get_commit_or_redirect(commit_id)
1076 1076 file_node = self._get_filenode_or_redirect(commit, f_path)
1077 1077
1078 1078 if not file_node.is_file():
1079 1079 raise HTTPBadRequest()
1080 1080
1081 1081 c.file_last_commit = file_node.last_commit
1082 1082 if self.request.GET.get('annotate') == '1':
1083 1083 # use _hist from annotation if annotation mode is on
1084 1084 commit_ids = set(x[1] for x in file_node.annotate)
1085 1085 _hist = (
1086 1086 self.rhodecode_vcs_repo.get_commit(commit_id)
1087 1087 for commit_id in commit_ids)
1088 1088 else:
1089 1089 _f_history, _hist = self._get_node_history(commit, f_path)
1090 1090 c.file_author = False
1091 1091
1092 1092 unique = collections.OrderedDict()
1093 1093 for commit in _hist:
1094 1094 author = commit.author
1095 1095 if author not in unique:
1096 1096 unique[commit.author] = [
1097 1097 h.email(author),
1098 1098 h.person(author, 'username_or_name_or_email'),
1099 1099 1 # counter
1100 1100 ]
1101 1101
1102 1102 else:
1103 1103 # increase counter
1104 1104 unique[commit.author][2] += 1
1105 1105
1106 1106 c.authors = [val for val in unique.values()]
1107 1107
1108 1108 return self._get_template_context(c)
1109 1109
1110 1110 @LoginRequired()
1111 1111 @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
1112 1112 def repo_files_check_head(self):
1113 1113 self.load_default_context()
1114 1114
1115 1115 commit_id, f_path = self._get_commit_and_path()
1116 1116 _branch_name, _sha_commit_id, is_head = \
1117 1117 self._is_valid_head(commit_id, self.rhodecode_vcs_repo,
1118 1118 landing_ref=self.db_repo.landing_ref_name)
1119 1119
1120 1120 new_path = self.request.POST.get('path')
1121 1121 operation = self.request.POST.get('operation')
1122 1122 path_exist = ''
1123 1123
1124 1124 if new_path and operation in ['create', 'upload']:
1125 1125 new_f_path = os.path.join(f_path.lstrip('/'), new_path)
1126 1126 try:
1127 1127 commit_obj = self.rhodecode_vcs_repo.get_commit(commit_id)
1128 1128 # NOTE(dan): construct whole path without leading /
1129 1129 file_node = commit_obj.get_node(new_f_path)
1130 1130 if file_node is not None:
1131 1131 path_exist = new_f_path
1132 1132 except EmptyRepositoryError:
1133 1133 pass
1134 1134 except Exception:
1135 1135 pass
1136 1136
1137 1137 return {
1138 1138 'branch': _branch_name,
1139 1139 'sha': _sha_commit_id,
1140 1140 'is_head': is_head,
1141 1141 'path_exists': path_exist
1142 1142 }
1143 1143
1144 1144 @LoginRequired()
1145 1145 @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
1146 1146 def repo_files_remove_file(self):
1147 1147 _ = self.request.translate
1148 1148 c = self.load_default_context()
1149 1149 commit_id, f_path = self._get_commit_and_path()
1150 1150
1151 1151 self._ensure_not_locked()
1152 1152 _branch_name, _sha_commit_id, is_head = \
1153 1153 self._is_valid_head(commit_id, self.rhodecode_vcs_repo,
1154 1154 landing_ref=self.db_repo.landing_ref_name)
1155 1155
1156 1156 self.forbid_non_head(is_head, f_path)
1157 1157 self.check_branch_permission(_branch_name)
1158 1158
1159 1159 c.commit = self._get_commit_or_redirect(commit_id)
1160 1160 c.file = self._get_filenode_or_redirect(c.commit, f_path)
1161 1161
1162 1162 c.default_message = _(
1163 1163 'Deleted file {} via RhodeCode Enterprise').format(f_path)
1164 1164 c.f_path = f_path
1165 1165
1166 1166 return self._get_template_context(c)
1167 1167
1168 1168 @LoginRequired()
1169 1169 @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
1170 1170 @CSRFRequired()
1171 1171 def repo_files_delete_file(self):
1172 1172 _ = self.request.translate
1173 1173
1174 1174 c = self.load_default_context()
1175 1175 commit_id, f_path = self._get_commit_and_path()
1176 1176
1177 1177 self._ensure_not_locked()
1178 1178 _branch_name, _sha_commit_id, is_head = \
1179 1179 self._is_valid_head(commit_id, self.rhodecode_vcs_repo,
1180 1180 landing_ref=self.db_repo.landing_ref_name)
1181 1181
1182 1182 self.forbid_non_head(is_head, f_path)
1183 1183 self.check_branch_permission(_branch_name)
1184 1184
1185 1185 c.commit = self._get_commit_or_redirect(commit_id)
1186 1186 c.file = self._get_filenode_or_redirect(c.commit, f_path)
1187 1187
1188 1188 c.default_message = _(
1189 1189 'Deleted file {} via RhodeCode Enterprise').format(f_path)
1190 1190 c.f_path = f_path
1191 1191 node_path = f_path
1192 1192 author = self._rhodecode_db_user.full_contact
1193 1193 message = self.request.POST.get('message') or c.default_message
1194 1194 try:
1195 1195 nodes = {
1196 1196 node_path: {
1197 1197 'content': ''
1198 1198 }
1199 1199 }
1200 1200 ScmModel().delete_nodes(
1201 1201 user=self._rhodecode_db_user.user_id, repo=self.db_repo,
1202 1202 message=message,
1203 1203 nodes=nodes,
1204 1204 parent_commit=c.commit,
1205 1205 author=author,
1206 1206 )
1207 1207
1208 1208 h.flash(
1209 1209 _('Successfully deleted file `{}`').format(
1210 1210 h.escape(f_path)), category='success')
1211 1211 except Exception:
1212 1212 log.exception('Error during commit operation')
1213 1213 h.flash(_('Error occurred during commit'), category='error')
1214 1214 raise HTTPFound(
1215 1215 h.route_path('repo_commit', repo_name=self.db_repo_name,
1216 1216 commit_id='tip'))
1217 1217
1218 1218 @LoginRequired()
1219 1219 @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
1220 1220 def repo_files_edit_file(self):
1221 1221 _ = self.request.translate
1222 1222 c = self.load_default_context()
1223 1223 commit_id, f_path = self._get_commit_and_path()
1224 1224
1225 1225 self._ensure_not_locked()
1226 1226 _branch_name, _sha_commit_id, is_head = \
1227 1227 self._is_valid_head(commit_id, self.rhodecode_vcs_repo,
1228 1228 landing_ref=self.db_repo.landing_ref_name)
1229 1229
1230 1230 self.forbid_non_head(is_head, f_path, commit_id=commit_id)
1231 1231 self.check_branch_permission(_branch_name, commit_id=commit_id)
1232 1232
1233 1233 c.commit = self._get_commit_or_redirect(commit_id)
1234 1234 c.file = self._get_filenode_or_redirect(c.commit, f_path)
1235 1235
1236 1236 if c.file.is_binary:
1237 1237 files_url = h.route_path(
1238 1238 'repo_files',
1239 1239 repo_name=self.db_repo_name,
1240 1240 commit_id=c.commit.raw_id, f_path=f_path)
1241 1241 raise HTTPFound(files_url)
1242 1242
1243 1243 c.default_message = _('Edited file {} via RhodeCode Enterprise').format(f_path)
1244 1244 c.f_path = f_path
1245 1245
1246 1246 return self._get_template_context(c)
1247 1247
1248 1248 @LoginRequired()
1249 1249 @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
1250 1250 @CSRFRequired()
1251 1251 def repo_files_update_file(self):
1252 1252 _ = self.request.translate
1253 1253 c = self.load_default_context()
1254 1254 commit_id, f_path = self._get_commit_and_path()
1255 1255
1256 1256 self._ensure_not_locked()
1257 1257
1258 1258 c.commit = self._get_commit_or_redirect(commit_id)
1259 1259 c.file = self._get_filenode_or_redirect(c.commit, f_path)
1260 1260
1261 1261 if c.file.is_binary:
1262 1262 raise HTTPFound(h.route_path('repo_files', repo_name=self.db_repo_name,
1263 1263 commit_id=c.commit.raw_id, f_path=f_path))
1264 1264
1265 1265 _branch_name, _sha_commit_id, is_head = \
1266 1266 self._is_valid_head(commit_id, self.rhodecode_vcs_repo,
1267 1267 landing_ref=self.db_repo.landing_ref_name)
1268 1268
1269 1269 self.forbid_non_head(is_head, f_path, commit_id=commit_id)
1270 1270 self.check_branch_permission(_branch_name, commit_id=commit_id)
1271 1271
1272 1272 c.default_message = _('Edited file {} via RhodeCode Enterprise').format(f_path)
1273 1273 c.f_path = f_path
1274 1274
1275 1275 old_content = c.file.content
1276 1276 sl = old_content.splitlines(1)
1277 1277 first_line = sl[0] if sl else ''
1278 1278
1279 1279 r_post = self.request.POST
1280 1280 # line endings: 0 - Unix, 1 - Mac, 2 - DOS
1281 1281 line_ending_mode = detect_mode(first_line, 0)
1282 1282 content = convert_line_endings(r_post.get('content', ''), line_ending_mode)
1283 1283
1284 1284 message = r_post.get('message') or c.default_message
1285 1285 org_node_path = c.file.unicode_path
1286 1286 filename = r_post['filename']
1287 1287
1288 1288 root_path = c.file.dir_path
1289 1289 pure_path = self.create_pure_path(root_path, filename)
1290 1290 node_path = safe_unicode(bytes(pure_path))
1291 1291
1292 1292 default_redirect_url = h.route_path('repo_commit', repo_name=self.db_repo_name,
1293 1293 commit_id=commit_id)
1294 1294 if content == old_content and node_path == org_node_path:
1295 1295 h.flash(_('No changes detected on {}').format(h.escape(org_node_path)),
1296 1296 category='warning')
1297 1297 raise HTTPFound(default_redirect_url)
1298 1298
1299 1299 try:
1300 1300 mapping = {
1301 1301 org_node_path: {
1302 1302 'org_filename': org_node_path,
1303 1303 'filename': node_path,
1304 1304 'content': content,
1305 1305 'lexer': '',
1306 1306 'op': 'mod',
1307 1307 'mode': c.file.mode
1308 1308 }
1309 1309 }
1310 1310
1311 1311 commit = ScmModel().update_nodes(
1312 1312 user=self._rhodecode_db_user.user_id,
1313 1313 repo=self.db_repo,
1314 1314 message=message,
1315 1315 nodes=mapping,
1316 1316 parent_commit=c.commit,
1317 1317 )
1318 1318
1319 1319 h.flash(_('Successfully committed changes to file `{}`').format(
1320 1320 h.escape(f_path)), category='success')
1321 1321 default_redirect_url = h.route_path(
1322 1322 'repo_commit', repo_name=self.db_repo_name, commit_id=commit.raw_id)
1323 1323
1324 1324 except Exception:
1325 1325 log.exception('Error occurred during commit')
1326 1326 h.flash(_('Error occurred during commit'), category='error')
1327 1327
1328 1328 raise HTTPFound(default_redirect_url)
1329 1329
1330 1330 @LoginRequired()
1331 1331 @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
1332 1332 def repo_files_add_file(self):
1333 1333 _ = self.request.translate
1334 1334 c = self.load_default_context()
1335 1335 commit_id, f_path = self._get_commit_and_path()
1336 1336
1337 1337 self._ensure_not_locked()
1338 1338
1339 1339 c.commit = self._get_commit_or_redirect(commit_id, redirect_after=False)
1340 1340 if c.commit is None:
1341 1341 c.commit = EmptyCommit(alias=self.rhodecode_vcs_repo.alias)
1342 1342
1343 1343 if self.rhodecode_vcs_repo.is_empty():
1344 1344 # for empty repository we cannot check for current branch, we rely on
1345 1345 # c.commit.branch instead
1346 1346 _branch_name, _sha_commit_id, is_head = c.commit.branch, '', True
1347 1347 else:
1348 1348 _branch_name, _sha_commit_id, is_head = \
1349 1349 self._is_valid_head(commit_id, self.rhodecode_vcs_repo,
1350 1350 landing_ref=self.db_repo.landing_ref_name)
1351 1351
1352 1352 self.forbid_non_head(is_head, f_path, commit_id=commit_id)
1353 1353 self.check_branch_permission(_branch_name, commit_id=commit_id)
1354 1354
1355 1355 c.default_message = (_('Added file via RhodeCode Enterprise'))
1356 1356 c.f_path = f_path.lstrip('/') # ensure relative path with no leading slash
1357 1357
1358 1358 return self._get_template_context(c)
1359 1359
1360 1360 @LoginRequired()
1361 1361 @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
1362 1362 @CSRFRequired()
1363 1363 def repo_files_create_file(self):
1364 1364 _ = self.request.translate
1365 1365 c = self.load_default_context()
1366 1366 commit_id, f_path = self._get_commit_and_path()
1367 1367
1368 1368 self._ensure_not_locked()
1369 1369
1370 1370 c.commit = self._get_commit_or_redirect(commit_id, redirect_after=False)
1371 1371 if c.commit is None:
1372 1372 c.commit = EmptyCommit(alias=self.rhodecode_vcs_repo.alias)
1373 1373
1374 1374 # calculate redirect URL
1375 1375 if self.rhodecode_vcs_repo.is_empty():
1376 1376 default_redirect_url = h.route_path(
1377 1377 'repo_summary', repo_name=self.db_repo_name)
1378 1378 else:
1379 1379 default_redirect_url = h.route_path(
1380 1380 'repo_commit', repo_name=self.db_repo_name, commit_id='tip')
1381 1381
1382 1382 if self.rhodecode_vcs_repo.is_empty():
1383 1383 # for empty repository we cannot check for current branch, we rely on
1384 1384 # c.commit.branch instead
1385 1385 _branch_name, _sha_commit_id, is_head = c.commit.branch, '', True
1386 1386 else:
1387 1387 _branch_name, _sha_commit_id, is_head = \
1388 1388 self._is_valid_head(commit_id, self.rhodecode_vcs_repo,
1389 1389 landing_ref=self.db_repo.landing_ref_name)
1390 1390
1391 1391 self.forbid_non_head(is_head, f_path, commit_id=commit_id)
1392 1392 self.check_branch_permission(_branch_name, commit_id=commit_id)
1393 1393
1394 1394 c.default_message = (_('Added file via RhodeCode Enterprise'))
1395 1395 c.f_path = f_path
1396 1396
1397 1397 r_post = self.request.POST
1398 1398 message = r_post.get('message') or c.default_message
1399 1399 filename = r_post.get('filename')
1400 1400 unix_mode = 0
1401 1401 content = convert_line_endings(r_post.get('content', ''), unix_mode)
1402 1402
1403 1403 if not filename:
1404 1404 # If there's no commit, redirect to repo summary
1405 1405 if type(c.commit) is EmptyCommit:
1406 1406 redirect_url = h.route_path(
1407 1407 'repo_summary', repo_name=self.db_repo_name)
1408 1408 else:
1409 1409 redirect_url = default_redirect_url
1410 1410 h.flash(_('No filename specified'), category='warning')
1411 1411 raise HTTPFound(redirect_url)
1412 1412
1413 1413 root_path = f_path
1414 1414 pure_path = self.create_pure_path(root_path, filename)
1415 1415 node_path = safe_unicode(bytes(pure_path).lstrip('/'))
1416 1416
1417 1417 author = self._rhodecode_db_user.full_contact
1418 1418 nodes = {
1419 1419 node_path: {
1420 1420 'content': content
1421 1421 }
1422 1422 }
1423 1423
1424 1424 try:
1425 1425
1426 1426 commit = ScmModel().create_nodes(
1427 1427 user=self._rhodecode_db_user.user_id,
1428 1428 repo=self.db_repo,
1429 1429 message=message,
1430 1430 nodes=nodes,
1431 1431 parent_commit=c.commit,
1432 1432 author=author,
1433 1433 )
1434 1434
1435 1435 h.flash(_('Successfully committed new file `{}`').format(
1436 1436 h.escape(node_path)), category='success')
1437 1437
1438 1438 default_redirect_url = h.route_path(
1439 1439 'repo_commit', repo_name=self.db_repo_name, commit_id=commit.raw_id)
1440 1440
1441 1441 except NonRelativePathError:
1442 1442 log.exception('Non Relative path found')
1443 1443 h.flash(_('The location specified must be a relative path and must not '
1444 1444 'contain .. in the path'), category='warning')
1445 1445 raise HTTPFound(default_redirect_url)
1446 1446 except (NodeError, NodeAlreadyExistsError) as e:
1447 h.flash(_(h.escape(e)), category='error')
1447 h.flash(h.escape(safe_str(e)), category='error')
1448 1448 except Exception:
1449 1449 log.exception('Error occurred during commit')
1450 1450 h.flash(_('Error occurred during commit'), category='error')
1451 1451
1452 1452 raise HTTPFound(default_redirect_url)
1453 1453
1454 1454 @LoginRequired()
1455 1455 @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
1456 1456 @CSRFRequired()
1457 1457 def repo_files_upload_file(self):
1458 1458 _ = self.request.translate
1459 1459 c = self.load_default_context()
1460 1460 commit_id, f_path = self._get_commit_and_path()
1461 1461
1462 1462 self._ensure_not_locked()
1463 1463
1464 1464 c.commit = self._get_commit_or_redirect(commit_id, redirect_after=False)
1465 1465 if c.commit is None:
1466 1466 c.commit = EmptyCommit(alias=self.rhodecode_vcs_repo.alias)
1467 1467
1468 1468 # calculate redirect URL
1469 1469 if self.rhodecode_vcs_repo.is_empty():
1470 1470 default_redirect_url = h.route_path(
1471 1471 'repo_summary', repo_name=self.db_repo_name)
1472 1472 else:
1473 1473 default_redirect_url = h.route_path(
1474 1474 'repo_commit', repo_name=self.db_repo_name, commit_id='tip')
1475 1475
1476 1476 if self.rhodecode_vcs_repo.is_empty():
1477 1477 # for empty repository we cannot check for current branch, we rely on
1478 1478 # c.commit.branch instead
1479 1479 _branch_name, _sha_commit_id, is_head = c.commit.branch, '', True
1480 1480 else:
1481 1481 _branch_name, _sha_commit_id, is_head = \
1482 1482 self._is_valid_head(commit_id, self.rhodecode_vcs_repo,
1483 1483 landing_ref=self.db_repo.landing_ref_name)
1484 1484
1485 1485 error = self.forbid_non_head(is_head, f_path, json_mode=True)
1486 1486 if error:
1487 1487 return {
1488 1488 'error': error,
1489 1489 'redirect_url': default_redirect_url
1490 1490 }
1491 1491 error = self.check_branch_permission(_branch_name, json_mode=True)
1492 1492 if error:
1493 1493 return {
1494 1494 'error': error,
1495 1495 'redirect_url': default_redirect_url
1496 1496 }
1497 1497
1498 1498 c.default_message = (_('Uploaded file via RhodeCode Enterprise'))
1499 1499 c.f_path = f_path
1500 1500
1501 1501 r_post = self.request.POST
1502 1502
1503 1503 message = c.default_message
1504 1504 user_message = r_post.getall('message')
1505 1505 if isinstance(user_message, list) and user_message:
1506 1506 # we take the first from duplicated results if it's not empty
1507 1507 message = user_message[0] if user_message[0] else message
1508 1508
1509 1509 nodes = {}
1510 1510
1511 1511 for file_obj in r_post.getall('files_upload') or []:
1512 1512 content = file_obj.file
1513 1513 filename = file_obj.filename
1514 1514
1515 1515 root_path = f_path
1516 1516 pure_path = self.create_pure_path(root_path, filename)
1517 1517 node_path = safe_unicode(bytes(pure_path).lstrip('/'))
1518 1518
1519 1519 nodes[node_path] = {
1520 1520 'content': content
1521 1521 }
1522 1522
1523 1523 if not nodes:
1524 1524 error = 'missing files'
1525 1525 return {
1526 1526 'error': error,
1527 1527 'redirect_url': default_redirect_url
1528 1528 }
1529 1529
1530 1530 author = self._rhodecode_db_user.full_contact
1531 1531
1532 1532 try:
1533 1533 commit = ScmModel().create_nodes(
1534 1534 user=self._rhodecode_db_user.user_id,
1535 1535 repo=self.db_repo,
1536 1536 message=message,
1537 1537 nodes=nodes,
1538 1538 parent_commit=c.commit,
1539 1539 author=author,
1540 1540 )
1541 1541 if len(nodes) == 1:
1542 1542 flash_message = _('Successfully committed 1 new file')
1543 1543 else:
1544 1544 flash_message = _('Successfully committed {} new files').format(len(nodes))
1545 1545
1546 1546 h.flash(flash_message, category='success')
1547 1547
1548 1548 default_redirect_url = h.route_path(
1549 1549 'repo_commit', repo_name=self.db_repo_name, commit_id=commit.raw_id)
1550 1550
1551 1551 except NonRelativePathError:
1552 1552 log.exception('Non Relative path found')
1553 1553 error = _('The location specified must be a relative path and must not '
1554 1554 'contain .. in the path')
1555 1555 h.flash(error, category='warning')
1556 1556
1557 1557 return {
1558 1558 'error': error,
1559 1559 'redirect_url': default_redirect_url
1560 1560 }
1561 1561 except (NodeError, NodeAlreadyExistsError) as e:
1562 1562 error = h.escape(safe_str(e))
1563 1563 h.flash(error, category='error')
1564 1564
1565 1565 return {
1566 1566 'error': error,
1567 1567 'redirect_url': default_redirect_url
1568 1568 }
1569 1569 except Exception:
1570 1570 log.exception('Error occurred during commit')
1571 1571 error = _('Error occurred during commit')
1572 1572 h.flash(error, category='error')
1573 1573 return {
1574 1574 'error': error,
1575 1575 'redirect_url': default_redirect_url
1576 1576 }
1577 1577
1578 1578 return {
1579 1579 'error': None,
1580 1580 'redirect_url': default_redirect_url
1581 1581 }
@@ -1,389 +1,390 b''
1 1 import sys
2 2 import threading
3 3 import weakref
4 4 from base64 import b64encode
5 5 from logging import getLogger
6 6 from os import urandom
7 7
8 8 from redis import StrictRedis
9 9
10 10 __version__ = '3.7.0'
11 11
12 12 loggers = {
13 k: getLogger("rhodecode" + ".".join((__name__, k)))
13 k: getLogger("rhodecode." + ".".join((__name__, k)))
14 14 for k in [
15 15 "acquire",
16 16 "refresh.thread.start",
17 17 "refresh.thread.stop",
18 18 "refresh.thread.exit",
19 19 "refresh.start",
20 20 "refresh.shutdown",
21 21 "refresh.exit",
22 22 "release",
23 23 ]
24 24 }
25 25
26 26 PY3 = sys.version_info[0] == 3
27 27
28 28 if PY3:
29 29 text_type = str
30 30 binary_type = bytes
31 31 else:
32 32 text_type = unicode # noqa
33 33 binary_type = str
34 34
35 35
36 36 # Check if the id matches. If not, return an error code.
37 37 UNLOCK_SCRIPT = b"""
38 38 if redis.call("get", KEYS[1]) ~= ARGV[1] then
39 39 return 1
40 40 else
41 41 redis.call("del", KEYS[2])
42 42 redis.call("lpush", KEYS[2], 1)
43 43 redis.call("pexpire", KEYS[2], ARGV[2])
44 44 redis.call("del", KEYS[1])
45 45 return 0
46 46 end
47 47 """
48 48
49 49 # Covers both cases: when the key doesn't exist and when it doesn't equal the lock's id
50 50 EXTEND_SCRIPT = b"""
51 51 if redis.call("get", KEYS[1]) ~= ARGV[1] then
52 52 return 1
53 53 elseif redis.call("ttl", KEYS[1]) < 0 then
54 54 return 2
55 55 else
56 56 redis.call("expire", KEYS[1], ARGV[2])
57 57 return 0
58 58 end
59 59 """
60 60
61 61 RESET_SCRIPT = b"""
62 62 redis.call('del', KEYS[2])
63 63 redis.call('lpush', KEYS[2], 1)
64 64 redis.call('pexpire', KEYS[2], ARGV[2])
65 65 return redis.call('del', KEYS[1])
66 66 """
67 67
68 68 RESET_ALL_SCRIPT = b"""
69 69 local locks = redis.call('keys', 'lock:*')
70 70 local signal
71 71 for _, lock in pairs(locks) do
72 72 signal = 'lock-signal:' .. string.sub(lock, 6)
73 73 redis.call('del', signal)
74 74 redis.call('lpush', signal, 1)
75 75 redis.call('expire', signal, 1)
76 76 redis.call('del', lock)
77 77 end
78 78 return #locks
79 79 """
80 80
81 81
82 82 class AlreadyAcquired(RuntimeError):
83 83 pass
84 84
85 85
86 86 class NotAcquired(RuntimeError):
87 87 pass
88 88
89 89
90 90 class AlreadyStarted(RuntimeError):
91 91 pass
92 92
93 93
94 94 class TimeoutNotUsable(RuntimeError):
95 95 pass
96 96
97 97
98 98 class InvalidTimeout(RuntimeError):
99 99 pass
100 100
101 101
102 102 class TimeoutTooLarge(RuntimeError):
103 103 pass
104 104
105 105
106 106 class NotExpirable(RuntimeError):
107 107 pass
108 108
109 109
110 110 class Lock(object):
111 111 """
112 112 A Lock context manager implemented via redis SETNX/BLPOP.
113 113 """
114 114 unlock_script = None
115 115 extend_script = None
116 116 reset_script = None
117 117 reset_all_script = None
118 118
119 119 def __init__(self, redis_client, name, expire=None, id=None, auto_renewal=False, strict=True, signal_expire=1000):
120 120 """
121 121 :param redis_client:
122 122 An instance of :class:`~StrictRedis`.
123 123 :param name:
124 124 The name (redis key) the lock should have.
125 125 :param expire:
126 126 The lock expiry time in seconds. If left at the default (None)
127 127 the lock will not expire.
128 128 :param id:
129 129 The ID (redis value) the lock should have. A random value is
130 130 generated when left at the default.
131 131
132 132 Note that if you specify this then the lock is marked as "held". Acquires
133 133 won't be possible.
134 134 :param auto_renewal:
135 135 If set to ``True``, Lock will automatically renew the lock so that it
136 136 doesn't expire for as long as the lock is held (acquire() called
137 137 or running in a context manager).
138 138
139 139 Implementation note: Renewal will happen using a daemon thread with
140 140 an interval of ``expire*2/3``. If wishing to use a different renewal
141 141 time, subclass Lock, call ``super().__init__()`` then set
142 142 ``self._lock_renewal_interval`` to your desired interval.
143 143 :param strict:
144 144 If set ``True`` then the ``redis_client`` needs to be an instance of ``redis.StrictRedis``.
145 145 :param signal_expire:
146 146 Advanced option to override signal list expiration in milliseconds. Increase it for very slow clients. Default: ``1000``.
147 147 """
148 148 if strict and not isinstance(redis_client, StrictRedis):
149 149 raise ValueError("redis_client must be instance of StrictRedis. "
150 150 "Use strict=False if you know what you're doing.")
151 151 if auto_renewal and expire is None:
152 152 raise ValueError("Expire may not be None when auto_renewal is set")
153 153
154 154 self._client = redis_client
155 155
156 156 if expire:
157 157 expire = int(expire)
158 158 if expire < 0:
159 159 raise ValueError("A negative expire is not acceptable.")
160 160 else:
161 161 expire = None
162 162 self._expire = expire
163 163
164 164 self._signal_expire = signal_expire
165 165 if id is None:
166 166 self._id = b64encode(urandom(18)).decode('ascii')
167 167 elif isinstance(id, binary_type):
168 168 try:
169 169 self._id = id.decode('ascii')
170 170 except UnicodeDecodeError:
171 171 self._id = b64encode(id).decode('ascii')
172 172 elif isinstance(id, text_type):
173 173 self._id = id
174 174 else:
175 175 raise TypeError("Incorrect type for `id`. Must be bytes/str not %s." % type(id))
176 176 self._name = 'lock:' + name
177 177 self._signal = 'lock-signal:' + name
178 178 self._lock_renewal_interval = (float(expire) * 2 / 3
179 179 if auto_renewal
180 180 else None)
181 181 self._lock_renewal_thread = None
182 182
183 183 self.register_scripts(redis_client)
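# A minimal usage sketch (assumes a reachable Redis on localhost); the lock
# is acquired on __enter__ and released on __exit__, and with auto_renewal
# a daemon thread refreshes it every expire*2/3 seconds:
#
#   conn = StrictRedis()
#   with Lock(conn, 'my-resource', expire=60, auto_renewal=True):
#       pass  # critical section, exclusive across processes sharing conn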
184 184
185 185 @classmethod
186 186 def register_scripts(cls, redis_client):
187 187 global reset_all_script
188 188 if reset_all_script is None:
189 189 reset_all_script = redis_client.register_script(RESET_ALL_SCRIPT)
190 190 cls.unlock_script = redis_client.register_script(UNLOCK_SCRIPT)
191 191 cls.extend_script = redis_client.register_script(EXTEND_SCRIPT)
192 192 cls.reset_script = redis_client.register_script(RESET_SCRIPT)
193 193 cls.reset_all_script = redis_client.register_script(RESET_ALL_SCRIPT)
194 194
195 195 @property
196 196 def _held(self):
197 197 return self.id == self.get_owner_id()
198 198
199 199 def reset(self):
200 200 """
201 201 Forcibly deletes the lock. Use this with care.
202 202 """
203 203 self.reset_script(client=self._client, keys=(self._name, self._signal), args=(self.id, self._signal_expire))
204 204
205 205 @property
206 206 def id(self):
207 207 return self._id
208 208
209 209 def get_owner_id(self):
210 210 owner_id = self._client.get(self._name)
211 211 if isinstance(owner_id, binary_type):
212 212 owner_id = owner_id.decode('ascii', 'replace')
213 213 return owner_id
214 214
215 215 def acquire(self, blocking=True, timeout=None):
216 216 """
217 217 :param blocking:
218 218 Boolean value specifying whether lock should be blocking or not.
219 219 :param timeout:
220 220 An integer value specifying the maximum number of seconds to block.
221 221 """
222 222 logger = loggers["acquire"]
223 223
224 logger.debug("Getting %r ...", self._name)
224 logger.debug("Getting acquire on %r ...", self._name)
225 225
226 226 if self._held:
227 raise AlreadyAcquired("Already acquired from this Lock instance.")
227 owner_id = self.get_owner_id()
228 raise AlreadyAcquired("Already acquired from this Lock instance. Lock id: {}".format(owner_id))
228 229
229 230 if not blocking and timeout is not None:
230 231 raise TimeoutNotUsable("Timeout cannot be used if blocking=False")
231 232
232 233 if timeout:
233 234 timeout = int(timeout)
234 235 if timeout < 0:
235 236 raise InvalidTimeout("Timeout (%d) cannot be negative" % timeout)
236 237
237 238 if self._expire and not self._lock_renewal_interval and timeout > self._expire:
238 239 raise TimeoutTooLarge("Timeout (%d) cannot be greater than expire (%d)" % (timeout, self._expire))
239 240
240 241 busy = True
241 242 blpop_timeout = timeout or self._expire or 0
242 243 timed_out = False
243 244 while busy:
244 245 busy = not self._client.set(self._name, self._id, nx=True, ex=self._expire)
245 246 if busy:
246 247 if timed_out:
247 248 return False
248 249 elif blocking:
249 250 timed_out = not self._client.blpop(self._signal, blpop_timeout) and timeout
250 251 else:
251 252 logger.warning("Failed to get %r.", self._name)
252 253 return False
253 254
254 255 logger.info("Got lock for %r.", self._name)
255 256 if self._lock_renewal_interval is not None:
256 257 self._start_lock_renewer()
257 258 return True
258 259
259 260 def extend(self, expire=None):
260 261 """Extends expiration time of the lock.
261 262
262 263 :param expire:
263 264 New expiration time. If ``None`` - `expire` provided during
264 265 lock initialization will be taken.
265 266 """
266 267 if expire:
267 268 expire = int(expire)
268 269 if expire < 0:
269 270 raise ValueError("A negative expire is not acceptable.")
270 271 elif self._expire is not None:
271 272 expire = self._expire
272 273 else:
273 274 raise TypeError(
274 275 "To extend a lock 'expire' must be provided as an "
275 276 "argument to extend() method or at initialization time."
276 277 )
277 278
278 279 error = self.extend_script(client=self._client, keys=(self._name, self._signal), args=(self._id, expire))
279 280 if error == 1:
280 281 raise NotAcquired("Lock %s is not acquired or it already expired." % self._name)
281 282 elif error == 2:
282 283 raise NotExpirable("Lock %s has no assigned expiration time" % self._name)
283 284 elif error:
284 285 raise RuntimeError("Unsupported error code %s from EXTEND script" % error)
285 286
286 287 @staticmethod
287 288 def _lock_renewer(lockref, interval, stop):
288 289 """
289 290 Renew the lock key in redis every `interval` seconds until the
290 291 `stop` event is set.
291 292 """
292 293 while not stop.wait(timeout=interval):
293 294 loggers["refresh.thread.start"].debug("Refreshing lock")
294 295 lock = lockref()
295 296 if lock is None:
296 297 loggers["refresh.thread.stop"].debug(
297 298 "The lock no longer exists, stopping lock refreshing"
298 299 )
299 300 break
300 301 lock.extend(expire=lock._expire)
301 302 del lock
302 303 loggers["refresh.thread.exit"].debug("Exit requested, stopping lock refreshing")
303 304
304 305 def _start_lock_renewer(self):
305 306 """
306 307 Starts the lock refresher thread.
307 308 """
308 309 if self._lock_renewal_thread is not None:
309 310 raise AlreadyStarted("Lock refresh thread already started")
310 311
311 312 loggers["refresh.start"].debug(
312 313 "Starting thread to refresh lock every %s seconds",
313 314 self._lock_renewal_interval
314 315 )
315 316 self._lock_renewal_stop = threading.Event()
316 317 self._lock_renewal_thread = threading.Thread(
317 318 group=None,
318 319 target=self._lock_renewer,
319 320 kwargs={'lockref': weakref.ref(self),
320 321 'interval': self._lock_renewal_interval,
321 322 'stop': self._lock_renewal_stop}
322 323 )
323 324 self._lock_renewal_thread.setDaemon(True)
324 325 self._lock_renewal_thread.start()
325 326
326 327 def _stop_lock_renewer(self):
327 328 """
328 329 Stop the lock renewer.
329 330
330 331 This signals the renewal thread and waits for its exit.
331 332 """
332 333 if self._lock_renewal_thread is None or not self._lock_renewal_thread.is_alive():
333 334 return
334 335 loggers["refresh.shutdown"].debug("Signalling the lock refresher to stop")
335 336 self._lock_renewal_stop.set()
336 337 self._lock_renewal_thread.join()
337 338 self._lock_renewal_thread = None
338 339 loggers["refresh.exit"].debug("Lock refresher has stopped")
339 340
340 341 def __enter__(self):
341 342 acquired = self.acquire(blocking=True)
342 343 assert acquired, "Lock wasn't acquired, but blocking=True"
343 344 return self
344 345
345 346 def __exit__(self, exc_type=None, exc_value=None, traceback=None):
346 347 self.release()
347 348
348 349 def release(self):
349 350 """Releases the lock, that was acquired with the same object.
350 351
351 352 .. note::
352 353
353 354 If you want to release a lock that you acquired in a different place you have two choices:
354 355
355 356 * Use ``Lock("name", id=id_from_other_place).release()``
356 357 * Use ``Lock("name").reset()``
357 358 """
358 359 if self._lock_renewal_thread is not None:
359 360 self._stop_lock_renewer()
360 361 loggers["release"].debug("Releasing %r.", self._name)
361 362 error = self.unlock_script(client=self._client, keys=(self._name, self._signal), args=(self._id, self._signal_expire))
362 363 if error == 1:
363 364 raise NotAcquired("Lock %s is not acquired or it already expired." % self._name)
364 365 elif error:
365 366 raise RuntimeError("Unsupported error code %s from UNLOCK script." % error)
366 367
367 368 def locked(self):
368 369 """
369 370 Return true if the lock is acquired.
370 371
371 372 Checks whether a lock with the same name already exists. This method
372 373 returns true even if that lock is held under another id.
373 374 """
374 375 return self._client.exists(self._name) == 1
375 376
376 377
377 378 reset_all_script = None
378 379
379 380
380 381 def reset_all(redis_client):
381 382 """
382 383 Forcibly deletes all locks that remain (e.g. after a crash). Use this with care.
383 384
384 385 :param redis_client:
385 386 An instance of :class:`~StrictRedis`.
386 387 """
387 388 Lock.register_scripts(redis_client)
388 389
389 390 reset_all_script(client=redis_client) # noqa
@@ -1,280 +1,283 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import gzip
22 22 import shutil
23 23 import logging
24 24 import tempfile
25 25 import urlparse
26 26
27 27 from webob.exc import HTTPNotFound
28 28
29 29 import rhodecode
30 30 from rhodecode.lib.middleware.appenlight import wrap_in_appenlight_if_enabled
31 31 from rhodecode.lib.middleware.simplegit import SimpleGit, GIT_PROTO_PAT
32 32 from rhodecode.lib.middleware.simplehg import SimpleHg
33 33 from rhodecode.lib.middleware.simplesvn import SimpleSvn
34 34 from rhodecode.model.settings import VcsSettingsModel
35 35
36 36 log = logging.getLogger(__name__)
37 37
38 38 VCS_TYPE_KEY = '_rc_vcs_type'
39 39 VCS_TYPE_SKIP = '_rc_vcs_skip'
40 40
41 41
42 42 def is_git(environ):
43 43 """
44 44 Returns True if the request should be handled by the GIT wsgi middleware
45 45 """
46 46 is_git_path = GIT_PROTO_PAT.match(environ['PATH_INFO'])
47 47 log.debug(
48 48 'request path: `%s` detected as GIT PROTOCOL %s', environ['PATH_INFO'],
49 49 is_git_path is not None)
50 50
51 51 return is_git_path
52 52
53 53
54 54 def is_hg(environ):
55 55 """
56 56 Returns True if the request targets a mercurial server - the
57 57 ``HTTP_ACCEPT`` header of such a request starts with ``application/mercurial``.
58 58 """
59 59 is_hg_path = False
60 60
61 61 http_accept = environ.get('HTTP_ACCEPT')
62 62
63 63 if http_accept and http_accept.startswith('application/mercurial'):
64 64 query = urlparse.parse_qs(environ['QUERY_STRING'])
65 65 if 'cmd' in query:
66 66 is_hg_path = True
67 67
68 68 log.debug(
69 69 'request path: `%s` detected as HG PROTOCOL %s', environ['PATH_INFO'],
70 70 is_hg_path)
71 71
72 72 return is_hg_path
73 73
74 74
75 75 def is_svn(environ):
76 76 """
77 77 Returns True if the request targets a Subversion server
78 78 """
79 79
80 80 http_dav = environ.get('HTTP_DAV', '')
81 81 magic_path_segment = rhodecode.CONFIG.get(
82 82 'rhodecode_subversion_magic_path', '/!svn')
83 83 is_svn_path = (
84 84 'subversion' in http_dav or
85 85 magic_path_segment in environ['PATH_INFO']
86 86 or environ['REQUEST_METHOD'] in ['PROPFIND', 'PROPPATCH']
87 87 )
88 88 log.debug(
89 89 'request path: `%s` detected as SVN PROTOCOL %s', environ['PATH_INFO'],
90 90 is_svn_path)
91 91
92 92 return is_svn_path
93 93
94 94
95 95 class GunzipMiddleware(object):
96 96 """
97 97 WSGI middleware that unzips gzip-encoded requests before
98 98 passing on to the underlying application.
99 99 """
100 100
101 101 def __init__(self, application):
102 102 self.app = application
103 103
104 104 def __call__(self, environ, start_response):
105 105 accepts_encoding_header = environ.get('HTTP_CONTENT_ENCODING', b'')
106 106
107 107 if b'gzip' in accepts_encoding_header:
108 108 log.debug('gzip detected, now running gunzip wrapper')
109 109 wsgi_input = environ['wsgi.input']
110 110
111 111 if not hasattr(environ['wsgi.input'], 'seek'):
112 112 # The gzip implementation in the standard library of Python 2.x
113 113 # requires the '.seek()' and '.tell()' methods to be available
114 114 # on the input stream. Read the data into a temporary file to
115 115 # work around this limitation.
116 116
117 117 wsgi_input = tempfile.SpooledTemporaryFile(64 * 1024 * 1024)
118 118 shutil.copyfileobj(environ['wsgi.input'], wsgi_input)
119 119 wsgi_input.seek(0)
120 120
121 121 environ['wsgi.input'] = gzip.GzipFile(fileobj=wsgi_input, mode='r')
122 122 # since we "Ungzipped" the content we say now it's no longer gzip
123 123 # content encoding
124 124 del environ['HTTP_CONTENT_ENCODING']
125 125
126 126 # the content length changed after decompression, so drop the stale header
127 127 if 'CONTENT_LENGTH' in environ:
128 128 del environ['CONTENT_LENGTH']
129 129 else:
130 130 log.debug('content not gzipped, gzipMiddleware passing '
131 131 'request further')
132 132 return self.app(environ, start_response)
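# A minimal wiring sketch (names are illustrative): any WSGI app can be
# wrapped so that gzip-encoded request bodies arrive decompressed:
#
#   def app(environ, start_response):
#       start_response('200 OK', [('Content-Type', 'text/plain')])
#       return [environ['wsgi.input'].read()]
#
#   application = GunzipMiddleware(app)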
133 133
134 134
135 135 def is_vcs_call(environ):
136 136 if VCS_TYPE_KEY in environ:
137 137 raw_type = environ[VCS_TYPE_KEY]
138 138 return raw_type and raw_type != VCS_TYPE_SKIP
139 139 return False
140 140
141 141
142 142 def get_path_elem(route_path):
143 143 if not route_path:
144 144 return None
145 145
146 146 cleaned_route_path = route_path.lstrip('/')
147 147 if cleaned_route_path:
148 148 cleaned_route_path_elems = cleaned_route_path.split('/')
149 149 if cleaned_route_path_elems:
150 150 return cleaned_route_path_elems[0]
151 151 return None
152 152
153 153
154 154 def detect_vcs_request(environ, backends):
155 155 checks = {
156 156 'hg': (is_hg, SimpleHg),
157 157 'git': (is_git, SimpleGit),
158 158 'svn': (is_svn, SimpleSvn),
159 159 }
160 160 handler = None
161 161 # list of first path chunks (views) for which we skip all VCS checks
162 162 white_list = [
163 163 # e.g /_file_store/download
164 164 '_file_store',
165 165
166 166 # static files no detection
167 167 '_static',
168 168
169 # skip ops ping
170 '_admin/ops/ping',
171
169 172 # full channelstream connect should be VCS skipped
170 173 '_admin/channelstream/connect',
171 174 ]
172 175
173 176 path_info = environ['PATH_INFO']
174 177
175 178 path_elem = get_path_elem(path_info)
176 179
177 180 if path_elem in white_list:
178 181 log.debug('path `%s` in whitelist, skipping...', path_info)
179 182 return handler
180 183
181 184 path_url = path_info.lstrip('/')
182 185 if path_url in white_list:
183 186 log.debug('full url path `%s` in whitelist, skipping...', path_url)
184 187 return handler
185 188
186 189 if VCS_TYPE_KEY in environ:
187 190 raw_type = environ[VCS_TYPE_KEY]
188 191 if raw_type == VCS_TYPE_SKIP:
189 192 log.debug('got `skip` marker for vcs detection, skipping...')
190 193 return handler
191 194
192 195 _check, handler = checks.get(raw_type) or [None, None]
193 196 if handler:
194 197 log.debug('got handler:%s from environ', handler)
195 198
196 199 if not handler:
197 200 log.debug('request start: checking if request for `%s` is of VCS type in order: %s', path_elem, backends)
198 201 for vcs_type in backends:
199 202 vcs_check, _handler = checks[vcs_type]
200 203 if vcs_check(environ):
201 204 log.debug('vcs handler found %s', _handler)
202 205 handler = _handler
203 206 break
204 207
205 208 return handler
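# For illustration: with the new whitelist entry above, an ops-ping request
# short-circuits before any backend checks run, e.g. (hypothetical environ):
#
#   detect_vcs_request({'PATH_INFO': '/_admin/ops/ping'}, ['hg', 'git', 'svn'])
#   # -> None, matched by the full-url whitelist branch; none of the
#   # is_hg/is_git/is_svn detectors are invoked.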
206 209
207 210
208 211 class VCSMiddleware(object):
209 212
210 213 def __init__(self, app, registry, config, appenlight_client):
211 214 self.application = app
212 215 self.registry = registry
213 216 self.config = config
214 217 self.appenlight_client = appenlight_client
215 218 self.use_gzip = True
216 219 # order in which we check the middlewares, based on vcs.backends config
217 220 self.check_middlewares = config['vcs.backends']
218 221
219 222 def vcs_config(self, repo_name=None):
220 223 """
221 224 returns serialized VcsSettings
222 225 """
223 226 try:
224 227 return VcsSettingsModel(
225 228 repo=repo_name).get_ui_settings_as_config_obj()
226 229 except Exception:
227 230 pass
228 231
229 232 def wrap_in_gzip_if_enabled(self, app, config):
230 233 if self.use_gzip:
231 234 app = GunzipMiddleware(app)
232 235 return app
233 236
234 237 def _get_handler_app(self, environ):
235 238 app = None
236 239 log.debug('VCSMiddleware: detecting vcs type.')
237 240 handler = detect_vcs_request(environ, self.check_middlewares)
238 241 if handler:
239 242 app = handler(self.config, self.registry)
240 243
241 244 return app
242 245
243 246 def __call__(self, environ, start_response):
244 247 # check if we handle one of the interesting protocols, and optionally
245 248 # extract specific vcsSettings and allow changing how things are wrapped
246 249 vcs_handler = self._get_handler_app(environ)
247 250 if vcs_handler:
248 251 # translate the _REPO_ID into real repo NAME for usage
249 252 # in middleware
250 253 environ['PATH_INFO'] = vcs_handler._get_by_id(environ['PATH_INFO'])
251 254
252 255 # Set acl, url and vcs repo names.
253 256 vcs_handler.set_repo_names(environ)
254 257
255 258 # register repo config back to the handler
256 259 vcs_conf = self.vcs_config(vcs_handler.acl_repo_name)
257 260 # settings may be damaged or non-existent. We still want to
258 261 # pass this point on to is_valid_and_existing_repo for validation
259 262 # and return a proper HTTP code back to the client
260 263 if vcs_conf:
261 264 vcs_handler.repo_vcs_config = vcs_conf
262 265
263 266 # check for type, presence in database and on filesystem
264 267 if not vcs_handler.is_valid_and_existing_repo(
265 268 vcs_handler.acl_repo_name,
266 269 vcs_handler.base_path,
267 270 vcs_handler.SCM):
268 271 return HTTPNotFound()(environ, start_response)
269 272
270 273 environ['REPO_NAME'] = vcs_handler.url_repo_name
271 274
272 275 # Wrap handler in middlewares if they are enabled.
273 276 vcs_handler = self.wrap_in_gzip_if_enabled(
274 277 vcs_handler, self.config)
275 278 vcs_handler, _ = wrap_in_appenlight_if_enabled(
276 279 vcs_handler, self.config, self.appenlight_client)
277 280
278 281 return vcs_handler(environ, start_response)
279 282
280 283 return self.application(environ, start_response)
@@ -1,312 +1,354 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2015-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import time
22 22 import errno
23 23 import logging
24 24
25 25 import msgpack
26 26 import gevent
27 27 import redis
28 28
29 29 from dogpile.cache.api import CachedValue
30 30 from dogpile.cache.backends import memory as memory_backend
31 31 from dogpile.cache.backends import file as file_backend
32 32 from dogpile.cache.backends import redis as redis_backend
33 33 from dogpile.cache.backends.file import NO_VALUE, compat, FileLock
34 34 from dogpile.cache.util import memoized_property
35 35
36 from pyramid.settings import asbool
37
36 38 from rhodecode.lib.memory_lru_dict import LRUDict, LRUDictDebug
37 39
38 40
39 41 _default_max_size = 1024
40 42
41 43 log = logging.getLogger(__name__)
42 44
43 45
44 46 class LRUMemoryBackend(memory_backend.MemoryBackend):
45 47 key_prefix = 'lru_mem_backend'
46 48 pickle_values = False
47 49
48 50 def __init__(self, arguments):
49 51 max_size = arguments.pop('max_size', _default_max_size)
50 52
51 53 LRUDictClass = LRUDict
52 54 if arguments.pop('log_key_count', None):
53 55 LRUDictClass = LRUDictDebug
54 56
55 57 arguments['cache_dict'] = LRUDictClass(max_size)
56 58 super(LRUMemoryBackend, self).__init__(arguments)
57 59
58 60 def delete(self, key):
59 61 try:
60 62 del self._cache[key]
61 63 except KeyError:
62 64 # we don't care if key isn't there at deletion
63 65 pass
64 66
65 67 def delete_multi(self, keys):
66 68 for key in keys:
67 69 self.delete(key)
68 70
69 71
70 72 class PickleSerializer(object):
71 73
72 74 def _dumps(self, value, safe=False):
73 75 try:
74 76 return compat.pickle.dumps(value)
75 77 except Exception:
76 78 if safe:
77 79 return NO_VALUE
78 80 else:
79 81 raise
80 82
81 83 def _loads(self, value, safe=True):
82 84 try:
83 85 return compat.pickle.loads(value)
84 86 except Exception:
85 87 if safe:
86 88 return NO_VALUE
87 89 else:
88 90 raise
89 91
90 92
91 93 class MsgPackSerializer(object):
92 94
93 95 def _dumps(self, value, safe=False):
94 96 try:
95 97 return msgpack.packb(value)
96 98 except Exception:
97 99 if safe:
98 100 return NO_VALUE
99 101 else:
100 102 raise
101 103
102 104 def _loads(self, value, safe=True):
103 105 """
104 106 pickle maintains the `CachedValue` wrapper of the tuple;
105 107 msgpack does not, so it must be added back in.
106 108 """
107 109 try:
108 110 value = msgpack.unpackb(value, use_list=False)
109 111 return CachedValue(*value)
110 112 except Exception:
111 113 if safe:
112 114 return NO_VALUE
113 115 else:
114 116 raise
115 117
116 118
117 119 import fcntl
118 120 flock_org = fcntl.flock
119 121
120 122
121 123 class CustomLockFactory(FileLock):
122 124
123 125 @memoized_property
124 126 def _module(self):
125 127
126 128 def gevent_flock(fd, operation):
127 129 """
128 130 Gevent compatible flock
129 131 """
130 132 # set non-blocking, this will cause an exception if we cannot acquire a lock
131 133 operation |= fcntl.LOCK_NB
132 134 start_lock_time = time.time()
133 135 timeout = 60 * 15 # 15min
134 136 while True:
135 137 try:
136 138 flock_org(fd, operation)
137 139 # lock has been acquired
138 140 break
139 141 except (OSError, IOError) as e:
140 142 # raise on other errors than Resource temporarily unavailable
141 143 if e.errno != errno.EAGAIN:
142 144 raise
143 145 elif (time.time() - start_lock_time) > timeout:
144 146 # waited too long on the lock, better to fail than loop forever
145 147 log.error('Failed to acquire lock on `%s` after waiting %ss',
146 148 self.filename, timeout)
147 149 raise
148 150 wait_timeout = 0.03
149 151 log.debug('Failed to acquire lock on `%s`, retry in %ss',
150 152 self.filename, wait_timeout)
151 153 gevent.sleep(wait_timeout)
152 154
153 155 fcntl.flock = gevent_flock
154 156 return fcntl
155 157
156 158
157 159 class FileNamespaceBackend(PickleSerializer, file_backend.DBMBackend):
158 160 key_prefix = 'file_backend'
159 161
160 162 def __init__(self, arguments):
161 163 arguments['lock_factory'] = CustomLockFactory
162 164 db_file = arguments.get('filename')
163 165
164 166 log.debug('initializing %s DB in %s', self.__class__.__name__, db_file)
165 167 try:
166 168 super(FileNamespaceBackend, self).__init__(arguments)
167 169 except Exception:
168 170 log.error('Failed to initialize db at: %s', db_file)
169 171 raise
170 172
171 173 def __repr__(self):
172 174 return '{} `{}`'.format(self.__class__, self.filename)
173 175
174 176 def list_keys(self, prefix=''):
175 177 prefix = '{}:{}'.format(self.key_prefix, prefix)
176 178
177 179 def cond(v):
178 180 if not prefix:
179 181 return True
180 182
181 183 if v.startswith(prefix):
182 184 return True
183 185 return False
184 186
185 187 with self._dbm_file(True) as dbm:
186 188 try:
187 189 return filter(cond, dbm.keys())
188 190 except Exception:
189 191 log.error('Failed to fetch DBM keys from DB: %s', self.get_store())
190 192 raise
191 193
192 194 def get_store(self):
193 195 return self.filename
194 196
195 197 def _dbm_get(self, key):
196 198 with self._dbm_file(False) as dbm:
197 199 if hasattr(dbm, 'get'):
198 200 value = dbm.get(key, NO_VALUE)
199 201 else:
200 202 # gdbm objects lack a .get method
201 203 try:
202 204 value = dbm[key]
203 205 except KeyError:
204 206 value = NO_VALUE
205 207 if value is not NO_VALUE:
206 208 value = self._loads(value)
207 209 return value
208 210
209 211 def get(self, key):
210 212 try:
211 213 return self._dbm_get(key)
212 214 except Exception:
213 215 log.error('Failed to fetch DBM key %s from DB: %s', key, self.get_store())
214 216 raise
215 217
216 218 def set(self, key, value):
217 219 with self._dbm_file(True) as dbm:
218 220 dbm[key] = self._dumps(value)
219 221
220 222 def set_multi(self, mapping):
221 223 with self._dbm_file(True) as dbm:
222 224 for key, value in mapping.items():
223 225 dbm[key] = self._dumps(value)
224 226
225 227
226 228 class BaseRedisBackend(redis_backend.RedisBackend):
229 key_prefix = ''
230
231 def __init__(self, arguments):
232 super(BaseRedisBackend, self).__init__(arguments)
233 self._lock_timeout = self.lock_timeout
234 self._lock_auto_renewal = asbool(arguments.pop("lock_auto_renewal", True))
235
236 if self._lock_auto_renewal and not self._lock_timeout:
237 # set default timeout for auto_renewal
238 self._lock_timeout = 30
227 239
228 240 def _create_client(self):
229 241 args = {}
230 242
231 243 if self.url is not None:
232 244 args.update(url=self.url)
233 245
234 246 else:
235 247 args.update(
236 248 host=self.host, password=self.password,
237 249 port=self.port, db=self.db
238 250 )
239 251
240 252 connection_pool = redis.ConnectionPool(**args)
241 253
242 254 return redis.StrictRedis(connection_pool=connection_pool)
243 255
244 256 def list_keys(self, prefix=''):
245 257 prefix = '{}:{}*'.format(self.key_prefix, prefix)
246 258 return self.client.keys(prefix)
247 259
248 260 def get_store(self):
249 261 return self.client.connection_pool
250 262
251 263 def get(self, key):
252 264 value = self.client.get(key)
253 265 if value is None:
254 266 return NO_VALUE
255 267 return self._loads(value)
256 268
257 269 def get_multi(self, keys):
258 270 if not keys:
259 271 return []
260 272 values = self.client.mget(keys)
261 273 loads = self._loads
262 274 return [
263 275 loads(v) if v is not None else NO_VALUE
264 276 for v in values]
265 277
266 278 def set(self, key, value):
267 279 if self.redis_expiration_time:
268 280 self.client.setex(key, self.redis_expiration_time,
269 281 self._dumps(value))
270 282 else:
271 283 self.client.set(key, self._dumps(value))
272 284
273 285 def set_multi(self, mapping):
274 286 dumps = self._dumps
275 287 mapping = dict(
276 288 (k, dumps(v))
277 289 for k, v in mapping.items()
278 290 )
279 291
280 292 if not self.redis_expiration_time:
281 293 self.client.mset(mapping)
282 294 else:
283 295 pipe = self.client.pipeline()
284 296 for key, value in mapping.items():
285 297 pipe.setex(key, self.redis_expiration_time, value)
286 298 pipe.execute()
287 299
288 300 def get_mutex(self, key):
289 301 if self.distributed_lock:
290 import redis_lock
291 302 lock_key = redis_backend.u('_lock_{0}').format(key)
292 303 log.debug('Trying to acquire Redis lock for key %s', lock_key)
293 lock = redis_lock.Lock(
294 redis_client=self.client,
295 name=lock_key,
296 expire=self.lock_timeout,
297 auto_renewal=False,
298 strict=True,
299 )
300 return lock
304 return get_mutex_lock(self.client, lock_key, self._lock_timeout,
305 auto_renewal=self._lock_auto_renewal)
301 306 else:
302 307 return None
303 308
304 309
305 310 class RedisPickleBackend(PickleSerializer, BaseRedisBackend):
306 311 key_prefix = 'redis_pickle_backend'
307 312 pass
308 313
309 314
310 315 class RedisMsgPackBackend(MsgPackSerializer, BaseRedisBackend):
311 316 key_prefix = 'redis_msgpack_backend'
312 317 pass
318
319
320 def get_mutex_lock(client, lock_key, lock_timeout, auto_renewal=False):
321 import redis_lock
322
323 class _RedisLockWrapper(object):
324 """LockWrapper for redis_lock"""
325
326 @classmethod
327 def get_lock(cls):
328 return redis_lock.Lock(
329 redis_client=client,
330 name=lock_key,
331 expire=lock_timeout,
332 auto_renewal=auto_renewal,
333 strict=True,
334 )
335
336 def __init__(self):
337 self.lock = self.get_lock()
338
339 def acquire(self, wait=True):
340 try:
341 return self.lock.acquire(wait)
342 except redis_lock.AlreadyAcquired:
343 return False
344 except redis_lock.AlreadyStarted:
345 # refresh thread exists, but it also means we acquired the lock
346 return True
347
348 def release(self):
349 try:
350 self.lock.release()
351 except redis_lock.NotAcquired:
352 pass
353
354 return _RedisLockWrapper()
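A short sketch of driving the wrapper above directly; the connection defaults
and key name are assumptions:

client = redis.StrictRedis()
mutex = get_mutex_lock(client, '_lock_example', lock_timeout=30,
                       auto_renewal=True)
if mutex.acquire(wait=True):
    try:
        pass  # guarded critical section goes here
    finally:
        mutex.release()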
@@ -1,415 +1,418 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2015-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20 import os
21 21 import time
22 22 import logging
23 23 import functools
24 24 import threading
25 25
26 26 from dogpile.cache import CacheRegion
27 27 from dogpile.cache.util import compat
28 28
29 29 import rhodecode
30 30 from rhodecode.lib.utils import safe_str, sha1
31 31 from rhodecode.lib.utils2 import safe_unicode, str2bool
32 32 from rhodecode.model.db import Session, CacheKey, IntegrityError
33 33
34 34 from rhodecode.lib.rc_cache import cache_key_meta
35 35 from rhodecode.lib.rc_cache import region_meta
36 36
37 37 log = logging.getLogger(__name__)
38 38
39 39
40 40 def isCython(func):
41 41 """
42 42 Private helper that checks if a function is a cython function.
43 43 """
44 44 return func.__class__.__name__ == 'cython_function_or_method'
45 45
46 46
47 47 class RhodeCodeCacheRegion(CacheRegion):
48 48
49 49 def conditional_cache_on_arguments(
50 50 self, namespace=None,
51 51 expiration_time=None,
52 52 should_cache_fn=None,
53 53 to_str=compat.string_type,
54 54 function_key_generator=None,
55 55 condition=True):
56 56 """
57 57 Custom conditional decorator that will not touch any dogpile internals if
58 58 the condition isn't met. This works a bit differently than should_cache_fn,
59 59 and it's faster in cases where we never want to compute cached values.
60 60 """
61 61 expiration_time_is_callable = compat.callable(expiration_time)
62 62
63 63 if function_key_generator is None:
64 64 function_key_generator = self.function_key_generator
65 65
66 66 # workaround for py2 and cython problems; the always-true guard below
67 67 # deliberately keeps this legacy path. Remove the block once we've migrated to py3
68 68 if 'cython' == 'cython':
69 69 def decorator(fn):
70 70 if to_str is compat.string_type:
71 71 # backwards compatible
72 72 key_generator = function_key_generator(namespace, fn)
73 73 else:
74 74 key_generator = function_key_generator(namespace, fn, to_str=to_str)
75 75
76 76 @functools.wraps(fn)
77 77 def decorate(*arg, **kw):
78 78 key = key_generator(*arg, **kw)
79 79
80 80 @functools.wraps(fn)
81 81 def creator():
82 82 return fn(*arg, **kw)
83 83
84 84 if not condition:
85 85 return creator()
86 86
87 87 timeout = expiration_time() if expiration_time_is_callable \
88 88 else expiration_time
89 89
90 90 return self.get_or_create(key, creator, timeout, should_cache_fn)
91 91
92 92 def invalidate(*arg, **kw):
93 93 key = key_generator(*arg, **kw)
94 94 self.delete(key)
95 95
96 96 def set_(value, *arg, **kw):
97 97 key = key_generator(*arg, **kw)
98 98 self.set(key, value)
99 99
100 100 def get(*arg, **kw):
101 101 key = key_generator(*arg, **kw)
102 102 return self.get(key)
103 103
104 104 def refresh(*arg, **kw):
105 105 key = key_generator(*arg, **kw)
106 106 value = fn(*arg, **kw)
107 107 self.set(key, value)
108 108 return value
109 109
110 110 decorate.set = set_
111 111 decorate.invalidate = invalidate
112 112 decorate.refresh = refresh
113 113 decorate.get = get
114 114 decorate.original = fn
115 115 decorate.key_generator = key_generator
116 116 decorate.__wrapped__ = fn
117 117
118 118 return decorate
119 119 return decorator
120 120
121 121 def get_or_create_for_user_func(key_generator, user_func, *arg, **kw):
122 122
123 123 if not condition:
124 124 log.debug('Calling un-cached func:%s', user_func.func_name)
125 125 return user_func(*arg, **kw)
126 126
127 127 key = key_generator(*arg, **kw)
128 128
129 129 timeout = expiration_time() if expiration_time_is_callable \
130 130 else expiration_time
131 131
132 132 log.debug('Calling cached fn:%s', user_func.func_name)
133 133 return self.get_or_create(key, user_func, timeout, should_cache_fn, (arg, kw))
134 134
135 135 def cache_decorator(user_func):
136 136 if to_str is compat.string_type:
137 137 # backwards compatible
138 138 key_generator = function_key_generator(namespace, user_func)
139 139 else:
140 140 key_generator = function_key_generator(namespace, user_func, to_str=to_str)
141 141
142 142 def refresh(*arg, **kw):
143 143 """
144 144 Like invalidate, but regenerates the value instead
145 145 """
146 146 key = key_generator(*arg, **kw)
147 147 value = user_func(*arg, **kw)
148 148 self.set(key, value)
149 149 return value
150 150
151 151 def invalidate(*arg, **kw):
152 152 key = key_generator(*arg, **kw)
153 153 self.delete(key)
154 154
155 155 def set_(value, *arg, **kw):
156 156 key = key_generator(*arg, **kw)
157 157 self.set(key, value)
158 158
159 159 def get(*arg, **kw):
160 160 key = key_generator(*arg, **kw)
161 161 return self.get(key)
162 162
163 163 user_func.set = set_
164 164 user_func.invalidate = invalidate
165 165 user_func.get = get
166 166 user_func.refresh = refresh
167 167 user_func.key_generator = key_generator
168 168 user_func.original = user_func
169 169
170 170 # Use `decorate` to preserve the signature of :param:`user_func`.
171 171 return decorator.decorate(user_func, functools.partial(
172 172 get_or_create_for_user_func, key_generator))
173 173
174 174 return cache_decorator
175 175
176 176
177 177 def make_region(*arg, **kw):
178 178 return RhodeCodeCacheRegion(*arg, **kw)
179 179
180 180
181 181 def get_default_cache_settings(settings, prefixes=None):
182 182 prefixes = prefixes or []
183 183 cache_settings = {}
184 184 for key in settings.keys():
185 185 for prefix in prefixes:
186 186 if key.startswith(prefix):
187 187 name = key.split(prefix)[1].strip()
188 188 val = settings[key]
189 189 if isinstance(val, compat.string_types):
190 190 val = val.strip()
191 191 cache_settings[name] = val
192 192 return cache_settings
193 193
194 194
195 195 def compute_key_from_params(*args):
196 196 """
197 197 Helper to compute key from given params to be used in cache manager
198 198 """
199 199 return sha1("_".join(map(safe_str, args)))
200 200
201 201
202 202 def backend_key_generator(backend):
203 203 """
204 204 Special wrapper that also sends over the backend to the key generator
205 205 """
206 206 def wrapper(namespace, fn):
207 207 return key_generator(backend, namespace, fn)
208 208 return wrapper
209 209
210 210
211 211 def key_generator(backend, namespace, fn):
212 212 fname = fn.__name__
213 213
214 214 def generate_key(*args):
215 215 backend_prefix = getattr(backend, 'key_prefix', None) or 'backend_prefix'
216 216 namespace_pref = namespace or 'default_namespace'
217 217 arg_key = compute_key_from_params(*args)
218 218 final_key = "{}:{}:{}_{}".format(backend_prefix, namespace_pref, fname, arg_key)
219 219
220 220 return final_key
221 221
222 222 return generate_key
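# For illustration: for a backend whose key_prefix is 'redis_pickle_backend',
# a namespace 'repo_cache:1' and a decorated function named 'heavy_compute'
# called with ('one', 'two'), generate_key() produces:
#
#   'redis_pickle_backend:repo_cache:1:heavy_compute_' + sha1('one_two')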
223 223
224 224
225 225 def get_or_create_region(region_name, region_namespace=None):
226 226 from rhodecode.lib.rc_cache.backends import FileNamespaceBackend
227 227 region_obj = region_meta.dogpile_cache_regions.get(region_name)
228 228 if not region_obj:
229 229 raise EnvironmentError(
230 230 'Region `{}` not found in configured regions: {}.'.format(
231 231 region_name, region_meta.dogpile_cache_regions.keys()))
232 232
233 233 region_uid_name = '{}:{}'.format(region_name, region_namespace)
234 234 if isinstance(region_obj.actual_backend, FileNamespaceBackend):
235 235 region_exist = region_meta.dogpile_cache_regions.get(region_namespace)
236 236 if region_exist:
237 237 log.debug('Using already configured region: %s', region_namespace)
238 238 return region_exist
239 239 cache_dir = region_meta.dogpile_config_defaults['cache_dir']
240 240 expiration_time = region_obj.expiration_time
241 241
242 242 if not os.path.isdir(cache_dir):
243 243 os.makedirs(cache_dir)
244 244 new_region = make_region(
245 245 name=region_uid_name,
246 246 function_key_generator=backend_key_generator(region_obj.actual_backend)
247 247 )
248 248 namespace_filename = os.path.join(
249 249 cache_dir, "{}.cache.dbm".format(region_namespace))
250 250 # special type that allows 1db per namespace
251 251 new_region.configure(
252 252 backend='dogpile.cache.rc.file_namespace',
253 253 expiration_time=expiration_time,
254 254 arguments={"filename": namespace_filename}
255 255 )
256 256
257 257 # create and save in region caches
258 258 log.debug('configuring new region: %s', region_uid_name)
259 259 region_obj = region_meta.dogpile_cache_regions[region_namespace] = new_region
260 260
261 261 return region_obj
262 262
263 263
264 def clear_cache_namespace(cache_region, cache_namespace_uid):
264 def clear_cache_namespace(cache_region, cache_namespace_uid, invalidate=False):
265 265 region = get_or_create_region(cache_region, cache_namespace_uid)
266 266 cache_keys = region.backend.list_keys(prefix=cache_namespace_uid)
267 267 num_delete_keys = len(cache_keys)
268 if invalidate:
269     region.invalidate(hard=False)
270 else:
268 271     if num_delete_keys:
269 272         region.delete_multi(cache_keys)
270 273 return num_delete_keys
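A usage sketch of the new `invalidate` flag; the region and namespace names
are hypothetical:

# soft path: keys stay in the backend, dogpile just treats them as stale
clear_cache_namespace('cache_perms', 'user_perms:42', invalidate=True)

# hard path: physically deletes every key under the namespace
clear_cache_namespace('cache_perms', 'user_perms:42')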
271 274
272 275
273 276 class ActiveRegionCache(object):
274 277 def __init__(self, context, cache_data):
275 278 self.context = context
276 279 self.cache_data = cache_data
277 280
278 281 def should_invalidate(self):
279 282 return False
280 283
281 284
282 285 class FreshRegionCache(object):
283 286 def __init__(self, context, cache_data):
284 287 self.context = context
285 288 self.cache_data = cache_data
286 289
287 290 def should_invalidate(self):
288 291 return True
289 292
290 293
291 294 class InvalidationContext(object):
292 295 """
293 296 usage::
294 297
295 298 from rhodecode.lib import rc_cache
296 299
297 300 cache_namespace_uid = CacheKey.SOME_NAMESPACE.format(1)
298 301 region = rc_cache.get_or_create_region('cache_perms', cache_namespace_uid)
299 302
300 303 @region.conditional_cache_on_arguments(namespace=cache_namespace_uid, condition=True)
301 304 def heavy_compute(cache_name, param1, param2):
302 305 print('COMPUTE {}, {}, {}'.format(cache_name, param1, param2))
303 306
304 307 # invalidation namespace is shared namespace key for all process caches
305 308 # we use it to send a global signal
306 309 invalidation_namespace = 'repo_cache:1'
307 310
308 311 inv_context_manager = rc_cache.InvalidationContext(
309 312 uid=cache_namespace_uid, invalidation_namespace=invalidation_namespace)
310 313 with inv_context_manager as invalidation_context:
311 314 args = ('one', 'two')
312 315 # re-compute and store cache if we get invalidate signal
313 316 if invalidation_context.should_invalidate():
314 317 result = heavy_compute.refresh(*args)
315 318 else:
316 319 result = heavy_compute(*args)
317 320
318 321 compute_time = inv_context_manager.compute_time
319 322 log.debug('result computed in %.4fs', compute_time)
320 323
321 324 # To send global invalidation signal, simply run
322 325 CacheKey.set_invalidate(invalidation_namespace)
323 326
324 327 """
325 328
326 329 def __repr__(self):
327 330 return '<InvalidationContext:{}[{}]>'.format(
328 331 safe_str(self.cache_key), safe_str(self.uid))
329 332
330 333 def __init__(self, uid, invalidation_namespace='',
331 334 raise_exception=False, thread_scoped=None):
332 335 self.uid = uid
333 336 self.invalidation_namespace = invalidation_namespace
334 337 self.raise_exception = raise_exception
335 338 self.proc_id = safe_unicode(rhodecode.CONFIG.get('instance_id') or 'DEFAULT')
336 339 self.thread_id = 'global'
337 340
338 341 if thread_scoped is None:
339 342 # if we set "default" we can override this via .ini settings
340 343 thread_scoped = str2bool(rhodecode.CONFIG.get('cache_thread_scoped'))
341 344
342 345 # Append the thread id to the cache key if this invalidation context
343 346 # should be scoped to the current thread.
344 347 if thread_scoped is True:
345 348 self.thread_id = threading.current_thread().ident
346 349
347 350 self.cache_key = compute_key_from_params(uid)
348 351 self.cache_key = 'proc:{}|thread:{}|params:{}'.format(
349 352 self.proc_id, self.thread_id, self.cache_key)
350 353 self.compute_time = 0
351 354
352 355 def get_or_create_cache_obj(self, cache_type, invalidation_namespace=''):
353 356 invalidation_namespace = invalidation_namespace or self.invalidation_namespace
354 357 # fetch all cache keys for this namespace and convert them to a map, to find if we
355 358 # have a specific cache_key object registered. We do this because we want a
356 359 # consistent cache_state_uid for newly registered objects
357 360 cache_obj_map = CacheKey.get_namespace_map(invalidation_namespace)
358 361 cache_obj = cache_obj_map.get(self.cache_key)
359 362 log.debug('Fetched cache obj %s using %s cache key.', cache_obj, self.cache_key)
360 363 if not cache_obj:
361 364 new_cache_args = invalidation_namespace
362 365 first_cache_obj = next(cache_obj_map.itervalues()) if cache_obj_map else None
363 366 cache_state_uid = None
364 367 if first_cache_obj:
365 368 cache_state_uid = first_cache_obj.cache_state_uid
366 369 cache_obj = CacheKey(self.cache_key, cache_args=new_cache_args,
367 370 cache_state_uid=cache_state_uid)
368 371 cache_key_meta.cache_keys_by_pid.append(self.cache_key)
369 372
370 373 return cache_obj
371 374
372 375 def __enter__(self):
373 376 """
374 377 Test if current object is valid, and return CacheRegion function
375 378 that does invalidation and calculation
376 379 """
377 380 log.debug('Entering cache invalidation check context: %s', self.invalidation_namespace)
378 381 # register or get a new key based on uid
379 382 self.cache_obj = self.get_or_create_cache_obj(cache_type=self.uid)
380 383 cache_data = self.cache_obj.get_dict()
381 384 self._start_time = time.time()
382 385 if self.cache_obj.cache_active:
383 386 # means our cache obj exists and is marked as active, i.e. its
384 387 # cache is not outdated; we return ActiveRegionCache
385 388 self.skip_cache_active_change = True
386 389
387 390 return ActiveRegionCache(context=self, cache_data=cache_data)
388 391
389 392 # the key either does not exist or is set to False; we return
390 393 # the real invalidator which re-computes the value. We additionally set
391 394 # the flag to actually update the database objects
392 395 self.skip_cache_active_change = False
393 396 return FreshRegionCache(context=self, cache_data=cache_data)
394 397
395 398 def __exit__(self, exc_type, exc_val, exc_tb):
396 399 # save compute time
397 400 self.compute_time = time.time() - self._start_time
398 401
399 402 if self.skip_cache_active_change:
400 403 return
401 404
402 405 try:
403 406 self.cache_obj.cache_active = True
404 407 Session().add(self.cache_obj)
405 408 Session().commit()
406 409 except IntegrityError:
407 410 # if we catch an integrity error, it means this object was already inserted;
408 411 # the assumption is that this is really an edge race-condition case and
409 412 # it's safe to skip it
410 413 Session().rollback()
411 414 except Exception:
412 415 log.exception('Failed to commit on cache key update')
413 416 Session().rollback()
414 417 if self.raise_exception:
415 418 raise
@@ -1,2378 +1,2379 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2012-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21
22 22 """
23 23 pull request model for RhodeCode
24 24 """
25 25
26 26
27 27 import json
28 28 import logging
29 29 import os
30 30
31 31 import datetime
32 32 import urllib
33 33 import collections
34 34
35 35 from pyramid import compat
36 36 from pyramid.threadlocal import get_current_request
37 37
38 38 from rhodecode.lib.vcs.nodes import FileNode
39 39 from rhodecode.translation import lazy_ugettext
40 40 from rhodecode.lib import helpers as h, hooks_utils, diffs
41 41 from rhodecode.lib import audit_logger
42 42 from rhodecode.lib.compat import OrderedDict
43 43 from rhodecode.lib.hooks_daemon import prepare_callback_daemon
44 44 from rhodecode.lib.markup_renderer import (
45 45 DEFAULT_COMMENTS_RENDERER, RstTemplateRenderer)
46 46 from rhodecode.lib.utils2 import (
47 47 safe_unicode, safe_str, md5_safe, AttributeDict, safe_int,
48 48 get_current_rhodecode_user)
49 49 from rhodecode.lib.vcs.backends.base import (
50 50 Reference, MergeResponse, MergeFailureReason, UpdateFailureReason,
51 51 TargetRefMissing, SourceRefMissing)
52 52 from rhodecode.lib.vcs.conf import settings as vcs_settings
53 53 from rhodecode.lib.vcs.exceptions import (
54 54 CommitDoesNotExistError, EmptyRepositoryError)
55 55 from rhodecode.model import BaseModel
56 56 from rhodecode.model.changeset_status import ChangesetStatusModel
57 57 from rhodecode.model.comment import CommentsModel
58 58 from rhodecode.model.db import (
59 59 aliased, null, lazyload, and_, or_, func, String, cast, PullRequest, PullRequestReviewers, ChangesetStatus,
60 60 PullRequestVersion, ChangesetComment, Repository, RepoReviewRule, User)
61 61 from rhodecode.model.meta import Session
62 62 from rhodecode.model.notification import NotificationModel, \
63 63 EmailNotificationModel
64 64 from rhodecode.model.scm import ScmModel
65 65 from rhodecode.model.settings import VcsSettingsModel
66 66
67 67
68 68 log = logging.getLogger(__name__)
69 69
70 70
71 71 # Data structure to hold the response data when updating commits during a pull
72 72 # request update.
73 73 class UpdateResponse(object):
74 74
75 75 def __init__(self, executed, reason, new, old, common_ancestor_id,
76 76 commit_changes, source_changed, target_changed):
77 77
78 78 self.executed = executed
79 79 self.reason = reason
80 80 self.new = new
81 81 self.old = old
82 82 self.common_ancestor_id = common_ancestor_id
83 83 self.changes = commit_changes
84 84 self.source_changed = source_changed
85 85 self.target_changed = target_changed
86 86
87 87
88 88 def get_diff_info(
89 89 source_repo, source_ref, target_repo, target_ref, get_authors=False,
90 90 get_commit_authors=True):
91 91 """
92 92 Calculates detailed diff information for use in previewing the creation of a pull-request.
93 93 This is also used for the default reviewers logic.
94 94 """
95 95
96 96 source_scm = source_repo.scm_instance()
97 97 target_scm = target_repo.scm_instance()
98 98
99 99 ancestor_id = target_scm.get_common_ancestor(target_ref, source_ref, source_scm)
100 100 if not ancestor_id:
101 101 raise ValueError(
102 102 'cannot calculate diff info without a common ancestor. '
103 103 'Make sure both repositories are related, and have a common forking commit.')
104 104
105 105 # the case here is that we want a simple diff without incoming commits,
106 106 # previewing what will be merged based only on commits in the source.
107 107 log.debug('Using ancestor %s as source_ref instead of %s',
108 108 ancestor_id, source_ref)
109 109
110 110 # source of changes now is the common ancestor
111 111 source_commit = source_scm.get_commit(commit_id=ancestor_id)
112 112 # target commit becomes the source ref as it is the last commit
113 113 # for diff generation this logic gives proper diff
114 114 target_commit = source_scm.get_commit(commit_id=source_ref)
115 115
116 116 vcs_diff = \
117 117 source_scm.get_diff(commit1=source_commit, commit2=target_commit,
118 118 ignore_whitespace=False, context=3)
119 119
120 120 diff_processor = diffs.DiffProcessor(
121 121 vcs_diff, format='newdiff', diff_limit=None,
122 122 file_limit=None, show_full_diff=True)
123 123
124 124 _parsed = diff_processor.prepare()
125 125
126 126 all_files = []
127 127 all_files_changes = []
128 128 changed_lines = {}
129 129 stats = [0, 0]
130 130 for f in _parsed:
131 131 all_files.append(f['filename'])
132 132 all_files_changes.append({
133 133 'filename': f['filename'],
134 134 'stats': f['stats']
135 135 })
136 136 stats[0] += f['stats']['added']
137 137 stats[1] += f['stats']['deleted']
138 138
139 139 changed_lines[f['filename']] = []
140 140 if len(f['chunks']) < 2:
141 141 continue
142 142 # first line is "context" information
143 143 for chunks in f['chunks'][1:]:
144 144 for chunk in chunks['lines']:
145 145 if chunk['action'] not in ('del', 'mod'):
146 146 continue
147 147 changed_lines[f['filename']].append(chunk['old_lineno'])
148 148
149 149 commit_authors = []
150 150 user_counts = {}
151 151 email_counts = {}
152 152 author_counts = {}
153 153 _commit_cache = {}
154 154
155 155 commits = []
156 156 if get_commit_authors:
157 157 log.debug('Obtaining commit authors from set of commits')
158 158 _compare_data = target_scm.compare(
159 159 target_ref, source_ref, source_scm, merge=True,
160 160 pre_load=["author", "date", "message"]
161 161 )
162 162
163 163 for commit in _compare_data:
164 164 # NOTE(marcink): we serialize here, so we don't produce more vcsserver calls on data returned
165 165 # from this function, which is later serialized via JSON
166 166 serialized_commit = dict(
167 167 author=commit.author,
168 168 date=commit.date,
169 169 message=commit.message,
170 170 commit_id=commit.raw_id,
171 171 raw_id=commit.raw_id
172 172 )
173 173 commits.append(serialized_commit)
174 174 user = User.get_from_cs_author(serialized_commit['author'])
175 175 if user and user not in commit_authors:
176 176 commit_authors.append(user)
177 177
178 178 # lines
179 179 if get_authors:
180 180 log.debug('Calculating authors of changed files')
181 181 target_commit = source_repo.get_commit(ancestor_id)
182 182
183 183 for fname, lines in changed_lines.items():
184 184
185 185 try:
186 186 node = target_commit.get_node(fname, pre_load=["is_binary"])
187 187 except Exception:
188 188 log.exception("Failed to load node with path %s", fname)
189 189 continue
190 190
191 191 if not isinstance(node, FileNode):
192 192 continue
193 193
194 194 # NOTE(marcink): for binary node we don't do annotation, just use last author
195 195 if node.is_binary:
196 196 author = node.last_commit.author
197 197 email = node.last_commit.author_email
198 198
199 199 user = User.get_from_cs_author(author)
200 200 if user:
201 201 user_counts[user.user_id] = user_counts.get(user.user_id, 0) + 1
202 202 author_counts[author] = author_counts.get(author, 0) + 1
203 203 email_counts[email] = email_counts.get(email, 0) + 1
204 204
205 205 continue
206 206
207 207 for annotation in node.annotate:
208 208 line_no, commit_id, get_commit_func, line_text = annotation
209 209 if line_no in lines:
210 210 if commit_id not in _commit_cache:
211 211 _commit_cache[commit_id] = get_commit_func()
212 212 commit = _commit_cache[commit_id]
213 213 author = commit.author
214 214 email = commit.author_email
215 215 user = User.get_from_cs_author(author)
216 216 if user:
217 217 user_counts[user.user_id] = user_counts.get(user.user_id, 0) + 1
218 218 author_counts[author] = author_counts.get(author, 0) + 1
219 219 email_counts[email] = email_counts.get(email, 0) + 1
220 220
221 221 log.debug('Default reviewers processing finished')
222 222
223 223 return {
224 224 'commits': commits,
225 225 'files': all_files_changes,
226 226 'stats': stats,
227 227 'ancestor': ancestor_id,
228 228 # original authors of modified files
229 229 'original_authors': {
230 230 'users': user_counts,
231 231 'authors': author_counts,
232 232 'emails': email_counts,
233 233 },
234 234 'commit_authors': commit_authors
235 235 }
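# A hedged call sketch; the repo objects and refs are assumptions:
#
#   info = get_diff_info(source_repo, source_ref, target_repo, target_ref,
#                        get_authors=True)
#   info['stats']             # -> [total_added, total_deleted]
#   info['original_authors']  # -> per-user/author/email counts used by
#                             #    default-reviewer rules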
236 236
237 237
238 238 class PullRequestModel(BaseModel):
239 239
240 240 cls = PullRequest
241 241
242 242 DIFF_CONTEXT = diffs.DEFAULT_CONTEXT
243 243
244 244 UPDATE_STATUS_MESSAGES = {
245 245 UpdateFailureReason.NONE: lazy_ugettext(
246 246 'Pull request update successful.'),
247 247 UpdateFailureReason.UNKNOWN: lazy_ugettext(
248 248 'Pull request update failed because of an unknown error.'),
249 249 UpdateFailureReason.NO_CHANGE: lazy_ugettext(
250 250 'No update needed because the source and target have not changed.'),
251 251 UpdateFailureReason.WRONG_REF_TYPE: lazy_ugettext(
252 252 'Pull request cannot be updated because the reference type is '
253 253 'not supported for an update. Only Branch, Tag or Bookmark is allowed.'),
254 254 UpdateFailureReason.MISSING_TARGET_REF: lazy_ugettext(
255 255 'This pull request cannot be updated because the target '
256 256 'reference is missing.'),
257 257 UpdateFailureReason.MISSING_SOURCE_REF: lazy_ugettext(
258 258 'This pull request cannot be updated because the source '
259 259 'reference is missing.'),
260 260 }
261 261 REF_TYPES = ['bookmark', 'book', 'tag', 'branch']
262 262 UPDATABLE_REF_TYPES = ['bookmark', 'book', 'branch']
263 263
264 264 def __get_pull_request(self, pull_request):
265 265 return self._get_instance((
266 266 PullRequest, PullRequestVersion), pull_request)
267 267
268 268 def _check_perms(self, perms, pull_request, user, api=False):
269 269 if not api:
270 270 return h.HasRepoPermissionAny(*perms)(
271 271 user=user, repo_name=pull_request.target_repo.repo_name)
272 272 else:
273 273 return h.HasRepoPermissionAnyApi(*perms)(
274 274 user=user, repo_name=pull_request.target_repo.repo_name)
275 275
276 276 def check_user_read(self, pull_request, user, api=False):
277 277 _perms = ('repository.admin', 'repository.write', 'repository.read',)
278 278 return self._check_perms(_perms, pull_request, user, api)
279 279
280 280 def check_user_merge(self, pull_request, user, api=False):
281 281 _perms = ('repository.admin', 'repository.write', 'hg.admin',)
282 282 return self._check_perms(_perms, pull_request, user, api)
283 283
284 284 def check_user_update(self, pull_request, user, api=False):
285 285 owner = user.user_id == pull_request.user_id
286 286 return self.check_user_merge(pull_request, user, api) or owner
287 287
288 288 def check_user_delete(self, pull_request, user):
289 289 owner = user.user_id == pull_request.user_id
290 290 _perms = ('repository.admin',)
291 291 return self._check_perms(_perms, pull_request, user) or owner
292 292
293 293 def is_user_reviewer(self, pull_request, user):
294 294 return user.user_id in [
295 295 x.user_id for x in
296 296 pull_request.get_pull_request_reviewers(PullRequestReviewers.ROLE_REVIEWER)
297 297 if x.user
298 298 ]
299 299
300 300 def check_user_change_status(self, pull_request, user, api=False):
301 301 return self.check_user_update(pull_request, user, api) \
302 302 or self.is_user_reviewer(pull_request, user)
303 303
304 304 def check_user_comment(self, pull_request, user):
305 305 owner = user.user_id == pull_request.user_id
306 306 return self.check_user_read(pull_request, user) or owner
307 307
308 308 def get(self, pull_request):
309 309 return self.__get_pull_request(pull_request)
310 310
311 311 def _prepare_get_all_query(self, repo_name, search_q=None, source=False,
312 312 statuses=None, opened_by=None, order_by=None,
313 313 order_dir='desc', only_created=False):
314 314 repo = None
315 315 if repo_name:
316 316 repo = self._get_repo(repo_name)
317 317
318 318 q = PullRequest.query()
319 319
320 320 if search_q:
321 321 like_expression = u'%{}%'.format(safe_unicode(search_q))
322 322 q = q.join(User, User.user_id == PullRequest.user_id)
323 323 q = q.filter(or_(
324 324 cast(PullRequest.pull_request_id, String).ilike(like_expression),
325 325 User.username.ilike(like_expression),
326 326 PullRequest.title.ilike(like_expression),
327 327 PullRequest.description.ilike(like_expression),
328 328 ))
329 329
330 330 # source or target
331 331 if repo and source:
332 332 q = q.filter(PullRequest.source_repo == repo)
333 333 elif repo:
334 334 q = q.filter(PullRequest.target_repo == repo)
335 335
336 336 # closed,opened
337 337 if statuses:
338 338 q = q.filter(PullRequest.status.in_(statuses))
339 339
340 340 # opened by filter
341 341 if opened_by:
342 342 q = q.filter(PullRequest.user_id.in_(opened_by))
343 343
344 344 # only get those that are in "created" state
345 345 if only_created:
346 346 q = q.filter(PullRequest.pull_request_state == PullRequest.STATE_CREATED)
347 347
348 if order_by:
349 348 order_map = {
350 349 'name_raw': PullRequest.pull_request_id,
351 350 'id': PullRequest.pull_request_id,
352 351 'title': PullRequest.title,
353 352 'updated_on_raw': PullRequest.updated_on,
354 353 'target_repo': PullRequest.target_repo_id
355 354 }
355 if order_by and order_by in order_map:
356 356 if order_dir == 'asc':
357 357 q = q.order_by(order_map[order_by].asc())
358 358 else:
359 359 q = q.order_by(order_map[order_by].desc())
360 360
361 361 return q
362 362
363 363 def count_all(self, repo_name, search_q=None, source=False, statuses=None,
364 364 opened_by=None):
365 365 """
366 366 Count the number of pull requests for a specific repository.
367 367
368 368 :param repo_name: target or source repo
369 369 :param search_q: filter by text
370 370 :param source: boolean flag to specify if repo_name refers to source
371 371 :param statuses: list of pull request statuses
372 372 :param opened_by: author user of the pull request
373 373 :returns: int number of pull requests
374 374 """
375 375 q = self._prepare_get_all_query(
376 376 repo_name, search_q=search_q, source=source, statuses=statuses,
377 377 opened_by=opened_by)
378 378
379 379 return q.count()
380 380
381 381 def get_all(self, repo_name, search_q=None, source=False, statuses=None,
382 382 opened_by=None, offset=0, length=None, order_by=None, order_dir='desc'):
383 383 """
384 384 Get all pull requests for a specific repository.
385 385
386 386 :param repo_name: target or source repo
387 387 :param search_q: filter by text
388 388 :param source: boolean flag to specify if repo_name refers to source
389 389 :param statuses: list of pull request statuses
390 390 :param opened_by: author user of the pull request
391 391 :param offset: pagination offset
392 392 :param length: length of returned list
393 393 :param order_by: order of the returned list
394 394 :param order_dir: 'asc' or 'desc' ordering direction
395 395 :returns: list of pull requests
396 396 """
397 397 q = self._prepare_get_all_query(
398 398 repo_name, search_q=search_q, source=source, statuses=statuses,
399 399 opened_by=opened_by, order_by=order_by, order_dir=order_dir)
400 400
401 401 if length:
402 402 pull_requests = q.limit(length).offset(offset).all()
403 403 else:
404 404 pull_requests = q.all()
405 405
406 406 return pull_requests
407 407
408 408 def count_awaiting_review(self, repo_name, search_q=None, statuses=None):
409 409 """
410 410 Count the number of pull requests for a specific repository that are
411 411 awaiting review.
412 412
413 413 :param repo_name: target or source repo
414 414 :param search_q: filter by text
415 415 :param statuses: list of pull request statuses
416 416 :returns: int number of pull requests
417 417 """
418 418 pull_requests = self.get_awaiting_review(
419 419 repo_name, search_q=search_q, statuses=statuses)
420 420
421 421 return len(pull_requests)
422 422
423 423 def get_awaiting_review(self, repo_name, search_q=None, statuses=None,
424 424 offset=0, length=None, order_by=None, order_dir='desc'):
425 425 """
426 426 Get all pull requests for a specific repository that are awaiting
427 427 review.
428 428
429 429 :param repo_name: target or source repo
430 430 :param search_q: filter by text
431 431 :param statuses: list of pull request statuses
432 432 :param offset: pagination offset
433 433 :param length: length of returned list
434 434 :param order_by: order of the returned list
435 435 :param order_dir: 'asc' or 'desc' ordering direction
436 436 :returns: list of pull requests
437 437 """
438 438 pull_requests = self.get_all(
439 439 repo_name, search_q=search_q, statuses=statuses,
440 440 order_by=order_by, order_dir=order_dir)
441 441
442 442 _filtered_pull_requests = []
443 443 for pr in pull_requests:
444 444 status = pr.calculated_review_status()
445 445 if status in [ChangesetStatus.STATUS_NOT_REVIEWED,
446 446 ChangesetStatus.STATUS_UNDER_REVIEW]:
447 447 _filtered_pull_requests.append(pr)
448 448 if length:
449 449 return _filtered_pull_requests[offset:offset+length]
450 450 else:
451 451 return _filtered_pull_requests
452 452
453 453 def _prepare_awaiting_my_review_review_query(
454 454 self, repo_name, user_id, search_q=None, statuses=None,
455 455 order_by=None, order_dir='desc'):
456 456
457 457 for_review_statuses = [
458 458 ChangesetStatus.STATUS_UNDER_REVIEW, ChangesetStatus.STATUS_NOT_REVIEWED
459 459 ]
460 460
461 461 pull_request_alias = aliased(PullRequest)
462 462 status_alias = aliased(ChangesetStatus)
463 463 reviewers_alias = aliased(PullRequestReviewers)
464 464 repo_alias = aliased(Repository)
465 465
466 466 last_ver_subq = Session()\
467 467 .query(func.min(ChangesetStatus.version)) \
468 468 .filter(ChangesetStatus.pull_request_id == reviewers_alias.pull_request_id)\
469 469 .filter(ChangesetStatus.user_id == reviewers_alias.user_id) \
470 470 .subquery()
471 471
472 472 q = Session().query(pull_request_alias) \
473 473 .options(lazyload(pull_request_alias.author)) \
474 474 .join(reviewers_alias,
475 475 reviewers_alias.pull_request_id == pull_request_alias.pull_request_id) \
476 476 .join(repo_alias,
477 477 repo_alias.repo_id == pull_request_alias.target_repo_id) \
478 478 .outerjoin(status_alias,
479 479 and_(status_alias.user_id == reviewers_alias.user_id,
480 480 status_alias.pull_request_id == reviewers_alias.pull_request_id)) \
481 481 .filter(or_(status_alias.version == null(),
482 482 status_alias.version == last_ver_subq)) \
483 483 .filter(reviewers_alias.user_id == user_id) \
484 484 .filter(repo_alias.repo_name == repo_name) \
485 485 .filter(or_(status_alias.status == null(), status_alias.status.in_(for_review_statuses))) \
486 486 .group_by(pull_request_alias)
487 487
488 488 # closed,opened
489 489 if statuses:
490 490 q = q.filter(pull_request_alias.status.in_(statuses))
491 491
492 492 if search_q:
493 493 like_expression = u'%{}%'.format(safe_unicode(search_q))
494 494 q = q.join(User, User.user_id == pull_request_alias.user_id)
495 495 q = q.filter(or_(
496 496 cast(pull_request_alias.pull_request_id, String).ilike(like_expression),
497 497 User.username.ilike(like_expression),
498 498 pull_request_alias.title.ilike(like_expression),
499 499 pull_request_alias.description.ilike(like_expression),
500 500 ))
501 501
502 if order_by:
503 502 order_map = {
504 503 'name_raw': pull_request_alias.pull_request_id,
505 504 'title': pull_request_alias.title,
506 505 'updated_on_raw': pull_request_alias.updated_on,
507 506 'target_repo': pull_request_alias.target_repo_id
508 507 }
508 if order_by and order_by in order_map:
509 509 if order_dir == 'asc':
510 510 q = q.order_by(order_map[order_by].asc())
511 511 else:
512 512 q = q.order_by(order_map[order_by].desc())
513 513
514 514 return q
515 515
516 516 def count_awaiting_my_review(self, repo_name, user_id, search_q=None, statuses=None):
517 517 """
518 518 Count the number of pull requests for a specific repository that are
519 519 awaiting review from a specific user.
520 520
521 521 :param repo_name: target or source repo
522 522 :param user_id: reviewer user of the pull request
523 523 :param search_q: filter by text
524 524 :param statuses: list of pull request statuses
525 525 :returns: int number of pull requests
526 526 """
527 527 q = self._prepare_awaiting_my_review_review_query(
528 528 repo_name, user_id, search_q=search_q, statuses=statuses)
529 529 return q.count()
530 530
531 531 def get_awaiting_my_review(self, repo_name, user_id, search_q=None, statuses=None,
532 532 offset=0, length=None, order_by=None, order_dir='desc'):
533 533 """
534 534 Get all pull requests for a specific repository that are awaiting
535 535 review from a specific user.
536 536
537 537 :param repo_name: target or source repo
538 538 :param user_id: reviewer user of the pull request
539 539 :param search_q: filter by text
540 540 :param statuses: list of pull request statuses
541 541 :param offset: pagination offset
542 542 :param length: length of returned list
543 543 :param order_by: order of the returned list
544 544 :param order_dir: 'asc' or 'desc' ordering direction
545 545 :returns: list of pull requests
546 546 """
547 547
548 548 q = self._prepare_awaiting_my_review_review_query(
549 549 repo_name, user_id, search_q=search_q, statuses=statuses,
550 550 order_by=order_by, order_dir=order_dir)
551 551
552 552 if length:
553 553 pull_requests = q.limit(length).offset(offset).all()
554 554 else:
555 555 pull_requests = q.all()
556 556
557 557 return pull_requests
558 558
559 559 def _prepare_im_participating_query(self, user_id=None, statuses=None, query='',
560 560 order_by=None, order_dir='desc'):
561 561 """
562 562 return a query of pull-requests where the user is the creator, or is added as a reviewer
563 563 """
564 564 q = PullRequest.query()
565 565 if user_id:
566 566 reviewers_subquery = Session().query(
567 567 PullRequestReviewers.pull_request_id).filter(
568 568 PullRequestReviewers.user_id == user_id).subquery()
569 569 user_filter = or_(
570 570 PullRequest.user_id == user_id,
571 571 PullRequest.pull_request_id.in_(reviewers_subquery)
572 572 )
573 573 q = PullRequest.query().filter(user_filter)
574 574
575 575 # closed,opened
576 576 if statuses:
577 577 q = q.filter(PullRequest.status.in_(statuses))
578 578
579 579 if query:
580 580 like_expression = u'%{}%'.format(safe_unicode(query))
581 581 q = q.join(User, User.user_id == PullRequest.user_id)
582 582 q = q.filter(or_(
583 583 cast(PullRequest.pull_request_id, String).ilike(like_expression),
584 584 User.username.ilike(like_expression),
585 585 PullRequest.title.ilike(like_expression),
586 586 PullRequest.description.ilike(like_expression),
587 587 ))
588 if order_by:
588
589 589 order_map = {
590 590 'name_raw': PullRequest.pull_request_id,
591 591 'title': PullRequest.title,
592 592 'updated_on_raw': PullRequest.updated_on,
593 593 'target_repo': PullRequest.target_repo_id
594 594 }
595 if order_by and order_by in order_map:
595 596 if order_dir == 'asc':
596 597 q = q.order_by(order_map[order_by].asc())
597 598 else:
598 599 q = q.order_by(order_map[order_by].desc())
599 600
600 601 return q
601 602
602 603 def count_im_participating_in(self, user_id=None, statuses=None, query=''):
603 604 q = self._prepare_im_participating_query(user_id, statuses=statuses, query=query)
604 605 return q.count()
605 606
606 607 def get_im_participating_in(
607 608 self, user_id=None, statuses=None, query='', offset=0,
608 609 length=None, order_by=None, order_dir='desc'):
609 610 """
610 611 Get all pull requests that I'm participating in as a reviewer, or that I have opened
611 612 """
612 613
613 614 q = self._prepare_im_participating_query(
614 615 user_id, statuses=statuses, query=query, order_by=order_by,
615 616 order_dir=order_dir)
616 617
617 618 if length:
618 619 pull_requests = q.limit(length).offset(offset).all()
619 620 else:
620 621 pull_requests = q.all()
621 622
622 623 return pull_requests
623 624
624 625 def _prepare_participating_in_for_review_query(
625 626 self, user_id, statuses=None, query='', order_by=None, order_dir='desc'):
626 627
627 628 for_review_statuses = [
628 629 ChangesetStatus.STATUS_UNDER_REVIEW, ChangesetStatus.STATUS_NOT_REVIEWED
629 630 ]
630 631
631 632 pull_request_alias = aliased(PullRequest)
632 633 status_alias = aliased(ChangesetStatus)
633 634 reviewers_alias = aliased(PullRequestReviewers)
634 635
635 636 last_ver_subq = Session()\
636 637 .query(func.min(ChangesetStatus.version)) \
637 638 .filter(ChangesetStatus.pull_request_id == reviewers_alias.pull_request_id)\
638 639 .filter(ChangesetStatus.user_id == reviewers_alias.user_id) \
639 640 .subquery()
640 641
641 642 q = Session().query(pull_request_alias) \
642 643 .options(lazyload(pull_request_alias.author)) \
643 644 .join(reviewers_alias,
644 645 reviewers_alias.pull_request_id == pull_request_alias.pull_request_id) \
645 646 .outerjoin(status_alias,
646 647 and_(status_alias.user_id == reviewers_alias.user_id,
647 648 status_alias.pull_request_id == reviewers_alias.pull_request_id)) \
648 649 .filter(or_(status_alias.version == null(),
649 650 status_alias.version == last_ver_subq)) \
650 651 .filter(reviewers_alias.user_id == user_id) \
651 652 .filter(or_(status_alias.status == null(), status_alias.status.in_(for_review_statuses))) \
652 653 .group_by(pull_request_alias)
653 654
654 655 # closed,opened
655 656 if statuses:
656 657 q = q.filter(pull_request_alias.status.in_(statuses))
657 658
658 659 if query:
659 660 like_expression = u'%{}%'.format(safe_unicode(query))
660 661 q = q.join(User, User.user_id == pull_request_alias.user_id)
661 662 q = q.filter(or_(
662 663 cast(pull_request_alias.pull_request_id, String).ilike(like_expression),
663 664 User.username.ilike(like_expression),
664 665 pull_request_alias.title.ilike(like_expression),
665 666 pull_request_alias.description.ilike(like_expression),
666 667 ))
667 668
668 if order_by:
669 669 order_map = {
670 670 'name_raw': pull_request_alias.pull_request_id,
671 671 'title': pull_request_alias.title,
672 672 'updated_on_raw': pull_request_alias.updated_on,
673 673 'target_repo': pull_request_alias.target_repo_id
674 674 }
675 if order_by and order_by in order_map:
675 676 if order_dir == 'asc':
676 677 q = q.order_by(order_map[order_by].asc())
677 678 else:
678 679 q = q.order_by(order_map[order_by].desc())
679 680
680 681 return q
681 682
682 683 def count_im_participating_in_for_review(self, user_id, statuses=None, query=''):
683 684 q = self._prepare_participating_in_for_review_query(user_id, statuses=statuses, query=query)
684 685 return q.count()
685 686
686 687 def get_im_participating_in_for_review(
687 688 self, user_id, statuses=None, query='', offset=0,
688 689 length=None, order_by=None, order_dir='desc'):
689 690 """
690 691 Get all pull requests that need user approval or rejection
691 692 """
692 693
693 694 q = self._prepare_participating_in_for_review_query(
694 695 user_id, statuses=statuses, query=query, order_by=order_by,
695 696 order_dir=order_dir)
696 697
697 698 if length:
698 699 pull_requests = q.limit(length).offset(offset).all()
699 700 else:
700 701 pull_requests = q.all()
701 702
702 703 return pull_requests
703 704
704 705 def get_versions(self, pull_request):
705 706 """
706 707 returns versions of the pull request sorted by ID ascending
707 708 """
708 709 return PullRequestVersion.query()\
709 710 .filter(PullRequestVersion.pull_request == pull_request)\
710 711 .order_by(PullRequestVersion.pull_request_version_id.asc())\
711 712 .all()
712 713
713 714 def get_pr_version(self, pull_request_id, version=None):
714 715 at_version = None
715 716
716 717 if version and version == 'latest':
717 718 pull_request_ver = PullRequest.get(pull_request_id)
718 719 pull_request_obj = pull_request_ver
719 720 _org_pull_request_obj = pull_request_obj
720 721 at_version = 'latest'
721 722 elif version:
722 723 pull_request_ver = PullRequestVersion.get_or_404(version)
723 724 pull_request_obj = pull_request_ver
724 725 _org_pull_request_obj = pull_request_ver.pull_request
725 726 at_version = pull_request_ver.pull_request_version_id
726 727 else:
727 728 _org_pull_request_obj = pull_request_obj = PullRequest.get_or_404(
728 729 pull_request_id)
729 730
730 731 pull_request_display_obj = PullRequest.get_pr_display_object(
731 732 pull_request_obj, _org_pull_request_obj)
732 733
733 734 return _org_pull_request_obj, pull_request_obj, \
734 735 pull_request_display_obj, at_version
735 736
736 737 def pr_commits_versions(self, versions):
737 738 """
738 739 Maps the pull-request commits onto all known PR versions. This way we can
739 740 obtain, for each commit, the PR versions it appears in.
740 741 """
741 742 commit_versions = collections.defaultdict(list)
742 743 num_versions = [x.pull_request_version_id for x in versions]
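# get_index_from_version translates a raw version id into its ordinal
# position among all known version ids (the v1, v2, ... numbering)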
743 744 for ver in versions:
744 745 for commit_id in ver.revisions:
745 746 ver_idx = ChangesetComment.get_index_from_version(
746 747 ver.pull_request_version_id, num_versions=num_versions)
747 748 commit_versions[commit_id].append(ver_idx)
748 749 return commit_versions
749 750
750 751 def create(self, created_by, source_repo, source_ref, target_repo,
751 752 target_ref, revisions, reviewers, observers, title, description=None,
752 753 common_ancestor_id=None,
753 754 description_renderer=None,
754 755 reviewer_data=None, translator=None, auth_user=None):
755 756 translator = translator or get_current_request().translate
756 757
757 758 created_by_user = self._get_user(created_by)
758 759 auth_user = auth_user or created_by_user.AuthUser()
759 760 source_repo = self._get_repo(source_repo)
760 761 target_repo = self._get_repo(target_repo)
761 762
762 763 pull_request = PullRequest()
763 764 pull_request.source_repo = source_repo
764 765 pull_request.source_ref = source_ref
765 766 pull_request.target_repo = target_repo
766 767 pull_request.target_ref = target_ref
767 768 pull_request.revisions = revisions
768 769 pull_request.title = title
769 770 pull_request.description = description
770 771 pull_request.description_renderer = description_renderer
771 772 pull_request.author = created_by_user
772 773 pull_request.reviewer_data = reviewer_data
773 774 pull_request.pull_request_state = pull_request.STATE_CREATING
774 775 pull_request.common_ancestor_id = common_ancestor_id
775 776
776 777 Session().add(pull_request)
777 778 Session().flush()
778 779
779 780 reviewer_ids = set()
780 781 # members / reviewers
781 782 for reviewer_object in reviewers:
782 783 user_id, reasons, mandatory, role, rules = reviewer_object
783 784 user = self._get_user(user_id)
784 785
785 786 # skip duplicates
786 787 if user.user_id in reviewer_ids:
787 788 continue
788 789
789 790 reviewer_ids.add(user.user_id)
790 791
791 792 reviewer = PullRequestReviewers()
792 793 reviewer.user = user
793 794 reviewer.pull_request = pull_request
794 795 reviewer.reasons = reasons
795 796 reviewer.mandatory = mandatory
796 797 reviewer.role = role
797 798
798 799 # NOTE(marcink): pick only first rule for now
799 800 rule_id = list(rules)[0] if rules else None
800 801 rule = RepoReviewRule.get(rule_id) if rule_id else None
801 802 if rule:
802 803 review_group = rule.user_group_vote_rule(user_id)
803 804 # we check if this particular reviewer is member of a voting group
804 805 if review_group:
805 806 # NOTE(marcink):
806 807 # the user can be a member of more than one group, but we pick the first
807 808 # one, same as the default reviewers algorithm
808 809 review_group = review_group[0]
809 810
810 811 rule_data = {
811 812 'rule_name':
812 813 rule.review_rule_name,
813 814 'rule_user_group_entry_id':
814 815 review_group.repo_review_rule_users_group_id,
815 816 'rule_user_group_name':
816 817 review_group.users_group.users_group_name,
817 818 'rule_user_group_members':
818 819 [x.user.username for x in review_group.users_group.members],
819 820 'rule_user_group_members_id':
820 821 [x.user.user_id for x in review_group.users_group.members],
821 822 }
822 823 # e.g {'vote_rule': -1, 'mandatory': True}
823 824 rule_data.update(review_group.rule_data())
824 825
825 826 reviewer.rule_data = rule_data
826 827
827 828 Session().add(reviewer)
828 829 Session().flush()
829 830
830 831 for observer_object in observers:
831 832 user_id, reasons, mandatory, role, rules = observer_object
832 833 user = self._get_user(user_id)
833 834
834 835 # skip duplicates from reviewers
835 836 if user.user_id in reviewer_ids:
836 837 continue
837 838
838 839 #reviewer_ids.add(user.user_id)
839 840
840 841 observer = PullRequestReviewers()
841 842 observer.user = user
842 843 observer.pull_request = pull_request
843 844 observer.reasons = reasons
844 845 observer.mandatory = mandatory
845 846 observer.role = role
846 847
847 848 # NOTE(marcink): pick only first rule for now
848 849 rule_id = list(rules)[0] if rules else None
849 850 rule = RepoReviewRule.get(rule_id) if rule_id else None
850 851 if rule:
851 852 # TODO(marcink): do we need this for observers ??
852 853 pass
853 854
854 855 Session().add(observer)
855 856 Session().flush()
856 857
857 858 # Set approval status to "Under Review" for all commits which are
858 859 # part of this pull request.
859 860 ChangesetStatusModel().set_status(
860 861 repo=target_repo,
861 862 status=ChangesetStatus.STATUS_UNDER_REVIEW,
862 863 user=created_by_user,
863 864 pull_request=pull_request
864 865 )
865 866 # we commit early at this point. This has to do with the fact
866 867 # that the queries above do some row-locking. Because of that
867 868 # we need to commit and finish the transaction before the validate call
868 869 # below, which for large repos could take long and cause long row locks
869 870 Session().commit()
870 871
871 872 # prepare workspace, and run initial merge simulation. Set state during that
872 873 # operation
873 874 pull_request = PullRequest.get(pull_request.pull_request_id)
874 875
875 876 # set state to merging for the merge simulation and, if it finished, to
876 877 # created, to mark that the simulation worked fine
877 878 with pull_request.set_state(PullRequest.STATE_MERGING,
878 879 final_state=PullRequest.STATE_CREATED) as state_obj:
879 880 MergeCheck.validate(
880 881 pull_request, auth_user=auth_user, translator=translator)
881 882
882 883 self.notify_reviewers(pull_request, reviewer_ids, created_by_user)
883 884 self.trigger_pull_request_hook(pull_request, created_by_user, 'create')
884 885
885 886 creation_data = pull_request.get_api_data(with_merge_state=False)
886 887 self._log_audit_action(
887 888 'repo.pull_request.create', {'data': creation_data},
888 889 auth_user, pull_request)
889 890
890 891 return pull_request
891 892
892 893 def trigger_pull_request_hook(self, pull_request, user, action, data=None):
893 894 pull_request = self.__get_pull_request(pull_request)
894 895 target_scm = pull_request.target_repo.scm_instance()
895 896 if action == 'create':
896 897 trigger_hook = hooks_utils.trigger_create_pull_request_hook
897 898 elif action == 'merge':
898 899 trigger_hook = hooks_utils.trigger_merge_pull_request_hook
899 900 elif action == 'close':
900 901 trigger_hook = hooks_utils.trigger_close_pull_request_hook
901 902 elif action == 'review_status_change':
902 903 trigger_hook = hooks_utils.trigger_review_pull_request_hook
903 904 elif action == 'update':
904 905 trigger_hook = hooks_utils.trigger_update_pull_request_hook
905 906 elif action == 'comment':
906 907 trigger_hook = hooks_utils.trigger_comment_pull_request_hook
907 908 elif action == 'comment_edit':
908 909 trigger_hook = hooks_utils.trigger_comment_pull_request_edit_hook
909 910 else:
910 911 return
911 912
912 913 log.debug('Handling pull_request %s trigger_pull_request_hook with action %s and hook: %s',
913 914 pull_request, action, trigger_hook)
914 915 trigger_hook(
915 916 username=user.username,
916 917 repo_name=pull_request.target_repo.repo_name,
917 918 repo_type=target_scm.alias,
918 919 pull_request=pull_request,
919 920 data=data)
920 921
921 922 def _get_commit_ids(self, pull_request):
922 923 """
923 924 Return the commit ids of the merged pull request.
924 925
925 926 This method does not yet deal correctly with the lack of autoupdates
926 927 nor with implicit target updates.
927 928 For example: if a commit in the source repo is already in the target, it
928 929 will be reported anyway.
929 930 """
930 931 merge_rev = pull_request.merge_rev
931 932 if merge_rev is None:
932 933 raise ValueError('This pull request was not merged yet')
933 934
934 935 commit_ids = list(pull_request.revisions)
935 936 if merge_rev not in commit_ids:
936 937 commit_ids.append(merge_rev)
937 938
938 939 return commit_ids
939 940
940 941 def merge_repo(self, pull_request, user, extras):
941 942 log.debug("Merging pull request %s", pull_request.pull_request_id)
942 943 extras['user_agent'] = 'internal-merge'
943 944 merge_state = self._merge_pull_request(pull_request, user, extras)
944 945 if merge_state.executed:
945 946 log.debug("Merge was successful, updating the pull request comments.")
946 947 self._comment_and_close_pr(pull_request, user, merge_state)
947 948
948 949 self._log_audit_action(
949 950 'repo.pull_request.merge',
950 951 {'merge_state': merge_state.__dict__},
951 952 user, pull_request)
952 953
953 954 else:
954 955 log.warning("Merge failed, not updating the pull request.")
955 956 return merge_state
956 957
957 958 def _merge_pull_request(self, pull_request, user, extras, merge_msg=None):
958 959 target_vcs = pull_request.target_repo.scm_instance()
959 960 source_vcs = pull_request.source_repo.scm_instance()
960 961
961 962 message = safe_unicode(merge_msg or vcs_settings.MERGE_MESSAGE_TMPL).format(
962 963 pr_id=pull_request.pull_request_id,
963 964 pr_title=pull_request.title,
964 965 source_repo=source_vcs.name,
965 966 source_ref_name=pull_request.source_ref_parts.name,
966 967 target_repo=target_vcs.name,
967 968 target_ref_name=pull_request.target_ref_parts.name,
968 969 )
969 970
970 971 workspace_id = self._workspace_id(pull_request)
971 972 repo_id = pull_request.target_repo.repo_id
972 973 use_rebase = self._use_rebase_for_merging(pull_request)
973 974 close_branch = self._close_branch_before_merging(pull_request)
974 975 user_name = self._user_name_for_merging(pull_request, user)
975 976
976 977 target_ref = self._refresh_reference(
977 978 pull_request.target_ref_parts, target_vcs)
978 979
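# start the hooks callback daemon so hooks fired inside the vcs layer
# during the merge can call back into this process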
979 980 callback_daemon, extras = prepare_callback_daemon(
980 981 extras, protocol=vcs_settings.HOOKS_PROTOCOL,
981 982 host=vcs_settings.HOOKS_HOST,
982 983 use_direct_calls=vcs_settings.HOOKS_DIRECT_CALLS)
983 984
984 985 with callback_daemon:
985 986 # TODO: johbo: Implement a clean way to run a config_override
986 987 # for a single call.
987 988 target_vcs.config.set(
988 989 'rhodecode', 'RC_SCM_DATA', json.dumps(extras))
989 990
990 991 merge_state = target_vcs.merge(
991 992 repo_id, workspace_id, target_ref, source_vcs,
992 993 pull_request.source_ref_parts,
993 994 user_name=user_name, user_email=user.email,
994 995 message=message, use_rebase=use_rebase,
995 996 close_branch=close_branch)
996 997 return merge_state
997 998
998 999 def _comment_and_close_pr(self, pull_request, user, merge_state, close_msg=None):
999 1000 pull_request.merge_rev = merge_state.merge_ref.commit_id
1000 1001 pull_request.updated_on = datetime.datetime.now()
1001 1002 close_msg = close_msg or 'Pull request merged and closed'
1002 1003
1003 1004 CommentsModel().create(
1004 1005 text=safe_unicode(close_msg),
1005 1006 repo=pull_request.target_repo.repo_id,
1006 1007 user=user.user_id,
1007 1008 pull_request=pull_request.pull_request_id,
1008 1009 f_path=None,
1009 1010 line_no=None,
1010 1011 closing_pr=True
1011 1012 )
1012 1013
1013 1014 Session().add(pull_request)
1014 1015 Session().flush()
1015 1016 # TODO: paris: replace invalidation with a less radical solution
1016 1017 ScmModel().mark_for_invalidation(
1017 1018 pull_request.target_repo.repo_name)
1018 1019 self.trigger_pull_request_hook(pull_request, user, 'merge')
1019 1020
1020 1021 def has_valid_update_type(self, pull_request):
1021 1022 source_ref_type = pull_request.source_ref_parts.type
1022 1023 return source_ref_type in self.REF_TYPES
1023 1024
1024 1025 def get_flow_commits(self, pull_request):
1025 1026
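# resolve the tip commits on both sides of the pull request; a missing
# ref on either side is raised as SourceRefMissing / TargetRefMissing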
1026 1027 # source repo
1027 1028 source_ref_name = pull_request.source_ref_parts.name
1028 1029 source_ref_type = pull_request.source_ref_parts.type
1029 1030 source_ref_id = pull_request.source_ref_parts.commit_id
1030 1031 source_repo = pull_request.source_repo.scm_instance()
1031 1032
1032 1033 try:
1033 1034 if source_ref_type in self.REF_TYPES:
1034 1035 source_commit = source_repo.get_commit(
1035 1036 source_ref_name, reference_obj=pull_request.source_ref_parts)
1036 1037 else:
1037 1038 source_commit = source_repo.get_commit(source_ref_id)
1038 1039 except CommitDoesNotExistError:
1039 1040 raise SourceRefMissing()
1040 1041
1041 1042 # target repo
1042 1043 target_ref_name = pull_request.target_ref_parts.name
1043 1044 target_ref_type = pull_request.target_ref_parts.type
1044 1045 target_ref_id = pull_request.target_ref_parts.commit_id
1045 1046 target_repo = pull_request.target_repo.scm_instance()
1046 1047
1047 1048 try:
1048 1049 if target_ref_type in self.REF_TYPES:
1049 1050 target_commit = target_repo.get_commit(
1050 1051 target_ref_name, reference_obj=pull_request.target_ref_parts)
1051 1052 else:
1052 1053 target_commit = target_repo.get_commit(target_ref_id)
1053 1054 except CommitDoesNotExistError:
1054 1055 raise TargetRefMissing()
1055 1056
1056 1057 return source_commit, target_commit
1057 1058
1058 1059 def update_commits(self, pull_request, updating_user):
1059 1060 """
1060 1061 Get the updated list of commits for the pull request
1061 1062 and return the new pull request version and the list
1062 1063 of commits processed by this update action
1063 1064
1064 1065 updating_user is the user_object who triggered the update
1065 1066 """
1066 1067 pull_request = self.__get_pull_request(pull_request)
1067 1068 source_ref_type = pull_request.source_ref_parts.type
1068 1069 source_ref_name = pull_request.source_ref_parts.name
1069 1070 source_ref_id = pull_request.source_ref_parts.commit_id
1070 1071
1071 1072 target_ref_type = pull_request.target_ref_parts.type
1072 1073 target_ref_name = pull_request.target_ref_parts.name
1073 1074 target_ref_id = pull_request.target_ref_parts.commit_id
1074 1075
1075 1076 if not self.has_valid_update_type(pull_request):
1076 1077 log.debug("Skipping update of pull request %s due to ref type: %s",
1077 1078 pull_request, source_ref_type)
1078 1079 return UpdateResponse(
1079 1080 executed=False,
1080 1081 reason=UpdateFailureReason.WRONG_REF_TYPE,
1081 1082 old=pull_request, new=None, common_ancestor_id=None, commit_changes=None,
1082 1083 source_changed=False, target_changed=False)
1083 1084
1084 1085 try:
1085 1086 source_commit, target_commit = self.get_flow_commits(pull_request)
1086 1087 except SourceRefMissing:
1087 1088 return UpdateResponse(
1088 1089 executed=False,
1089 1090 reason=UpdateFailureReason.MISSING_SOURCE_REF,
1090 1091 old=pull_request, new=None, common_ancestor_id=None, commit_changes=None,
1091 1092 source_changed=False, target_changed=False)
1092 1093 except TargetRefMissing:
1093 1094 return UpdateResponse(
1094 1095 executed=False,
1095 1096 reason=UpdateFailureReason.MISSING_TARGET_REF,
1096 1097 old=pull_request, new=None, common_ancestor_id=None, commit_changes=None,
1097 1098 source_changed=False, target_changed=False)
1098 1099
1099 1100 source_changed = source_ref_id != source_commit.raw_id
1100 1101 target_changed = target_ref_id != target_commit.raw_id
1101 1102
1102 1103 if not (source_changed or target_changed):
1103 1104 log.debug("Nothing changed in pull request %s", pull_request)
1104 1105 return UpdateResponse(
1105 1106 executed=False,
1106 1107 reason=UpdateFailureReason.NO_CHANGE,
1107 1108 old=pull_request, new=None, common_ancestor_id=None, commit_changes=None,
1108 1109 source_changed=source_changed, target_changed=target_changed)
1109 1110
1110 1111 change_in_found = 'target repo' if target_changed else 'source repo'
1111 1112 log.debug('Updating pull request because of change in %s detected',
1112 1113 change_in_found)
1113 1114
1114 1115 # Finally there is a need for an update, in case of source change
1115 1116 # we create a new version, else just an update
1116 1117 if source_changed:
1117 1118 pull_request_version = self._create_version_from_snapshot(pull_request)
1118 1119 self._link_comments_to_version(pull_request_version)
1119 1120 else:
1120 1121 try:
1121 1122 ver = pull_request.versions[-1]
1122 1123 except IndexError:
1123 1124 ver = None
1124 1125
1125 1126 pull_request.pull_request_version_id = \
1126 1127 ver.pull_request_version_id if ver else None
1127 1128 pull_request_version = pull_request
1128 1129
1129 1130 source_repo = pull_request.source_repo.scm_instance()
1130 1131 target_repo = pull_request.target_repo.scm_instance()
1131 1132
1132 1133 # re-compute commit ids
1133 1134 old_commit_ids = pull_request.revisions
1134 1135 pre_load = ["author", "date", "message", "branch"]
1135 1136 commit_ranges = target_repo.compare(
1136 1137 target_commit.raw_id, source_commit.raw_id, source_repo, merge=True,
1137 1138 pre_load=pre_load)
1138 1139
1139 1140 target_ref = target_commit.raw_id
1140 1141 source_ref = source_commit.raw_id
1141 1142 ancestor_commit_id = target_repo.get_common_ancestor(
1142 1143 target_ref, source_ref, source_repo)
1143 1144
1144 1145 if not ancestor_commit_id:
1145 1146 raise ValueError(
1146 1147 'cannot calculate diff info without a common ancestor. '
1147 1148 'Make sure both repositories are related, and have a common forking commit.')
1148 1149
1149 1150 pull_request.common_ancestor_id = ancestor_commit_id
1150 1151
1151 1152 pull_request.source_ref = '%s:%s:%s' % (
1152 1153 source_ref_type, source_ref_name, source_commit.raw_id)
1153 1154 pull_request.target_ref = '%s:%s:%s' % (
1154 1155 target_ref_type, target_ref_name, ancestor_commit_id)
1155 1156
1156 1157 pull_request.revisions = [
1157 1158 commit.raw_id for commit in reversed(commit_ranges)]
1158 1159 pull_request.updated_on = datetime.datetime.now()
1159 1160 Session().add(pull_request)
1160 1161 new_commit_ids = pull_request.revisions
1161 1162
1162 1163 old_diff_data, new_diff_data = self._generate_update_diffs(
1163 1164 pull_request, pull_request_version)
1164 1165
1165 1166 # calculate commit and file changes
1166 1167 commit_changes = self._calculate_commit_id_changes(
1167 1168 old_commit_ids, new_commit_ids)
1168 1169 file_changes = self._calculate_file_changes(
1169 1170 old_diff_data, new_diff_data)
1170 1171
1171 1172 # set comments as outdated if DIFFS changed
1172 1173 CommentsModel().outdate_comments(
1173 1174 pull_request, old_diff_data=old_diff_data,
1174 1175 new_diff_data=new_diff_data)
1175 1176
1176 1177 valid_commit_changes = (commit_changes.added or commit_changes.removed)
1177 1178 file_node_changes = (
1178 1179 file_changes.added or file_changes.modified or file_changes.removed)
1179 1180 pr_has_changes = valid_commit_changes or file_node_changes
1180 1181
1181 1182 # Add an automatic comment to the pull request, in case
1182 1183 # anything has changed
1183 1184 if pr_has_changes:
1184 1185 update_comment = CommentsModel().create(
1185 1186 text=self._render_update_message(ancestor_commit_id, commit_changes, file_changes),
1186 1187 repo=pull_request.target_repo,
1187 1188 user=pull_request.author,
1188 1189 pull_request=pull_request,
1189 1190 send_email=False, renderer=DEFAULT_COMMENTS_RENDERER)
1190 1191
1191 1192 # Update status to "Under Review" for added commits
1192 1193 for commit_id in commit_changes.added:
1193 1194 ChangesetStatusModel().set_status(
1194 1195 repo=pull_request.source_repo,
1195 1196 status=ChangesetStatus.STATUS_UNDER_REVIEW,
1196 1197 comment=update_comment,
1197 1198 user=pull_request.author,
1198 1199 pull_request=pull_request,
1199 1200 revision=commit_id)
1200 1201
1201 1202 # initial commit
1202 1203 Session().commit()
1203 1204
1204 1205 if pr_has_changes:
1205 1206 # send update email to users
1206 1207 try:
1207 1208 self.notify_users(pull_request=pull_request, updating_user=updating_user,
1208 1209 ancestor_commit_id=ancestor_commit_id,
1209 1210 commit_changes=commit_changes,
1210 1211 file_changes=file_changes)
1211 1212 Session().commit()
1212 1213 except Exception:
1213 1214 log.exception('Failed to send email notification to users')
1214 1215 Session().rollback()
1215 1216
1216 1217 log.debug(
1217 1218 'Updated pull request %s, added_ids: %s, common_ids: %s, '
1218 1219 'removed_ids: %s', pull_request.pull_request_id,
1219 1220 commit_changes.added, commit_changes.common, commit_changes.removed)
1220 1221 log.debug(
1221 1222 'Updated pull request with the following file changes: %s',
1222 1223 file_changes)
1223 1224
1224 1225 log.info(
1225 1226 "Updated pull request %s from commit %s to commit %s, "
1226 1227 "stored new version %s of this pull request.",
1227 1228 pull_request.pull_request_id, source_ref_id,
1228 1229 pull_request.source_ref_parts.commit_id,
1229 1230 pull_request_version.pull_request_version_id)
1230 1231
1231 1232 self.trigger_pull_request_hook(pull_request, pull_request.author, 'update')
1232 1233
1233 1234 return UpdateResponse(
1234 1235 executed=True, reason=UpdateFailureReason.NONE,
1235 1236 old=pull_request, new=pull_request_version,
1236 1237 common_ancestor_id=ancestor_commit_id, commit_changes=commit_changes,
1237 1238 source_changed=source_changed, target_changed=target_changed)
1238 1239
1239 1240 def _create_version_from_snapshot(self, pull_request):
1240 1241 version = PullRequestVersion()
1241 1242 version.title = pull_request.title
1242 1243 version.description = pull_request.description
1243 1244 version.status = pull_request.status
1244 1245 version.pull_request_state = pull_request.pull_request_state
1245 1246 version.created_on = datetime.datetime.now()
1246 1247 version.updated_on = pull_request.updated_on
1247 1248 version.user_id = pull_request.user_id
1248 1249 version.source_repo = pull_request.source_repo
1249 1250 version.source_ref = pull_request.source_ref
1250 1251 version.target_repo = pull_request.target_repo
1251 1252 version.target_ref = pull_request.target_ref
1252 1253
1253 1254 version._last_merge_source_rev = pull_request._last_merge_source_rev
1254 1255 version._last_merge_target_rev = pull_request._last_merge_target_rev
1255 1256 version.last_merge_status = pull_request.last_merge_status
1256 1257 version.last_merge_metadata = pull_request.last_merge_metadata
1257 1258 version.shadow_merge_ref = pull_request.shadow_merge_ref
1258 1259 version.merge_rev = pull_request.merge_rev
1259 1260 version.reviewer_data = pull_request.reviewer_data
1260 1261
1261 1262 version.revisions = pull_request.revisions
1262 1263 version.common_ancestor_id = pull_request.common_ancestor_id
1263 1264 version.pull_request = pull_request
1264 1265 Session().add(version)
1265 1266 Session().flush()
1266 1267
1267 1268 return version
1268 1269
1269 1270 def _generate_update_diffs(self, pull_request, pull_request_version):
1270 1271
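# build diffs for the previous version and for the current pull request
# state; comparing the two drives outdating of inline comments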
1271 1272 diff_context = (
1272 1273 self.DIFF_CONTEXT +
1273 1274 CommentsModel.needed_extra_diff_context())
1274 1275 hide_whitespace_changes = False
1275 1276 source_repo = pull_request_version.source_repo
1276 1277 source_ref_id = pull_request_version.source_ref_parts.commit_id
1277 1278 target_ref_id = pull_request_version.target_ref_parts.commit_id
1278 1279 old_diff = self._get_diff_from_pr_or_version(
1279 1280 source_repo, source_ref_id, target_ref_id,
1280 1281 hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)
1281 1282
1282 1283 source_repo = pull_request.source_repo
1283 1284 source_ref_id = pull_request.source_ref_parts.commit_id
1284 1285 target_ref_id = pull_request.target_ref_parts.commit_id
1285 1286
1286 1287 new_diff = self._get_diff_from_pr_or_version(
1287 1288 source_repo, source_ref_id, target_ref_id,
1288 1289 hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)
1289 1290
1290 1291 old_diff_data = diffs.DiffProcessor(old_diff)
1291 1292 old_diff_data.prepare()
1292 1293 new_diff_data = diffs.DiffProcessor(new_diff)
1293 1294 new_diff_data.prepare()
1294 1295
1295 1296 return old_diff_data, new_diff_data
1296 1297
1297 1298 def _link_comments_to_version(self, pull_request_version):
1298 1299 """
1299 1300 Link all unlinked comments of this pull request to the given version.
1300 1301
1301 1302 :param pull_request_version: The `PullRequestVersion` to which
1302 1303 the comments shall be linked.
1303 1304
1304 1305 """
1305 1306 pull_request = pull_request_version.pull_request
1306 1307 comments = ChangesetComment.query()\
1307 1308 .filter(
1308 1309 # TODO: johbo: Should we query for the repo at all here?
1309 1310 # Pending decision on how comments of PRs are to be related
1310 1311 # to either the source repo, the target repo or no repo at all.
1311 1312 ChangesetComment.repo_id == pull_request.target_repo.repo_id,
1312 1313 ChangesetComment.pull_request == pull_request,
1313 1314 ChangesetComment.pull_request_version == None)\
1314 1315 .order_by(ChangesetComment.comment_id.asc())
1315 1316
1316 1317 # TODO: johbo: Find out why this breaks if it is done in a bulk
1317 1318 # operation.
1318 1319 for comment in comments:
1319 1320 comment.pull_request_version_id = (
1320 1321 pull_request_version.pull_request_version_id)
1321 1322 Session().add(comment)
1322 1323
1323 1324 def _calculate_commit_id_changes(self, old_ids, new_ids):
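# bucket commits into added/common/removed while preserving the order of
# new_ids (for added and common) and old_ids (for removed)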
1324 1325 added = [x for x in new_ids if x not in old_ids]
1325 1326 common = [x for x in new_ids if x in old_ids]
1326 1327 removed = [x for x in old_ids if x not in new_ids]
1327 1328 total = new_ids
1328 1329 return ChangeTuple(added, common, removed, total)
1329 1330
1330 1331 def _calculate_file_changes(self, old_diff_data, new_diff_data):
1331 1332
1332 1333 old_files = OrderedDict()
1333 1334 for diff_data in old_diff_data.parsed_diff:
1334 1335 old_files[diff_data['filename']] = md5_safe(diff_data['raw_diff'])
1335 1336
1336 1337 added_files = []
1337 1338 modified_files = []
1338 1339 removed_files = []
1339 1340 for diff_data in new_diff_data.parsed_diff:
1340 1341 new_filename = diff_data['filename']
1341 1342 new_hash = md5_safe(diff_data['raw_diff'])
1342 1343
1343 1344 old_hash = old_files.get(new_filename)
1344 1345 if not old_hash:
1345 1346 # file is not present in the old diff, so we have to figure out the
1346 1347 # operation (ADD/REMOVE) from the parsed diff
1347 1348 operations_dict = diff_data['stats']['ops']
1348 1349 if diffs.DEL_FILENODE in operations_dict:
1349 1350 removed_files.append(new_filename)
1350 1351 else:
1351 1352 added_files.append(new_filename)
1352 1353 else:
1353 1354 if new_hash != old_hash:
1354 1355 modified_files.append(new_filename)
1355 1356 # now remove a file from old, since we have seen it already
1356 1357 del old_files[new_filename]
1357 1358
1358 1359 # removed files are those present in old, but not in NEW; since we
1359 1360 # remove old files that are present in the new diff, any left-overs
1360 1361 # should be the removed files
1361 1362 removed_files.extend(old_files.keys())
1362 1363
1363 1364 return FileChangeTuple(added_files, modified_files, removed_files)
1364 1365
1365 1366 def _render_update_message(self, ancestor_commit_id, changes, file_changes):
1366 1367 """
1367 1368 render the message using DEFAULT_COMMENTS_RENDERER (RST renderer),
1368 1369 so it always looks the same regardless of which default
1369 1370 renderer the system is using.
1370 1371
1371 1372 :param ancestor_commit_id: ancestor raw_id
1372 1373 :param changes: changes named tuple
1373 1374 :param file_changes: file changes named tuple
1374 1375
1375 1376 """
1376 1377 new_status = ChangesetStatus.get_status_lbl(
1377 1378 ChangesetStatus.STATUS_UNDER_REVIEW)
1378 1379
1379 1380 changed_files = (
1380 1381 file_changes.added + file_changes.modified + file_changes.removed)
1381 1382
1382 1383 params = {
1383 1384 'under_review_label': new_status,
1384 1385 'added_commits': changes.added,
1385 1386 'removed_commits': changes.removed,
1386 1387 'changed_files': changed_files,
1387 1388 'added_files': file_changes.added,
1388 1389 'modified_files': file_changes.modified,
1389 1390 'removed_files': file_changes.removed,
1390 1391 'ancestor_commit_id': ancestor_commit_id
1391 1392 }
1392 1393 renderer = RstTemplateRenderer()
1393 1394 return renderer.render('pull_request_update.mako', **params)
1394 1395
1395 1396 def edit(self, pull_request, title, description, description_renderer, user):
1396 1397 pull_request = self.__get_pull_request(pull_request)
1397 1398 old_data = pull_request.get_api_data(with_merge_state=False)
1398 1399 if pull_request.is_closed():
1399 1400 raise ValueError('This pull request is closed')
1400 1401 if title:
1401 1402 pull_request.title = title
1402 1403 pull_request.description = description
1403 1404 pull_request.updated_on = datetime.datetime.now()
1404 1405 pull_request.description_renderer = description_renderer
1405 1406 Session().add(pull_request)
1406 1407 self._log_audit_action(
1407 1408 'repo.pull_request.edit', {'old_data': old_data},
1408 1409 user, pull_request)
1409 1410
1410 1411 def update_reviewers(self, pull_request, reviewer_data, user):
1411 1412 """
1412 1413 Update the reviewers in the pull request
1413 1414
1414 1415 :param pull_request: the pr to update
1415 1416 :param reviewer_data: list of tuples
1416 1417 [(user, ['reason1', 'reason2'], mandatory_flag, role, [rules])]
1417 1418 :param user: current user who triggers this action
1418 1419 """
1419 1420
1420 1421 pull_request = self.__get_pull_request(pull_request)
1421 1422 if pull_request.is_closed():
1422 1423 raise ValueError('This pull request is closed')
1423 1424
1424 1425 reviewers = {}
1425 1426 for user_id, reasons, mandatory, role, rules in reviewer_data:
1426 1427 if isinstance(user_id, (int, compat.string_types)):
1427 1428 user_id = self._get_user(user_id).user_id
1428 1429 reviewers[user_id] = {
1429 1430 'reasons': reasons, 'mandatory': mandatory, 'role': role}
1430 1431
1431 1432 reviewers_ids = set(reviewers.keys())
1432 1433 current_reviewers = PullRequestReviewers.get_pull_request_reviewers(
1433 1434 pull_request.pull_request_id, role=PullRequestReviewers.ROLE_REVIEWER)
1434 1435
1435 1436 current_reviewers_ids = set([x.user.user_id for x in current_reviewers])
1436 1437
1437 1438 ids_to_add = reviewers_ids.difference(current_reviewers_ids)
1438 1439 ids_to_remove = current_reviewers_ids.difference(reviewers_ids)
1439 1440
1440 1441 log.debug("Adding %s reviewers", ids_to_add)
1441 1442 log.debug("Removing %s reviewers", ids_to_remove)
1442 1443 changed = False
1443 1444 added_audit_reviewers = []
1444 1445 removed_audit_reviewers = []
1445 1446
1446 1447 for uid in ids_to_add:
1447 1448 changed = True
1448 1449 _usr = self._get_user(uid)
1449 1450 reviewer = PullRequestReviewers()
1450 1451 reviewer.user = _usr
1451 1452 reviewer.pull_request = pull_request
1452 1453 reviewer.reasons = reviewers[uid]['reasons']
1453 1454 # NOTE(marcink): mandatory shouldn't be changed now
1454 1455 # reviewer.mandatory = reviewers[uid]['reasons']
1455 1456 # NOTE(marcink): role should be hardcoded, so we won't edit it.
1456 1457 reviewer.role = PullRequestReviewers.ROLE_REVIEWER
1457 1458 Session().add(reviewer)
1458 1459 added_audit_reviewers.append(reviewer.get_dict())
1459 1460
1460 1461 for uid in ids_to_remove:
1461 1462 changed = True
1462 1463 # NOTE(marcink): we fetch "ALL" reviewers objects using .all().
1463 1464 # This is an edge case that handles a previous state where the same reviewer existed twice.
1464 1465 # this CAN happen due to the lack of DB checks
1465 1466 reviewers = PullRequestReviewers.query()\
1466 1467 .filter(PullRequestReviewers.user_id == uid,
1467 1468 PullRequestReviewers.role == PullRequestReviewers.ROLE_REVIEWER,
1468 1469 PullRequestReviewers.pull_request == pull_request)\
1469 1470 .all()
1470 1471
1471 1472 for obj in reviewers:
1472 1473 removed_audit_reviewers.append(obj.get_dict())
1473 1474 Session().delete(obj)
1474 1475
1475 1476 if changed:
1476 1477 Session().expire_all()
1477 1478 pull_request.updated_on = datetime.datetime.now()
1478 1479 Session().add(pull_request)
1479 1480
1480 1481 # finally store audit logs
1481 1482 for user_data in added_audit_reviewers:
1482 1483 self._log_audit_action(
1483 1484 'repo.pull_request.reviewer.add', {'data': user_data},
1484 1485 user, pull_request)
1485 1486 for user_data in removed_audit_reviewers:
1486 1487 self._log_audit_action(
1487 1488 'repo.pull_request.reviewer.delete', {'old_data': user_data},
1488 1489 user, pull_request)
1489 1490
1490 1491 self.notify_reviewers(pull_request, ids_to_add, user)
1491 1492 return ids_to_add, ids_to_remove
1492 1493
1493 1494 def update_observers(self, pull_request, observer_data, user):
1494 1495 """
1495 1496 Update the observers in the pull request
1496 1497
1497 1498 :param pull_request: the pr to update
1498 1499 :param observer_data: list of tuples
1499 1500 [(user, ['reason1', 'reason2'], mandatory_flag, role, [rules])]
1500 1501 :param user: current user who triggers this action
1501 1502 """
1502 1503 pull_request = self.__get_pull_request(pull_request)
1503 1504 if pull_request.is_closed():
1504 1505 raise ValueError('This pull request is closed')
1505 1506
1506 1507 observers = {}
1507 1508 for user_id, reasons, mandatory, role, rules in observer_data:
1508 1509 if isinstance(user_id, (int, compat.string_types)):
1509 1510 user_id = self._get_user(user_id).user_id
1510 1511 observers[user_id] = {
1511 1512 'reasons': reasons, 'mandatory': mandatory, 'role': role}
1512 1513
1513 1514 observers_ids = set(observers.keys())
1514 1515 current_observers = PullRequestReviewers.get_pull_request_reviewers(
1515 1516 pull_request.pull_request_id, role=PullRequestReviewers.ROLE_OBSERVER)
1516 1517
1517 1518 current_observers_ids = set([x.user.user_id for x in current_observers])
1518 1519
1519 1520 ids_to_add = observers_ids.difference(current_observers_ids)
1520 1521 ids_to_remove = current_observers_ids.difference(observers_ids)
1521 1522
1522 1523 log.debug("Adding %s observer", ids_to_add)
1523 1524 log.debug("Removing %s observer", ids_to_remove)
1524 1525 changed = False
1525 1526 added_audit_observers = []
1526 1527 removed_audit_observers = []
1527 1528
1528 1529 for uid in ids_to_add:
1529 1530 changed = True
1530 1531 _usr = self._get_user(uid)
1531 1532 observer = PullRequestReviewers()
1532 1533 observer.user = _usr
1533 1534 observer.pull_request = pull_request
1534 1535 observer.reasons = observers[uid]['reasons']
1535 1536 # NOTE(marcink): mandatory shouldn't be changed now
1536 1537 # observer.mandatory = observer[uid]['reasons']
1537 1538
1538 1539 # NOTE(marcink): role should be hardcoded, so we won't edit it.
1539 1540 observer.role = PullRequestReviewers.ROLE_OBSERVER
1540 1541 Session().add(observer)
1541 1542 added_audit_observers.append(observer.get_dict())
1542 1543
1543 1544 for uid in ids_to_remove:
1544 1545 changed = True
1545 1546 # NOTE(marcink): we fetch "ALL" reviewers objects using .all().
1546 1547 # This is an edge case that handles a previous state where the same reviewer existed twice.
1547 1548 # this CAN happen due to the lack of DB checks
1548 1549 observers = PullRequestReviewers.query()\
1549 1550 .filter(PullRequestReviewers.user_id == uid,
1550 1551 PullRequestReviewers.role == PullRequestReviewers.ROLE_OBSERVER,
1551 1552 PullRequestReviewers.pull_request == pull_request)\
1552 1553 .all()
1553 1554
1554 1555 for obj in observers:
1555 1556 removed_audit_observers.append(obj.get_dict())
1556 1557 Session().delete(obj)
1557 1558
1558 1559 if changed:
1559 1560 Session().expire_all()
1560 1561 pull_request.updated_on = datetime.datetime.now()
1561 1562 Session().add(pull_request)
1562 1563
1563 1564 # finally store audit logs
1564 1565 for user_data in added_audit_observers:
1565 1566 self._log_audit_action(
1566 1567 'repo.pull_request.observer.add', {'data': user_data},
1567 1568 user, pull_request)
1568 1569 for user_data in removed_audit_observers:
1569 1570 self._log_audit_action(
1570 1571 'repo.pull_request.observer.delete', {'old_data': user_data},
1571 1572 user, pull_request)
1572 1573
1573 1574 self.notify_observers(pull_request, ids_to_add, user)
1574 1575 return ids_to_add, ids_to_remove
1575 1576
1576 1577 def get_url(self, pull_request, request=None, permalink=False):
1577 1578 if not request:
1578 1579 request = get_current_request()
1579 1580
1580 1581 if permalink:
1581 1582 return request.route_url(
1582 1583 'pull_requests_global',
1583 1584 pull_request_id=pull_request.pull_request_id,)
1584 1585 else:
1585 1586 return request.route_url('pullrequest_show',
1586 1587 repo_name=safe_str(pull_request.target_repo.repo_name),
1587 1588 pull_request_id=pull_request.pull_request_id,)
1588 1589
1589 1590 def get_shadow_clone_url(self, pull_request, request=None):
1590 1591 """
1591 1592 Returns a qualified url pointing to the shadow repository. If this pull
1592 1593 request is closed there is no shadow repository and ``None`` will be
1593 1594 returned.
1594 1595 """
1595 1596 if pull_request.is_closed():
1596 1597 return None
1597 1598 else:
1598 1599 pr_url = urllib.unquote(self.get_url(pull_request, request=request))
1599 1600 return safe_unicode('{pr_url}/repository'.format(pr_url=pr_url))
1600 1601
1601 1602 def _notify_reviewers(self, pull_request, user_ids, role, user):
1602 1603 # notification to reviewers/observers
1603 1604 if not user_ids:
1604 1605 return
1605 1606
1606 1607 log.debug('Notify following %s users about pull-request: %s', role, user_ids)
1607 1608
1608 1609 pull_request_obj = pull_request
1609 1610 # get the current participants of this pull request
1610 1611 recipients = user_ids
1611 1612 notification_type = EmailNotificationModel.TYPE_PULL_REQUEST
1612 1613
1613 1614 pr_source_repo = pull_request_obj.source_repo
1614 1615 pr_target_repo = pull_request_obj.target_repo
1615 1616
1616 1617 pr_url = h.route_url('pullrequest_show',
1617 1618 repo_name=pr_target_repo.repo_name,
1618 1619 pull_request_id=pull_request_obj.pull_request_id,)
1619 1620
1620 1621 # set some variables for email notification
1621 1622 pr_target_repo_url = h.route_url(
1622 1623 'repo_summary', repo_name=pr_target_repo.repo_name)
1623 1624
1624 1625 pr_source_repo_url = h.route_url(
1625 1626 'repo_summary', repo_name=pr_source_repo.repo_name)
1626 1627
1627 1628 # pull request specifics
1628 1629 pull_request_commits = [
1629 1630 (x.raw_id, x.message)
1630 1631 for x in map(pr_source_repo.get_commit, pull_request.revisions)]
1631 1632
1632 1633 current_rhodecode_user = user
1633 1634 kwargs = {
1634 1635 'user': current_rhodecode_user,
1635 1636 'pull_request_author': pull_request.author,
1636 1637 'pull_request': pull_request_obj,
1637 1638 'pull_request_commits': pull_request_commits,
1638 1639
1639 1640 'pull_request_target_repo': pr_target_repo,
1640 1641 'pull_request_target_repo_url': pr_target_repo_url,
1641 1642
1642 1643 'pull_request_source_repo': pr_source_repo,
1643 1644 'pull_request_source_repo_url': pr_source_repo_url,
1644 1645
1645 1646 'pull_request_url': pr_url,
1646 1647 'thread_ids': [pr_url],
1647 1648 'user_role': role
1648 1649 }
1649 1650
1650 1651 # create notification objects, and emails
1651 1652 NotificationModel().create(
1652 1653 created_by=current_rhodecode_user,
1653 1654 notification_subject='', # Filled in based on the notification_type
1654 1655 notification_body='', # Filled in based on the notification_type
1655 1656 notification_type=notification_type,
1656 1657 recipients=recipients,
1657 1658 email_kwargs=kwargs,
1658 1659 )
1659 1660
1660 1661 def notify_reviewers(self, pull_request, reviewers_ids, user):
1661 1662 return self._notify_reviewers(pull_request, reviewers_ids,
1662 1663 PullRequestReviewers.ROLE_REVIEWER, user)
1663 1664
1664 1665 def notify_observers(self, pull_request, observers_ids, user):
1665 1666 return self._notify_reviewers(pull_request, observers_ids,
1666 1667 PullRequestReviewers.ROLE_OBSERVER, user)
1667 1668
1668 1669 def notify_users(self, pull_request, updating_user, ancestor_commit_id,
1669 1670 commit_changes, file_changes):
1670 1671
1671 1672 updating_user_id = updating_user.user_id
1672 1673 reviewers = set([x.user.user_id for x in pull_request.get_pull_request_reviewers()])
1673 1674 # NOTE(marcink): send notification to all other users except the
1674 1675 # person who updated the PR
1675 1676 recipients = reviewers.difference(set([updating_user_id]))
1676 1677
1677 1678 log.debug('Notify following recipients about pull-request update: %s', recipients)
1678 1679
1679 1680 pull_request_obj = pull_request
1680 1681
1681 1682 # send email about the update
1682 1683 changed_files = (
1683 1684 file_changes.added + file_changes.modified + file_changes.removed)
1684 1685
1685 1686 pr_source_repo = pull_request_obj.source_repo
1686 1687 pr_target_repo = pull_request_obj.target_repo
1687 1688
1688 1689 pr_url = h.route_url('pullrequest_show',
1689 1690 repo_name=pr_target_repo.repo_name,
1690 1691 pull_request_id=pull_request_obj.pull_request_id,)
1691 1692
1692 1693 # set some variables for email notification
1693 1694 pr_target_repo_url = h.route_url(
1694 1695 'repo_summary', repo_name=pr_target_repo.repo_name)
1695 1696
1696 1697 pr_source_repo_url = h.route_url(
1697 1698 'repo_summary', repo_name=pr_source_repo.repo_name)
1698 1699
1699 1700 email_kwargs = {
1700 1701 'date': datetime.datetime.now(),
1701 1702 'updating_user': updating_user,
1702 1703
1703 1704 'pull_request': pull_request_obj,
1704 1705
1705 1706 'pull_request_target_repo': pr_target_repo,
1706 1707 'pull_request_target_repo_url': pr_target_repo_url,
1707 1708
1708 1709 'pull_request_source_repo': pr_source_repo,
1709 1710 'pull_request_source_repo_url': pr_source_repo_url,
1710 1711
1711 1712 'pull_request_url': pr_url,
1712 1713
1713 1714 'ancestor_commit_id': ancestor_commit_id,
1714 1715 'added_commits': commit_changes.added,
1715 1716 'removed_commits': commit_changes.removed,
1716 1717 'changed_files': changed_files,
1717 1718 'added_files': file_changes.added,
1718 1719 'modified_files': file_changes.modified,
1719 1720 'removed_files': file_changes.removed,
1720 1721 'thread_ids': [pr_url],
1721 1722 }
1722 1723
1723 1724 # create notification objects, and emails
1724 1725 NotificationModel().create(
1725 1726 created_by=updating_user,
1726 1727 notification_subject='', # Filled in based on the notification_type
1727 1728 notification_body='', # Filled in based on the notification_type
1728 1729 notification_type=EmailNotificationModel.TYPE_PULL_REQUEST_UPDATE,
1729 1730 recipients=recipients,
1730 1731 email_kwargs=email_kwargs,
1731 1732 )
1732 1733
1733 1734 def delete(self, pull_request, user=None):
1734 1735 if not user:
1735 1736 user = getattr(get_current_rhodecode_user(), 'username', None)
1736 1737
1737 1738 pull_request = self.__get_pull_request(pull_request)
1738 1739 old_data = pull_request.get_api_data(with_merge_state=False)
1739 1740 self._cleanup_merge_workspace(pull_request)
1740 1741 self._log_audit_action(
1741 1742 'repo.pull_request.delete', {'old_data': old_data},
1742 1743 user, pull_request)
1743 1744 Session().delete(pull_request)
1744 1745
1745 1746 def close_pull_request(self, pull_request, user):
1746 1747 pull_request = self.__get_pull_request(pull_request)
1747 1748 self._cleanup_merge_workspace(pull_request)
1748 1749 pull_request.status = PullRequest.STATUS_CLOSED
1749 1750 pull_request.updated_on = datetime.datetime.now()
1750 1751 Session().add(pull_request)
1751 1752 self.trigger_pull_request_hook(pull_request, pull_request.author, 'close')
1752 1753
1753 1754 pr_data = pull_request.get_api_data(with_merge_state=False)
1754 1755 self._log_audit_action(
1755 1756 'repo.pull_request.close', {'data': pr_data}, user, pull_request)
1756 1757
1757 1758 def close_pull_request_with_comment(
1758 1759 self, pull_request, user, repo, message=None, auth_user=None):
1759 1760
1760 1761 pull_request_review_status = pull_request.calculated_review_status()
1761 1762
1762 1763 if pull_request_review_status == ChangesetStatus.STATUS_APPROVED:
1763 1764 # approved only if we have voting consent
1764 1765 status = ChangesetStatus.STATUS_APPROVED
1765 1766 else:
1766 1767 status = ChangesetStatus.STATUS_REJECTED
1767 1768 status_lbl = ChangesetStatus.get_status_lbl(status)
1768 1769
1769 1770 default_message = (
1770 1771 'Closing with status change {transition_icon} {status}.'
1771 1772 ).format(transition_icon='>', status=status_lbl)
1772 1773 text = message or default_message
1773 1774
1774 1775 # create a comment, and link it to new status
1775 1776 comment = CommentsModel().create(
1776 1777 text=text,
1777 1778 repo=repo.repo_id,
1778 1779 user=user.user_id,
1779 1780 pull_request=pull_request.pull_request_id,
1780 1781 status_change=status_lbl,
1781 1782 status_change_type=status,
1782 1783 closing_pr=True,
1783 1784 auth_user=auth_user,
1784 1785 )
1785 1786
1786 1787 # calculate old status before we change it
1787 1788 old_calculated_status = pull_request.calculated_review_status()
1788 1789 ChangesetStatusModel().set_status(
1789 1790 repo.repo_id,
1790 1791 status,
1791 1792 user.user_id,
1792 1793 comment=comment,
1793 1794 pull_request=pull_request.pull_request_id
1794 1795 )
1795 1796
1796 1797 Session().flush()
1797 1798
1798 1799 self.trigger_pull_request_hook(pull_request, user, 'comment',
1799 1800 data={'comment': comment})
1800 1801
1801 1802 # we now calculate the status of the pull request again, and based on
1802 1803 # that calculation trigger a status change. This might happen in cases
1803 1804 # where a non-reviewer admin closes a PR: their vote doesn't change the
1804 1805 # status, while if they're a reviewer it might.
1805 1806 calculated_status = pull_request.calculated_review_status()
1806 1807 if old_calculated_status != calculated_status:
1807 1808 self.trigger_pull_request_hook(pull_request, user, 'review_status_change',
1808 1809 data={'status': calculated_status})
1809 1810
1810 1811 # finally close the PR
1811 1812 PullRequestModel().close_pull_request(pull_request.pull_request_id, user)
1812 1813
1813 1814 return comment, status
1814 1815
1815 1816 def merge_status(self, pull_request, translator=None, force_shadow_repo_refresh=False):
1816 1817 _ = translator or get_current_request().translate
1817 1818
1818 1819 if not self._is_merge_enabled(pull_request):
1819 1820 return None, False, _('Server-side pull request merging is disabled.')
1820 1821
1821 1822 if pull_request.is_closed():
1822 1823 return None, False, _('This pull request is closed.')
1823 1824
1824 1825 merge_possible, msg = self._check_repo_requirements(
1825 1826 target=pull_request.target_repo, source=pull_request.source_repo,
1826 1827 translator=_)
1827 1828 if not merge_possible:
1828 1829 return None, merge_possible, msg
1829 1830
1830 1831 try:
1831 1832 merge_response = self._try_merge(
1832 1833 pull_request, force_shadow_repo_refresh=force_shadow_repo_refresh)
1833 1834 log.debug("Merge response: %s", merge_response)
1834 1835 return merge_response, merge_response.possible, merge_response.merge_status_message
1835 1836 except NotImplementedError:
1836 1837 return None, False, _('Pull request merging is not supported.')
1837 1838
1838 1839 def _check_repo_requirements(self, target, source, translator):
1839 1840 """
1840 1841 Check if `target` and `source` have compatible requirements.
1841 1842
1842 1843 Currently this is just checking for largefiles.
1843 1844 """
1844 1845 _ = translator
1845 1846 target_has_largefiles = self._has_largefiles(target)
1846 1847 source_has_largefiles = self._has_largefiles(source)
1847 1848 merge_possible = True
1848 1849 message = u''
1849 1850
1850 1851 if target_has_largefiles != source_has_largefiles:
1851 1852 merge_possible = False
1852 1853 if source_has_largefiles:
1853 1854 message = _(
1854 1855 'Target repository large files support is disabled.')
1855 1856 else:
1856 1857 message = _(
1857 1858 'Source repository large files support is disabled.')
1858 1859
1859 1860 return merge_possible, message
1860 1861
1861 1862 def _has_largefiles(self, repo):
1862 1863 largefiles_ui = VcsSettingsModel(repo=repo).get_ui_settings(
1863 1864 'extensions', 'largefiles')
1864 1865 return largefiles_ui and largefiles_ui[0].active
1865 1866
1866 1867 def _try_merge(self, pull_request, force_shadow_repo_refresh=False):
1867 1868 """
1868 1869 Try to merge the pull request and return the merge status.
1869 1870 """
1870 1871 log.debug(
1871 1872 "Trying out if the pull request %s can be merged. Force_refresh=%s",
1872 1873 pull_request.pull_request_id, force_shadow_repo_refresh)
1873 1874 target_vcs = pull_request.target_repo.scm_instance()
1874 1875 # Refresh the target reference.
1875 1876 try:
1876 1877 target_ref = self._refresh_reference(
1877 1878 pull_request.target_ref_parts, target_vcs)
1878 1879 except CommitDoesNotExistError:
1879 1880 merge_state = MergeResponse(
1880 1881 False, False, None, MergeFailureReason.MISSING_TARGET_REF,
1881 1882 metadata={'target_ref': pull_request.target_ref_parts})
1882 1883 return merge_state
1883 1884
1884 1885 target_locked = pull_request.target_repo.locked
1885 1886 if target_locked and target_locked[0]:
1886 1887 locked_by = 'user:{}'.format(target_locked[0])
1887 1888 log.debug("The target repository is locked by %s.", locked_by)
1888 1889 merge_state = MergeResponse(
1889 1890 False, False, None, MergeFailureReason.TARGET_IS_LOCKED,
1890 1891 metadata={'locked_by': locked_by})
1891 1892 elif force_shadow_repo_refresh or self._needs_merge_state_refresh(
1892 1893 pull_request, target_ref):
1893 1894 log.debug("Refreshing the merge status of the repository.")
1894 1895 merge_state = self._refresh_merge_state(
1895 1896 pull_request, target_vcs, target_ref)
1896 1897 else:
1897 1898 possible = pull_request.last_merge_status == MergeFailureReason.NONE
1898 1899 metadata = {
1899 1900 'unresolved_files': '',
1900 1901 'target_ref': pull_request.target_ref_parts,
1901 1902 'source_ref': pull_request.source_ref_parts,
1902 1903 }
1903 1904 if pull_request.last_merge_metadata:
1904 1905 metadata.update(pull_request.last_merge_metadata_parsed)
1905 1906
1906 1907 if not possible and target_ref.type == 'branch':
1907 1908 # NOTE(marcink): case for mercurial multiple heads on branch
1908 1909 heads = target_vcs._heads(target_ref.name)
1909 1910 if len(heads) != 1:
1910 1911 heads = ',\n'.join(target_vcs._heads(target_ref.name))
1911 1912 metadata.update({
1912 1913 'heads': heads
1913 1914 })
1914 1915
1915 1916 merge_state = MergeResponse(
1916 1917 possible, False, None, pull_request.last_merge_status, metadata=metadata)
1917 1918
1918 1919 return merge_state
1919 1920
1920 1921 def _refresh_reference(self, reference, vcs_repository):
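# movable ref types (e.g. branches) are re-resolved by name; anything
# else is looked up by its stored commit id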
1921 1922 if reference.type in self.UPDATABLE_REF_TYPES:
1922 1923 name_or_id = reference.name
1923 1924 else:
1924 1925 name_or_id = reference.commit_id
1925 1926
1926 1927 refreshed_commit = vcs_repository.get_commit(name_or_id)
1927 1928 refreshed_reference = Reference(
1928 1929 reference.type, reference.name, refreshed_commit.raw_id)
1929 1930 return refreshed_reference
1930 1931
1931 1932 def _needs_merge_state_refresh(self, pull_request, target_reference):
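# a refresh is needed whenever the source tip or the target tip moved
# since the last recorded merge simulation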
1932 1933 return not (
1933 1934 pull_request.revisions and
1934 1935 pull_request.revisions[0] == pull_request._last_merge_source_rev and
1935 1936 target_reference.commit_id == pull_request._last_merge_target_rev)
1936 1937
1937 1938 def _refresh_merge_state(self, pull_request, target_vcs, target_reference):
1938 1939 workspace_id = self._workspace_id(pull_request)
1939 1940 source_vcs = pull_request.source_repo.scm_instance()
1940 1941 repo_id = pull_request.target_repo.repo_id
1941 1942 use_rebase = self._use_rebase_for_merging(pull_request)
1942 1943 close_branch = self._close_branch_before_merging(pull_request)
1943 1944 merge_state = target_vcs.merge(
1944 1945 repo_id, workspace_id,
1945 1946 target_reference, source_vcs, pull_request.source_ref_parts,
1946 1947 dry_run=True, use_rebase=use_rebase,
1947 1948 close_branch=close_branch)
1948 1949
1949 1950 # Do not store the response if there was an unknown error.
1950 1951 if merge_state.failure_reason != MergeFailureReason.UNKNOWN:
1951 1952 pull_request._last_merge_source_rev = \
1952 1953 pull_request.source_ref_parts.commit_id
1953 1954 pull_request._last_merge_target_rev = target_reference.commit_id
1954 1955 pull_request.last_merge_status = merge_state.failure_reason
1955 1956 pull_request.last_merge_metadata = merge_state.metadata
1956 1957
1957 1958 pull_request.shadow_merge_ref = merge_state.merge_ref
1958 1959 Session().add(pull_request)
1959 1960 Session().commit()
1960 1961
1961 1962 return merge_state
1962 1963
1963 1964 def _workspace_id(self, pull_request):
1964 1965 workspace_id = 'pr-%s' % pull_request.pull_request_id
1965 1966 return workspace_id
1966 1967
1967 1968 def generate_repo_data(self, repo, commit_id=None, branch=None,
1968 1969 bookmark=None, translator=None):
1969 1970 from rhodecode.model.repo import RepoModel
1970 1971
1971 1972 all_refs, selected_ref = \
1972 1973 self._get_repo_pullrequest_sources(
1973 1974 repo.scm_instance(), commit_id=commit_id,
1974 1975 branch=branch, bookmark=bookmark, translator=translator)
1975 1976
1976 1977 refs_select2 = []
1977 1978 for element in all_refs:
1978 1979 children = [{'id': x[0], 'text': x[1]} for x in element[0]]
1979 1980 refs_select2.append({'text': element[1], 'children': children})
1980 1981
1981 1982 return {
1982 1983 'user': {
1983 1984 'user_id': repo.user.user_id,
1984 1985 'username': repo.user.username,
1985 1986 'firstname': repo.user.first_name,
1986 1987 'lastname': repo.user.last_name,
1987 1988 'gravatar_link': h.gravatar_url(repo.user.email, 14),
1988 1989 },
1989 1990 'name': repo.repo_name,
1990 1991 'link': RepoModel().get_url(repo),
1991 1992 'description': h.chop_at_smart(repo.description_safe, '\n'),
1992 1993 'refs': {
1993 1994 'all_refs': all_refs,
1994 1995 'selected_ref': selected_ref,
1995 1996 'select2_refs': refs_select2
1996 1997 }
1997 1998 }
1998 1999
1999 2000 def generate_pullrequest_title(self, source, source_ref, target):
2000 2001 return u'{source}#{at_ref} to {target}'.format(
2001 2002 source=source,
2002 2003 at_ref=source_ref,
2003 2004 target=target,
2004 2005 )
2005 2006
2006 2007 def _cleanup_merge_workspace(self, pull_request):
2007 2008 # Merging related cleanup
2008 2009 repo_id = pull_request.target_repo.repo_id
2009 2010 target_scm = pull_request.target_repo.scm_instance()
2010 2011 workspace_id = self._workspace_id(pull_request)
2011 2012
2012 2013 try:
2013 2014 target_scm.cleanup_merge_workspace(repo_id, workspace_id)
2014 2015 except NotImplementedError:
2015 2016 pass
2016 2017
    def _get_repo_pullrequest_sources(
            self, repo, commit_id=None, branch=None, bookmark=None,
            translator=None):
        """
        Return a structure with the repo's interesting commits, suitable for
        the selectors in the pull request controller.

        :param commit_id: a commit that must be in the list somehow
            and selected by default
        :param branch: a branch that must be in the list and selected
            by default - even if closed
        :param bookmark: a bookmark that must be in the list and selected
            by default
        :param translator: optional translation function; defaults to the
            translator of the current request
        """
        _ = translator or get_current_request().translate

        commit_id = safe_str(commit_id) if commit_id else None
        branch = safe_unicode(branch) if branch else None
        bookmark = safe_unicode(bookmark) if bookmark else None

        selected = None

        # order matters: first source that has commit_id in it will be selected
        sources = []
        sources.append(('book', repo.bookmarks.items(), _('Bookmarks'), bookmark))
        sources.append(('branch', repo.branches.items(), _('Branches'), branch))

        if commit_id:
            ref_commit = (h.short_id(commit_id), commit_id)
            sources.append(('rev', [ref_commit], _('Commit IDs'), commit_id))

        sources.append(
            ('branch', repo.branches_closed.items(), _('Closed Branches'), branch),
        )

        groups = []

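        # walk the sources in priority order and flatten each into
        # (ref_key, ref_name) tuples; the first ref matching the requested
        # commit_id, branch or bookmark becomes the default selection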
        for group_key, ref_list, group_name, match in sources:
            group_refs = []
            for ref_name, ref_id in ref_list:
                ref_key = u'{}:{}:{}'.format(group_key, ref_name, ref_id)
                group_refs.append((ref_key, ref_name))

                if not selected:
                    if set([commit_id, match]) & set([ref_id, ref_name]):
                        selected = ref_key

            if group_refs:
                groups.append((group_refs, group_name))

        if not selected:
            ref = commit_id or branch or bookmark
            if ref:
                raise CommitDoesNotExistError(
                    u'No commit refs could be found matching: {}'.format(ref))
            elif repo.DEFAULT_BRANCH_NAME in repo.branches:
                selected = u'branch:{}:{}'.format(
                    safe_unicode(repo.DEFAULT_BRANCH_NAME),
                    safe_unicode(repo.branches[repo.DEFAULT_BRANCH_NAME])
                )
            elif repo.commit_ids:
                # make the user select in this case
                selected = None
            else:
                raise EmptyRepositoryError()
        return groups, selected

    def get_diff(self, source_repo, source_ref_id, target_ref_id,
                 hide_whitespace_changes, diff_context):

        return self._get_diff_from_pr_or_version(
            source_repo, source_ref_id, target_ref_id,
            hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)

    def _get_diff_from_pr_or_version(
            self, source_repo, source_ref_id, target_ref_id,
            hide_whitespace_changes, diff_context):

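        # after a pull request update the source commit may no longer be
        # reachable from any ref (it can live only in older PR versions or
        # the shadow repository), hence maybe_unreachable=True below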
        target_commit = source_repo.get_commit(
            commit_id=safe_str(target_ref_id))
        source_commit = source_repo.get_commit(
            commit_id=safe_str(source_ref_id), maybe_unreachable=True)
        if isinstance(source_repo, Repository):
            vcs_repo = source_repo.scm_instance()
        else:
            vcs_repo = source_repo

        # TODO: johbo: In the context of an update, we cannot reach
        # the old commit anymore with our normal mechanisms. It needs
        # some sort of special support in the vcs layer to avoid this
        # workaround.
        if (source_commit.raw_id == vcs_repo.EMPTY_COMMIT_ID and
                vcs_repo.alias == 'git'):
            source_commit.raw_id = safe_str(source_ref_id)

        log.debug('calculating diff between '
                  'source_ref:%s and target_ref:%s for repo `%s`',
                  source_ref_id, target_ref_id,
                  safe_unicode(vcs_repo.path))

        vcs_diff = vcs_repo.get_diff(
            commit1=target_commit, commit2=source_commit,
            ignore_whitespace=hide_whitespace_changes, context=diff_context)
        return vcs_diff

    def _is_merge_enabled(self, pull_request):
        return self._get_general_setting(
            pull_request, 'rhodecode_pr_merge_enabled')

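    # merge behaviour below is driven by per-repository VCS settings;
    # both helpers default to False when the setting is absent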
    def _use_rebase_for_merging(self, pull_request):
        repo_type = pull_request.target_repo.repo_type
        if repo_type == 'hg':
            return self._get_general_setting(
                pull_request, 'rhodecode_hg_use_rebase_for_merging')
        elif repo_type == 'git':
            return self._get_general_setting(
                pull_request, 'rhodecode_git_use_rebase_for_merging')

        return False

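    # the attribute used for the merging user's name can be overridden via
    # the RC_MERGE_USER_NAME_ATTR environment variable; it falls back to
    # the user's short_contact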
    def _user_name_for_merging(self, pull_request, user):
        env_user_name_attr = os.environ.get('RC_MERGE_USER_NAME_ATTR', '')
        if env_user_name_attr and hasattr(user, env_user_name_attr):
            user_name_attr = env_user_name_attr
        else:
            user_name_attr = 'short_contact'

        user_name = getattr(user, user_name_attr)
        return user_name

    def _close_branch_before_merging(self, pull_request):
        repo_type = pull_request.target_repo.repo_type
        if repo_type == 'hg':
            return self._get_general_setting(
                pull_request, 'rhodecode_hg_close_branch_before_merging')
        elif repo_type == 'git':
            return self._get_general_setting(
                pull_request, 'rhodecode_git_close_branch_before_merging')

        return False

    def _get_general_setting(self, pull_request, settings_key, default=False):
        settings_model = VcsSettingsModel(repo=pull_request.target_repo)
        settings = settings_model.get_general_settings()
        return settings.get(settings_key, default)

    def _log_audit_action(self, action, action_data, user, pull_request):
        audit_logger.store(
            action=action,
            action_data=action_data,
            user=user,
            repo=pull_request.target_repo)

    def get_reviewer_functions(self):
        """
        Fetches the functions for validating and fetching default reviewers.
        If available, we use the EE package; otherwise we fall back to the
        CE package functions.
        """
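        # EE builds ship the optional rc_reviewers package; if it is not
        # installed (ImportError), the CE implementations below are used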
        try:
            from rc_reviewers.utils import get_default_reviewers_data
            from rc_reviewers.utils import validate_default_reviewers
            from rc_reviewers.utils import validate_observers
        except ImportError:
            from rhodecode.apps.repository.utils import get_default_reviewers_data
            from rhodecode.apps.repository.utils import validate_default_reviewers
            from rhodecode.apps.repository.utils import validate_observers

        return get_default_reviewers_data, validate_default_reviewers, validate_observers


class MergeCheck(object):
    """
    Performs merge checks and returns a check object which stores
    information about merge errors and merge conditions.
    """
    TODO_CHECK = 'todo'
    PERM_CHECK = 'perm'
    REVIEW_CHECK = 'review'
    MERGE_CHECK = 'merge'
    WIP_CHECK = 'wip'

    def __init__(self):
        self.review_status = None
        self.merge_possible = None
        self.merge_msg = ''
        self.merge_response = None
        self.failed = None
        self.errors = []
        self.error_details = OrderedDict()
        self.source_commit = AttributeDict()
        self.target_commit = AttributeDict()
        self.reviewers_count = 0
        self.observers_count = 0

    def __repr__(self):
        return '<MergeCheck(possible:{}, failed:{}, errors:{})>'.format(
            self.merge_possible, self.failed, self.errors)

    def push_error(self, error_type, message, error_key, details):
        self.failed = True
        self.errors.append([error_type, message])
        self.error_details[error_key] = dict(
            details=details,
            error_type=error_type,
            message=message
        )

    @classmethod
    def validate(cls, pull_request, auth_user, translator, fail_early=False,
                 force_shadow_repo_refresh=False):
        _ = translator
        merge_check = cls()
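
        # checks run in order: WIP marker in the title, merge permission,
        # target branch permission, review status, unresolved TODOs, and a
        # final merge simulation; with fail_early=True the first failed
        # check returns immediately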

        # title has WIP:
        if pull_request.work_in_progress:
            log.debug("MergeCheck: cannot merge, title has wip: marker.")

            msg = _('WIP marker in title prevents an accidental merge.')
            merge_check.push_error('error', msg, cls.WIP_CHECK, pull_request.title)
            if fail_early:
                return merge_check

        # permissions to merge
        user_allowed_to_merge = PullRequestModel().check_user_merge(pull_request, auth_user)
        if not user_allowed_to_merge:
            log.debug("MergeCheck: cannot merge, user is not allowed to merge.")

            msg = _('User `{}` not allowed to perform merge.').format(auth_user.username)
            merge_check.push_error('error', msg, cls.PERM_CHECK, auth_user.username)
            if fail_early:
                return merge_check

        # permission to merge into the target branch
        target_commit_id = pull_request.target_ref_parts.commit_id
        if pull_request.target_ref_parts.type == 'branch':
            branch_name = pull_request.target_ref_parts.name
        else:
            # for mercurial we can always figure out the branch from the
            # commit in case of a bookmark
            target_commit = pull_request.target_repo.get_commit(target_commit_id)
            branch_name = target_commit.branch

        rule, branch_perm = auth_user.get_rule_and_branch_permission(
            pull_request.target_repo.repo_name, branch_name)
        if branch_perm and branch_perm == 'branch.none':
            msg = _('Target branch `{}` changes rejected by rule {}.').format(
                branch_name, rule)
            merge_check.push_error('error', msg, cls.PERM_CHECK, auth_user.username)
            if fail_early:
                return merge_check

        # review status, must be always present
        review_status = pull_request.calculated_review_status()
        merge_check.review_status = review_status
        merge_check.reviewers_count = pull_request.reviewers_count
        merge_check.observers_count = pull_request.observers_count

        status_approved = review_status == ChangesetStatus.STATUS_APPROVED
        if not status_approved and merge_check.reviewers_count:
            log.debug("MergeCheck: cannot merge, approval is pending.")
            msg = _('Pull request reviewer approval is pending.')

            merge_check.push_error('warning', msg, cls.REVIEW_CHECK, review_status)

            if fail_early:
                return merge_check

        # left over TODOs
        todos = CommentsModel().get_pull_request_unresolved_todos(pull_request)
        if todos:
            log.debug("MergeCheck: cannot merge, %s unresolved TODOs left.",
                      len(todos))

            if len(todos) == 1:
                msg = _('Cannot merge, {} TODO still not resolved.').format(
                    len(todos))
            else:
                msg = _('Cannot merge, {} TODOs still not resolved.').format(
                    len(todos))

            merge_check.push_error('warning', msg, cls.TODO_CHECK, todos)

            if fail_early:
                return merge_check

        # merge possible, here is the filesystem simulation + shadow repo
        merge_response, merge_status, msg = PullRequestModel().merge_status(
            pull_request, translator=translator,
            force_shadow_repo_refresh=force_shadow_repo_refresh)

        merge_check.merge_possible = merge_status
        merge_check.merge_msg = msg
        merge_check.merge_response = merge_response

        source_ref_id = pull_request.source_ref_parts.commit_id
        target_ref_id = pull_request.target_ref_parts.commit_id

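        # record whether the source or target moved since this PR version
        # was created; either ref may be missing entirely (e.g. after a
        # force-push), in which case the comparison is skipped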
        try:
            source_commit, target_commit = PullRequestModel().get_flow_commits(pull_request)
            merge_check.source_commit.changed = source_ref_id != source_commit.raw_id
            merge_check.source_commit.ref_spec = pull_request.source_ref_parts
            merge_check.source_commit.current_raw_id = source_commit.raw_id
            merge_check.source_commit.previous_raw_id = source_ref_id

            merge_check.target_commit.changed = target_ref_id != target_commit.raw_id
            merge_check.target_commit.ref_spec = pull_request.target_ref_parts
            merge_check.target_commit.current_raw_id = target_commit.raw_id
            merge_check.target_commit.previous_raw_id = target_ref_id
        except (SourceRefMissing, TargetRefMissing):
            pass

        if not merge_status:
            log.debug("MergeCheck: cannot merge, pull request merge not possible.")
            merge_check.push_error('warning', msg, cls.MERGE_CHECK, None)

            if fail_early:
                return merge_check

        log.debug('MergeCheck: is failed: %s', merge_check.failed)
        return merge_check

    @classmethod
    def get_merge_conditions(cls, pull_request, translator):
        _ = translator
        merge_details = {}

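        # the returned structure is informational only: it describes merge
        # strategy and branch-closing behaviour without running any checks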
        model = PullRequestModel()
        use_rebase = model._use_rebase_for_merging(pull_request)

        if use_rebase:
            merge_details['merge_strategy'] = dict(
                details={},
                message=_('Merge strategy: rebase')
            )
        else:
            merge_details['merge_strategy'] = dict(
                details={},
                message=_('Merge strategy: explicit merge commit')
            )

        close_branch = model._close_branch_before_merging(pull_request)
        if close_branch:
            repo_type = pull_request.target_repo.repo_type
            close_msg = ''
            if repo_type == 'hg':
                close_msg = _('Source branch will be closed before the merge.')
            elif repo_type == 'git':
                close_msg = _('Source branch will be deleted after the merge.')

            merge_details['close_branch'] = dict(
                details={},
                message=close_msg
            )

        return merge_details


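# lightweight result containers: ChangeTuple tracks commit-level changes,
# FileChangeTuple tracks file-level changes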
ChangeTuple = collections.namedtuple(
    'ChangeTuple', ['added', 'common', 'removed', 'total'])

FileChangeTuple = collections.namedtuple(
    'FileChangeTuple', ['added', 'modified', 'removed'])