encryption: added new backend using cryptography + Fernet encryption....
marcink -
r3522:3910c057 default
@@ -0,0 +1,69 b''
1 import os
2 import base64
3 from cryptography.fernet import Fernet, InvalidToken
4 from cryptography.hazmat.backends import default_backend
5 from cryptography.hazmat.primitives import hashes
6 from cryptography.hazmat.primitives.kdf.pbkdf2 import PBKDF2HMAC
7
8
 9 class Encryptor(object):
10     key_format = 'enc2$salt:{}$data:{}'
11     pref_len = 5  # length of the 'salt:' / 'data:' prefixes
12 
13     def __init__(self, enc_key):
14         self.enc_key = enc_key
15 
16     def b64_encode(self, data):
17         return base64.urlsafe_b64encode(data)
18 
19     def b64_decode(self, data):
20         return base64.urlsafe_b64decode(data)
21 
22     def get_encryptor(self, salt):
23         """
24         Uses Fernet as encryptor with HMAC signature
25         :param salt: random salt used for encrypting the data
26         """
27         kdf = PBKDF2HMAC(
28             algorithm=hashes.SHA512(),
29             length=32,
30             salt=salt,
31             iterations=100000,
32             backend=default_backend()
33         )
34         key = self.b64_encode(kdf.derive(self.enc_key))
35         return Fernet(key)
36 
37     def _get_parts(self, enc_data):
38         parts = enc_data.split('$', 3)
39         if len(parts) != 3:
40             raise ValueError('Encrypted Data has invalid format, expected {}'.format(self.key_format))
41         prefix, salt, enc_data = parts
42 
43         try:
44             salt = self.b64_decode(salt[self.pref_len:])
45         except TypeError:
46             # bad base64
47             raise ValueError('Encrypted Data salt invalid format, expected base64 format')
48 
49         enc_data = enc_data[self.pref_len:]
50         return prefix, salt, enc_data
51 
52     def encrypt(self, data):
53         salt = os.urandom(64)
54         encryptor = self.get_encryptor(salt)
55         enc_data = encryptor.encrypt(data)
56         return self.key_format.format(self.b64_encode(salt), enc_data)
57 
58     def decrypt(self, data, safe=True):
59         parts = self._get_parts(data)
60         salt = parts[1]
61         enc_data = parts[2]
62         encryptor = self.get_encryptor(salt)
63         try:
64             return encryptor.decrypt(enc_data)
65         except (InvalidToken,):
66             if safe:
67                 return ''
68             else:
69                 raise
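
A minimal usage sketch of the new backend (the import path rhodecode.lib.encrypt2 and the sample values below are assumptions for illustration): the class derives a 32-byte Fernet key from the configured secret with PBKDF2-HMAC-SHA512 and a random 64-byte salt, and stores the salt next to the token in the enc2$salt:...$data:... format.

    # usage sketch, Python 2 byte-string semantics as in the code above
    from rhodecode.lib.encrypt2 import Encryptor  # assumed import path

    enc = Encryptor('super-secret-key')

    token = enc.encrypt('https://user:password@code.example.com/repo')
    assert token.startswith('enc2$salt:')  # salt travels with the encrypted data

    assert enc.decrypt(token) == 'https://user:password@code.example.com/repo'

    # with the default safe=True, a wrong key or damaged token returns '' instead of raising
    assert Encryptor('another-key').decrypt(token) == ''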
@@ -1,732 +1,737 b''
1 1
2 2
3 3 ################################################################################
4 4 ## RHODECODE COMMUNITY EDITION CONFIGURATION ##
5 5 ################################################################################
6 6
7 7 [DEFAULT]
8 8 ## Debug flag sets all loggers to debug, and enables request tracking
9 9 debug = true
10 10
11 11 ################################################################################
12 12 ## EMAIL CONFIGURATION ##
13 13 ## Uncomment and replace with the email address which should receive ##
14 14 ## any error reports after an application crash ##
15 15 ## Additionally these settings will be used by the RhodeCode mailing system ##
16 16 ################################################################################
17 17
18 18 ## prefix all emails subjects with given prefix, helps filtering out emails
19 19 #email_prefix = [RhodeCode]
20 20
21 21 ## email FROM address all mails will be sent
22 22 #app_email_from = rhodecode-noreply@localhost
23 23
24 24 #smtp_server = mail.server.com
25 25 #smtp_username =
26 26 #smtp_password =
27 27 #smtp_port =
28 28 #smtp_use_tls = false
29 29 #smtp_use_ssl = true
30 30
31 31 [server:main]
32 32 ## COMMON ##
33 33 host = 127.0.0.1
34 34 port = 5000
35 35
36 36 ###########################################################
37 37 ## WAITRESS WSGI SERVER - Recommended for Development ####
38 38 ###########################################################
39 39
40 40 use = egg:waitress#main
41 41 ## number of worker threads
42 42 threads = 5
43 43 ## MAX BODY SIZE 100GB
44 44 max_request_body_size = 107374182400
45 45 ## Use poll instead of select, fixes file descriptors limits problems.
46 46 ## May not work on old windows systems.
47 47 asyncore_use_poll = true
48 48
49 49
50 50 ##########################
51 51 ## GUNICORN WSGI SERVER ##
52 52 ##########################
53 53 ## run with gunicorn --log-config rhodecode.ini --paste rhodecode.ini
54 54
55 55 #use = egg:gunicorn#main
56 56 ## Sets the number of process workers. More workers means more concurrent connections
57 57 ## RhodeCode can handle at the same time. Each additional worker also increases
58 58 ## memory usage, as each has its own set of caches.
59 59 ## Recommended value is (2 * NUMBER_OF_CPUS + 1), e.g. 2 CPUs = 5 workers, but no more
60 60 ## than 8-10 unless it is a really big deployment, e.g. 700-1000 users.
61 61 ## `instance_id = *` must be set in the [app:main] section below (which is the default)
62 62 ## when using more than 1 worker.
63 63 #workers = 2
64 64 ## process name visible in process list
65 65 #proc_name = rhodecode
66 66 ## type of worker class, one of sync, gevent
67 67 ## for bigger setups it is recommended to use a worker class other than sync
68 68 #worker_class = gevent
69 69 ## The maximum number of simultaneous clients. Valid only for Gevent
70 70 #worker_connections = 10
71 71 ## max number of requests that worker will handle before being gracefully
72 72 ## restarted, could prevent memory leaks
73 73 #max_requests = 1000
74 74 #max_requests_jitter = 30
75 75 ## amount of time a worker can spend handling a request before it
76 76 ## gets killed and restarted. Set to 6hrs
77 77 #timeout = 21600
78 78
79 79
80 80 ## prefix middleware for RhodeCode.
81 81 ## recommended when using a proxy setup.
82 82 ## allows serving RhodeCode under a URL prefix on the server.
83 83 ## eg https://server.com/custom_prefix. Enable `filter-with =` option below as well.
84 84 ## And set your prefix like: `prefix = /custom_prefix`
85 85 ## be sure to also set beaker.session.cookie_path = /custom_prefix if you need
86 86 ## to make your cookies only work on prefix url
87 87 [filter:proxy-prefix]
88 88 use = egg:PasteDeploy#prefix
89 89 prefix = /
90 90
91 91 [app:main]
92 92 ## The %(here)s variable will be replaced with the absolute path of parent directory
93 93 ## of this file
94 94 ## In addition ENVIRONMENT variables usage is possible, e.g
95 95 ## sqlalchemy.db1.url = {ENV_RC_DB_URL}
96 96
97 97 use = egg:rhodecode-enterprise-ce
98 98
99 99 ## enable proxy prefix middleware, defined above
100 100 #filter-with = proxy-prefix
101 101
102 102 # During development we want to have the debug toolbar enabled
103 103 pyramid.includes =
104 104 pyramid_debugtoolbar
105 105 rhodecode.lib.middleware.request_wrapper
106 106
107 107 pyramid.reload_templates = true
108 108
109 109 debugtoolbar.hosts = 0.0.0.0/0
110 110 debugtoolbar.exclude_prefixes =
111 111 /css
112 112 /fonts
113 113 /images
114 114 /js
115 115
116 116 ## RHODECODE PLUGINS ##
117 117 rhodecode.includes =
118 118 rhodecode.api
119 119
120 120
121 121 # api prefix url
122 122 rhodecode.api.url = /_admin/api
123 123
124 124
125 125 ## END RHODECODE PLUGINS ##
126 126
127 127 ## encryption key used to encrypt social plugin tokens,
128 128 ## remote_urls with credentials etc, if not set it defaults to
129 129 ## `beaker.session.secret`
130 130 #rhodecode.encrypted_values.secret =
131 131
132 132 ## decryption strict mode (enabled by default). It controls if decryption raises
133 133 ## `SignatureVerificationError` in case of wrong key, or damaged encryption data.
134 134 #rhodecode.encrypted_values.strict = false
135 135
136 ## Pick the algorithm used for encryption: either fernet (more secure) or aes (default).
137 ## fernet is safer, and we strongly recommend switching to it.
138 ## For backward compatibility, aes remains the default.
139 #rhodecode.encrypted_values.algorithm = fernet
140
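
For context, a sketch of how this setting could be used to choose between the legacy aes backend and the new Fernet-based Encryptor; the encrypt_value helper and its aes branch are illustrative assumptions, not the actual RhodeCode API.

    from rhodecode.lib.encrypt2 import Encryptor  # assumed import path

    def encrypt_value(value, enc_key, algo='aes'):
        # hypothetical dispatcher keyed on rhodecode.encrypted_values.algorithm
        if algo == 'fernet':
            return Encryptor(enc_key).encrypt(value)
        if algo == 'aes':
            raise NotImplementedError('legacy AES backend is not part of this changeset')
        raise ValueError('unsupported encryption algorithm: %r' % algo)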
136 141 ## return gzipped responses from RhodeCode (static files/application)
137 142 gzip_responses = false
138 143
139 144 ## auto-generate javascript routes file on startup
140 145 generate_js_files = false
141 146
142 147 ## System global default language.
143 148 ## All available languages: en(default), be, de, es, fr, it, ja, pl, pt, ru, zh
144 149 lang = en
145 150
146 151 ## Perform a full repository scan and import on each server start.
147 152 ## Setting this to true could lead to a very long startup time.
148 153 startup.import_repos = false
149 154
150 155 ## Uncomment and set this path to use archive download cache.
151 156 ## Once enabled, generated archives will be cached at this location
152 157 ## and served from the cache during subsequent requests for the same archive of
153 158 ## the repository.
154 159 #archive_cache_dir = /tmp/tarballcache
155 160
156 161 ## URL at which the application is running. This is used for Bootstrapping
157 162 ## requests in context when no web request is available. Used in ishell, or
158 163 ## SSH calls. Set this for events to receive proper url for SSH calls.
159 164 app.base_url = http://rhodecode.local
160 165
161 166 ## Unique application ID. Should be a random unique string for security.
162 167 app_instance_uuid = rc-production
163 168
164 169 ## Cut off limit for large diffs (size in bytes). If overall diff size on
165 170 ## commit, or pull request exceeds this limit this diff will be displayed
166 171 ## partially. E.g 512000 == 512Kb
167 172 cut_off_limit_diff = 512000
168 173
169 174 ## Cut off limit for large files inside diffs (size in bytes). Each individual
170 175 ## file inside diff which exceeds this limit will be displayed partially.
171 176 ## E.g 128000 == 128Kb
172 177 cut_off_limit_file = 128000
173 178
174 179 ## use cached version of vcs repositories everywhere. Recommended to be `true`
175 180 vcs_full_cache = true
176 181
177 182 ## Force https in RhodeCode, fixes https redirects, assumes it's always https.
178 183 ## Normally this is controlled by proper http flags sent from http server
179 184 force_https = false
180 185
181 186 ## use Strict-Transport-Security headers
182 187 use_htsts = false
183 188
184 189 ## git rev filter option, --all is the default filter, if you need to
185 190 ## hide all refs in changelog switch this to --branches --tags
186 191 git_rev_filter = --branches --tags
187 192
188 193 # Set to true if your repos are exposed using the dumb protocol
189 194 git_update_server_info = false
190 195
191 196 ## RSS/ATOM feed options
192 197 rss_cut_off_limit = 256000
193 198 rss_items_per_page = 10
194 199 rss_include_diff = false
195 200
196 201 ## gist URL alias, used to create nicer urls for gist. This should be an
197 202 ## url that does rewrites to _admin/gists/{gistid}.
198 203 ## example: http://gist.rhodecode.org/{gistid}. Empty means use the internal
199 204 ## RhodeCode url, ie. http[s]://rhodecode.server/_admin/gists/{gistid}
200 205 gist_alias_url =
201 206
202 207 ## List of views (using glob pattern syntax) that AUTH TOKENS could be
203 208 ## used for access.
204 209 ## Adding ?auth_token=TOKEN_HASH to the url authenticates this request as if it
205 210 ## came from the logged-in user who owns this authentication token.
206 211 ## Additionally, the @TOKEN syntax can be used to bind the view to a specific
207 212 ## authentication token. Such a view would only be accessible when used together
208 213 ## with this authentication token
209 214 ##
210 215 ## list of all views can be found under `/_admin/permissions/auth_token_access`
211 216 ## The list should be "," separated and on a single line.
212 217 ##
213 218 ## Most common views to enable:
214 219 # RepoCommitsView:repo_commit_download
215 220 # RepoCommitsView:repo_commit_patch
216 221 # RepoCommitsView:repo_commit_raw
217 222 # RepoCommitsView:repo_commit_raw@TOKEN
218 223 # RepoFilesView:repo_files_diff
219 224 # RepoFilesView:repo_archivefile
220 225 # RepoFilesView:repo_file_raw
221 226 # GistView:*
222 227 api_access_controllers_whitelist =
223 228
224 229 ## Default encoding used to convert from and to unicode
225 230 ## can also be a comma separated list of encodings in case of mixed encodings
226 231 default_encoding = UTF-8
227 232
228 233 ## instance-id prefix
229 234 ## a prefix key for this instance used for cache invalidation when running
230 235 ## multiple instances of RhodeCode, make sure it's globally unique for
231 236 ## all running RhodeCode instances. Leave empty if you don't use it
232 237 instance_id =
233 238
234 239 ## Fallback authentication plugin. Set this to a plugin ID to force the usage
235 240 ## of an authentication plugin even if it is disabled by its settings.
236 241 ## This could be useful if you are unable to log in to the system due to broken
237 242 ## authentication settings. Then you can enable e.g. the internal RhodeCode auth
238 243 ## module to log in again and fix the settings.
239 244 ##
240 245 ## Available builtin plugin IDs (hash is part of the ID):
241 246 ## egg:rhodecode-enterprise-ce#rhodecode
242 247 ## egg:rhodecode-enterprise-ce#pam
243 248 ## egg:rhodecode-enterprise-ce#ldap
244 249 ## egg:rhodecode-enterprise-ce#jasig_cas
245 250 ## egg:rhodecode-enterprise-ce#headers
246 251 ## egg:rhodecode-enterprise-ce#crowd
247 252 #rhodecode.auth_plugin_fallback = egg:rhodecode-enterprise-ce#rhodecode
248 253
249 254 ## alternative return HTTP header for failed authentication. Default HTTP
250 255 ## response is 401 HTTPUnauthorized. Currently HG clients have trouble
251 256 ## handling that, causing a series of failed authentication calls.
252 257 ## Set this variable to 403 to return HTTPForbidden, or any other HTTP code
253 258 ## This will be served instead of default 401 on bad authentication
254 259 auth_ret_code =
255 260
256 261 ## use special detection method when serving auth_ret_code, instead of serving
257 262 ## ret_code directly, use 401 initially (Which triggers credentials prompt)
258 263 ## and then serve auth_ret_code to clients
259 264 auth_ret_code_detection = false
260 265
261 266 ## locking return code. When repository is locked return this HTTP code. 2XX
262 267 ## codes don't break the transactions while 4XX codes do
263 268 lock_ret_code = 423
264 269
265 270 ## allows changing the repository location in the settings page
266 271 allow_repo_location_change = true
267 272
268 273 ## allows setting up custom hooks in the settings page
269 274 allow_custom_hooks_settings = true
270 275
271 276 ## Generated license token required for EE edition license.
272 277 ## New generated token value can be found in Admin > settings > license page.
273 278 license_token =
274 279
275 280 ## supervisor connection uri, for managing supervisor and logs.
276 281 supervisor.uri =
277 282 ## supervisord group name/id we only want this RC instance to handle
278 283 supervisor.group_id = dev
279 284
280 285 ## Display extended labs settings
281 286 labs_settings_active = true
282 287
283 288 ## Custom exception store path, defaults to TMPDIR
284 289 ## This is used to store exception from RhodeCode in shared directory
285 290 #exception_tracker.store_path =
286 291
287 292 ## File store configuration. This is used to store and serve uploaded files
288 293 file_store.enabled = true
289 294 ## Storage backend, available options are: local
290 295 file_store.backend = local
291 296 ## path to store the uploaded binaries
292 297 file_store.storage_path = %(here)s/data/file_store
293 298
294 299
295 300 ####################################
296 301 ### CELERY CONFIG ####
297 302 ####################################
298 303 ## run: /path/to/celery worker \
299 304 ## -E --beat --app rhodecode.lib.celerylib.loader \
300 305 ## --scheduler rhodecode.lib.celerylib.scheduler.RcScheduler \
301 306 ## --loglevel DEBUG --ini /path/to/rhodecode.ini
302 307
303 308 use_celery = false
304 309
305 310 ## connection url to the message broker (default rabbitmq)
306 311 celery.broker_url = amqp://rabbitmq:qweqwe@localhost:5672/rabbitmqhost
307 312
308 313 ## maximum tasks to execute before worker restart
309 314 celery.max_tasks_per_child = 100
310 315
311 316 ## tasks will never be sent to the queue, but executed locally instead.
312 317 celery.task_always_eager = false
313 318
314 319 #####################################
315 320 ### DOGPILE CACHE ####
316 321 #####################################
317 322 ## Default cache dir for caches. Putting this into a ramdisk
318 323 ## can boost performance, eg. /tmpfs/data_ramdisk, however this directory might require
319 324 ## large amount of space
320 325 cache_dir = %(here)s/data
321 326
322 327 ## `cache_perms` cache settings for permission tree, auth TTL.
323 328 rc_cache.cache_perms.backend = dogpile.cache.rc.file_namespace
324 329 rc_cache.cache_perms.expiration_time = 300
325 330
326 331 ## alternative `cache_perms` redis backend with distributed lock
327 332 #rc_cache.cache_perms.backend = dogpile.cache.rc.redis
328 333 #rc_cache.cache_perms.expiration_time = 300
329 334 ## redis_expiration_time needs to be greater than expiration_time
330 335 #rc_cache.cache_perms.arguments.redis_expiration_time = 7200
331 336 #rc_cache.cache_perms.arguments.socket_timeout = 30
332 337 #rc_cache.cache_perms.arguments.host = localhost
333 338 #rc_cache.cache_perms.arguments.port = 6379
334 339 #rc_cache.cache_perms.arguments.db = 0
335 340 ## more Redis options: https://dogpilecache.sqlalchemy.org/en/latest/api.html#redis-backends
336 341 #rc_cache.cache_perms.arguments.distributed_lock = true
337 342
338 343 ## `cache_repo` cache settings for FileTree, Readme, RSS FEEDS
339 344 rc_cache.cache_repo.backend = dogpile.cache.rc.file_namespace
340 345 rc_cache.cache_repo.expiration_time = 2592000
341 346
342 347 ## alternative `cache_repo` redis backend with distributed lock
343 348 #rc_cache.cache_repo.backend = dogpile.cache.rc.redis
344 349 #rc_cache.cache_repo.expiration_time = 2592000
345 350 ## redis_expiration_time needs to be greater than expiration_time
346 351 #rc_cache.cache_repo.arguments.redis_expiration_time = 2678400
347 352 #rc_cache.cache_repo.arguments.socket_timeout = 30
348 353 #rc_cache.cache_repo.arguments.host = localhost
349 354 #rc_cache.cache_repo.arguments.port = 6379
350 355 #rc_cache.cache_repo.arguments.db = 1
351 356 ## more Redis options: https://dogpilecache.sqlalchemy.org/en/latest/api.html#redis-backends
352 357 #rc_cache.cache_repo.arguments.distributed_lock = true
353 358
354 359 ## cache settings for SQL queries, this needs to use memory type backend
355 360 rc_cache.sql_cache_short.backend = dogpile.cache.rc.memory_lru
356 361 rc_cache.sql_cache_short.expiration_time = 30
357 362
358 363 ## `cache_repo_longterm` cache for repo object instances, this needs to use memory
359 364 ## type backend as the objects kept are not pickle serializable
360 365 rc_cache.cache_repo_longterm.backend = dogpile.cache.rc.memory_lru
361 366 ## by default we use 96H, this is using invalidation on push anyway
362 367 rc_cache.cache_repo_longterm.expiration_time = 345600
363 368 ## max items in LRU cache, reduce this number to save memory, and expire last used
364 369 ## cached objects
365 370 rc_cache.cache_repo_longterm.max_size = 10000
366 371
367 372
368 373 ####################################
369 374 ### BEAKER SESSION ####
370 375 ####################################
371 376
372 377 ## .session.type is type of storage options for the session, current allowed
373 378 ## types are file, ext:memcached, ext:redis, ext:database, and memory (default).
374 379 beaker.session.type = file
375 380 beaker.session.data_dir = %(here)s/data/sessions
376 381
377 382 ## db based session, fast, and allows easy management over logged in users
378 383 #beaker.session.type = ext:database
379 384 #beaker.session.table_name = db_session
380 385 #beaker.session.sa.url = postgresql://postgres:secret@localhost/rhodecode
381 386 #beaker.session.sa.url = mysql://root:secret@127.0.0.1/rhodecode
382 387 #beaker.session.sa.pool_recycle = 3600
383 388 #beaker.session.sa.echo = false
384 389
385 390 beaker.session.key = rhodecode
386 391 beaker.session.secret = develop-rc-uytcxaz
387 392 beaker.session.lock_dir = %(here)s/data/sessions/lock
388 393
389 394 ## Secure encrypted cookie. Requires AES and AES python libraries
390 395 ## you must disable beaker.session.secret to use this
391 396 #beaker.session.encrypt_key = key_for_encryption
392 397 #beaker.session.validate_key = validation_key
393 398
394 399 ## sets session as invalid (also logging out the user) if it has not been
395 400 ## accessed for a given amount of time in seconds
396 401 beaker.session.timeout = 2592000
397 402 beaker.session.httponly = true
398 403 ## Path to use for the cookie. Set to prefix if you use prefix middleware
399 404 #beaker.session.cookie_path = /custom_prefix
400 405
401 406 ## uncomment for https secure cookie
402 407 beaker.session.secure = false
403 408
404 409 ## auto save the session so that calling .save() is not needed
405 410 beaker.session.auto = false
406 411
407 412 ## default cookie expiration time in seconds, set to `true` to set expire
408 413 ## at browser close
409 414 #beaker.session.cookie_expires = 3600
410 415
411 416 ###################################
412 417 ## SEARCH INDEXING CONFIGURATION ##
413 418 ###################################
414 419 ## Full text search indexer is available in rhodecode-tools under
415 420 ## `rhodecode-tools index` command
416 421
417 422 ## WHOOSH Backend, doesn't require additional services to run
418 423 ## it works well with a few dozen repos
419 424 search.module = rhodecode.lib.index.whoosh
420 425 search.location = %(here)s/data/index
421 426
422 427 ########################################
423 428 ### CHANNELSTREAM CONFIG ####
424 429 ########################################
425 430 ## channelstream enables persistent connections and live notification
426 431 ## in the system. It's also used by the chat system
427 432
428 433 channelstream.enabled = false
429 434
430 435 ## server address for channelstream server on the backend
431 436 channelstream.server = 127.0.0.1:9800
432 437
433 438 ## location of the channelstream server from outside world
434 439 ## use ws:// for http or wss:// for https. This address needs to be handled
435 440 ## by external HTTP server such as Nginx or Apache
436 441 ## see Nginx/Apache configuration examples in our docs
437 442 channelstream.ws_url = ws://rhodecode.yourserver.com/_channelstream
438 443 channelstream.secret = secret
439 444 channelstream.history.location = %(here)s/channelstream_history
440 445
441 446 ## Internal application path that Javascript uses to connect into.
442 447 ## If you use proxy-prefix the prefix should be added before /_channelstream
443 448 channelstream.proxy_path = /_channelstream
444 449
445 450
446 451 ###################################
447 452 ## APPENLIGHT CONFIG ##
448 453 ###################################
449 454
450 455 ## Appenlight is tailored to work with RhodeCode, see
451 456 ## http://appenlight.com for details how to obtain an account
452 457
453 458 ## Appenlight integration enabled
454 459 appenlight = false
455 460
456 461 appenlight.server_url = https://api.appenlight.com
457 462 appenlight.api_key = YOUR_API_KEY
458 463 #appenlight.transport_config = https://api.appenlight.com?threaded=1&timeout=5
459 464
460 465 ## used for JS client
461 466 appenlight.api_public_key = YOUR_API_PUBLIC_KEY
462 467
463 468 ## TWEAK AMOUNT OF INFO SENT HERE
464 469
465 470 ## enables 404 error logging (default False)
466 471 appenlight.report_404 = false
467 472
468 473 ## time in seconds after request is considered being slow (default 1)
469 474 appenlight.slow_request_time = 1
470 475
471 476 ## record slow requests in application
472 477 ## (needs to be enabled for slow datastore recording and time tracking)
473 478 appenlight.slow_requests = true
474 479
475 480 ## enable hooking to application loggers
476 481 appenlight.logging = true
477 482
478 483 ## minimum log level for log capture
479 484 appenlight.logging.level = WARNING
480 485
481 486 ## send logs only from erroneous/slow requests
482 487 ## (saves API quota for intensive logging)
483 488 appenlight.logging_on_error = false
484 489
485 490 ## list of additional keywords that should be grabbed from environ object
486 491 ## can be string with comma separated list of words in lowercase
487 492 ## (by default client will always send following info:
488 493 ## 'REMOTE_USER', 'REMOTE_ADDR', 'SERVER_NAME', 'CONTENT_TYPE' + all keys that
489 494 ## start with HTTP*). This list can be extended with additional keywords here
490 495 appenlight.environ_keys_whitelist =
491 496
492 497 ## list of keywords that should be blanked from request object
493 498 ## can be string with comma separated list of words in lowercase
494 499 ## (by default client will always blank keys that contain following words
495 500 ## 'password', 'passwd', 'pwd', 'auth_tkt', 'secret', 'csrf'
496 501 ## this list can be extended with additional keywords set here
497 502 appenlight.request_keys_blacklist =
498 503
499 504 ## list of namespaces that should be ignored when gathering log entries
500 505 ## can be string with comma separated list of namespaces
501 506 ## (by default the client ignores own entries: appenlight_client.client)
502 507 appenlight.log_namespace_blacklist =
503 508
504 509 # enable debug style page
505 510 debug_style = true
506 511
507 512 ###########################################
508 513 ### MAIN RHODECODE DATABASE CONFIG ###
509 514 ###########################################
510 515 #sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode.db?timeout=30
511 516 #sqlalchemy.db1.url = postgresql://postgres:qweqwe@localhost/rhodecode
512 517 #sqlalchemy.db1.url = mysql://root:qweqwe@localhost/rhodecode?charset=utf8
513 518 # pymysql is an alternative driver for MySQL, use in case of problems with default one
514 519 #sqlalchemy.db1.url = mysql+pymysql://root:qweqwe@localhost/rhodecode
515 520
516 521 sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode.db?timeout=30
517 522
518 523 # see sqlalchemy docs for other advanced settings
519 524
520 525 ## print the sql statements to output
521 526 sqlalchemy.db1.echo = false
522 527 ## recycle the connections after this amount of seconds
523 528 sqlalchemy.db1.pool_recycle = 3600
524 529 sqlalchemy.db1.convert_unicode = true
525 530
526 531 ## the number of connections to keep open inside the connection pool.
527 532 ## 0 indicates no limit
528 533 #sqlalchemy.db1.pool_size = 5
529 534
530 535 ## the number of connections to allow in connection pool "overflow", that is
531 536 ## connections that can be opened above and beyond the pool_size setting,
532 537 ## which defaults to five.
533 538 #sqlalchemy.db1.max_overflow = 10
534 539
535 540 ## Connection check ping, used to detect broken database connections
536 541 ## can be enabled to better handle 'MySQL server has gone away' errors
537 542 #sqlalchemy.db1.ping_connection = true
538 543
539 544 ##################
540 545 ### VCS CONFIG ###
541 546 ##################
542 547 vcs.server.enable = true
543 548 vcs.server = localhost:9900
544 549
545 550 ## Web server connectivity protocol, responsible for web based VCS operations
546 551 ## Available protocols are:
547 552 ## `http` - use http-rpc backend (default)
548 553 vcs.server.protocol = http
549 554
550 555 ## Push/Pull operations protocol, available options are:
551 556 ## `http` - use http-rpc backend (default)
552 557 vcs.scm_app_implementation = http
553 558
554 559 ## Push/Pull operations hooks protocol, available options are:
555 560 ## `http` - use http-rpc backend (default)
556 561 vcs.hooks.protocol = http
557 562
558 563 ## Host on which this instance is listening for hooks. If vcsserver is in other location
559 564 ## this should be adjusted.
560 565 vcs.hooks.host = 127.0.0.1
561 566
562 567 vcs.server.log_level = debug
563 568 ## Start VCSServer with this instance as a subprocess, useful for development
564 569 vcs.start_server = false
565 570
566 571 ## List of enabled VCS backends, available options are:
567 572 ## `hg` - mercurial
568 573 ## `git` - git
569 574 ## `svn` - subversion
570 575 vcs.backends = hg, git, svn
571 576
572 577 vcs.connection_timeout = 3600
573 578 ## Compatibility version when creating SVN repositories. Defaults to newest version when commented out.
574 579 ## Available options are: pre-1.4-compatible, pre-1.5-compatible, pre-1.6-compatible, pre-1.8-compatible, pre-1.9-compatible
575 580 #vcs.svn.compatible_version = pre-1.8-compatible
576 581
577 582
578 583 ############################################################
579 584 ### Subversion proxy support (mod_dav_svn) ###
580 585 ### Maps RhodeCode repo groups into SVN paths for Apache ###
581 586 ############################################################
582 587 ## Enable or disable the config file generation.
583 588 svn.proxy.generate_config = false
584 589 ## Generate config file with `SVNListParentPath` set to `On`.
585 590 svn.proxy.list_parent_path = true
586 591 ## Set location and file name of generated config file.
587 592 svn.proxy.config_file_path = %(here)s/mod_dav_svn.conf
588 593 ## alternative mod_dav config template. This needs to be a mako template
589 594 #svn.proxy.config_template = ~/.rccontrol/enterprise-1/custom_svn_conf.mako
590 595 ## Used as a prefix to the `Location` block in the generated config file.
591 596 ## In most cases it should be set to `/`.
592 597 svn.proxy.location_root = /
593 598 ## Command to reload the mod dav svn configuration on change.
594 599 ## Example: `/etc/init.d/apache2 reload` or /home/USER/apache_reload.sh
595 600 ## Make sure user who runs RhodeCode process is allowed to reload Apache
596 601 #svn.proxy.reload_cmd = /etc/init.d/apache2 reload
597 602 ## If the timeout expires before the reload command finishes, the command will
598 603 ## be killed. Setting it to zero means no timeout. Defaults to 10 seconds.
599 604 #svn.proxy.reload_timeout = 10
600 605
601 606 ############################################################
602 607 ### SSH Support Settings ###
603 608 ############################################################
604 609
605 610 ## Defines if a custom authorized_keys file should be created and written on
606 611 ## any change of user SSH keys. Setting this to false also disables the possibility
607 612 ## of users adding SSH keys from the web interface. Super admins can still
608 613 ## manage SSH Keys.
609 614 ssh.generate_authorized_keyfile = false
610 615
611 616 ## Options for ssh, default is `no-pty,no-port-forwarding,no-X11-forwarding,no-agent-forwarding`
612 617 # ssh.authorized_keys_ssh_opts =
613 618
614 619 ## Path to the authorized_keys file where the generated entries are placed.
615 620 ## It is possible to have multiple key files specified in `sshd_config` e.g.
616 621 ## AuthorizedKeysFile %h/.ssh/authorized_keys %h/.ssh/authorized_keys_rhodecode
617 622 ssh.authorized_keys_file_path = ~/.ssh/authorized_keys_rhodecode
618 623
619 624 ## Command to execute the SSH wrapper. The binary is available in the
620 625 ## RhodeCode installation directory.
621 626 ## e.g ~/.rccontrol/community-1/profile/bin/rc-ssh-wrapper
622 627 ssh.wrapper_cmd = ~/.rccontrol/community-1/rc-ssh-wrapper
623 628
624 629 ## Allow shell when executing the ssh-wrapper command
625 630 ssh.wrapper_cmd_allow_shell = false
626 631
627 632 ## Enables logging, and detailed output sent back to the client during SSH
628 633 ## operations. Useful for debugging, shouldn't be used in production.
629 634 ssh.enable_debug_logging = true
630 635
631 636 ## Paths to binary executable, by default they are the names, but we can
632 637 ## override them if we want to use a custom one
633 638 ssh.executable.hg = ~/.rccontrol/vcsserver-1/profile/bin/hg
634 639 ssh.executable.git = ~/.rccontrol/vcsserver-1/profile/bin/git
635 640 ssh.executable.svn = ~/.rccontrol/vcsserver-1/profile/bin/svnserve
636 641
637 642 ## Enables SSH key generator web interface. Disabling this still allows users
638 643 ## to add their own keys.
639 644 ssh.enable_ui_key_generator = true
640 645
641 646
642 647 ## Dummy marker to add new entries after.
643 648 ## Add any custom entries below. Please don't remove.
644 649 custom.conf = 1
645 650
646 651
647 652 ################################
648 653 ### LOGGING CONFIGURATION ####
649 654 ################################
650 655 [loggers]
651 656 keys = root, sqlalchemy, beaker, celery, rhodecode, ssh_wrapper
652 657
653 658 [handlers]
654 659 keys = console, console_sql
655 660
656 661 [formatters]
657 662 keys = generic, color_formatter, color_formatter_sql
658 663
659 664 #############
660 665 ## LOGGERS ##
661 666 #############
662 667 [logger_root]
663 668 level = NOTSET
664 669 handlers = console
665 670
666 671 [logger_sqlalchemy]
667 672 level = INFO
668 673 handlers = console_sql
669 674 qualname = sqlalchemy.engine
670 675 propagate = 0
671 676
672 677 [logger_beaker]
673 678 level = DEBUG
674 679 handlers =
675 680 qualname = beaker.container
676 681 propagate = 1
677 682
678 683 [logger_rhodecode]
679 684 level = DEBUG
680 685 handlers =
681 686 qualname = rhodecode
682 687 propagate = 1
683 688
684 689 [logger_ssh_wrapper]
685 690 level = DEBUG
686 691 handlers =
687 692 qualname = ssh_wrapper
688 693 propagate = 1
689 694
690 695 [logger_celery]
691 696 level = DEBUG
692 697 handlers =
693 698 qualname = celery
694 699
695 700
696 701 ##############
697 702 ## HANDLERS ##
698 703 ##############
699 704
700 705 [handler_console]
701 706 class = StreamHandler
702 707 args = (sys.stderr, )
703 708 level = DEBUG
704 709 formatter = color_formatter
705 710
706 711 [handler_console_sql]
707 712 # "level = DEBUG" logs SQL queries and results.
708 713 # "level = INFO" logs SQL queries.
709 714 # "level = WARN" logs neither. (Recommended for production systems.)
710 715 class = StreamHandler
711 716 args = (sys.stderr, )
712 717 level = WARN
713 718 formatter = color_formatter_sql
714 719
715 720 ################
716 721 ## FORMATTERS ##
717 722 ################
718 723
719 724 [formatter_generic]
720 725 class = rhodecode.lib.logging_formatter.ExceptionAwareFormatter
721 726 format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s
722 727 datefmt = %Y-%m-%d %H:%M:%S
723 728
724 729 [formatter_color_formatter]
725 730 class = rhodecode.lib.logging_formatter.ColorFormatter
726 731 format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s
727 732 datefmt = %Y-%m-%d %H:%M:%S
728 733
729 734 [formatter_color_formatter_sql]
730 735 class = rhodecode.lib.logging_formatter.ColorFormatterSql
731 736 format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s
732 737 datefmt = %Y-%m-%d %H:%M:%S
@@ -1,705 +1,710 b''
1 1
2 2
3 3 ################################################################################
4 4 ## RHODECODE COMMUNITY EDITION CONFIGURATION ##
5 5 ################################################################################
6 6
7 7 [DEFAULT]
8 8 ## Debug flag sets all loggers to debug, and enables request tracking
9 9 debug = false
10 10
11 11 ################################################################################
12 12 ## EMAIL CONFIGURATION ##
13 13 ## Uncomment and replace with the email address which should receive ##
14 14 ## any error reports after an application crash ##
15 15 ## Additionally these settings will be used by the RhodeCode mailing system ##
16 16 ################################################################################
17 17
18 18 ## prefix all emails subjects with given prefix, helps filtering out emails
19 19 #email_prefix = [RhodeCode]
20 20
21 21 ## email FROM address all mails will be sent
22 22 #app_email_from = rhodecode-noreply@localhost
23 23
24 24 #smtp_server = mail.server.com
25 25 #smtp_username =
26 26 #smtp_password =
27 27 #smtp_port =
28 28 #smtp_use_tls = false
29 29 #smtp_use_ssl = true
30 30
31 31 [server:main]
32 32 ## COMMON ##
33 33 host = 127.0.0.1
34 34 port = 5000
35 35
36 36 ###########################################################
37 37 ## WAITRESS WSGI SERVER - Recommended for Development ####
38 38 ###########################################################
39 39
40 40 #use = egg:waitress#main
41 41 ## number of worker threads
42 42 #threads = 5
43 43 ## MAX BODY SIZE 100GB
44 44 #max_request_body_size = 107374182400
45 45 ## Use poll instead of select, fixes file descriptors limits problems.
46 46 ## May not work on old windows systems.
47 47 #asyncore_use_poll = true
48 48
49 49
50 50 ##########################
51 51 ## GUNICORN WSGI SERVER ##
52 52 ##########################
53 53 ## run with gunicorn --log-config rhodecode.ini --paste rhodecode.ini
54 54
55 55 use = egg:gunicorn#main
56 56 ## Sets the number of process workers. More workers means more concurrent connections
57 57 ## RhodeCode can handle at the same time. Each additional worker also increases
58 58 ## memory usage, as each has its own set of caches.
59 59 ## Recommended value is (2 * NUMBER_OF_CPUS + 1), e.g. 2 CPUs = 5 workers, but no more
60 60 ## than 8-10 unless it is a really big deployment, e.g. 700-1000 users.
61 61 ## `instance_id = *` must be set in the [app:main] section below (which is the default)
62 62 ## when using more than 1 worker.
63 63 workers = 2
64 64 ## process name visible in process list
65 65 proc_name = rhodecode
66 66 ## type of worker class, one of sync, gevent
67 67 ## for bigger setups it is recommended to use a worker class other than sync
68 68 worker_class = gevent
69 69 ## The maximum number of simultaneous clients. Valid only for Gevent
70 70 worker_connections = 10
71 71 ## max number of requests that worker will handle before being gracefully
72 72 ## restarted, could prevent memory leaks
73 73 max_requests = 1000
74 74 max_requests_jitter = 30
75 75 ## amount of time a worker can spend handling a request before it
76 76 ## gets killed and restarted. Set to 6hrs
77 77 timeout = 21600
78 78
79 79
80 80 ## prefix middleware for RhodeCode.
81 81 ## recommended when using a proxy setup.
82 82 ## allows serving RhodeCode under a URL prefix on the server.
83 83 ## eg https://server.com/custom_prefix. Enable `filter-with =` option below as well.
84 84 ## And set your prefix like: `prefix = /custom_prefix`
85 85 ## be sure to also set beaker.session.cookie_path = /custom_prefix if you need
86 86 ## to make your cookies only work on prefix url
87 87 [filter:proxy-prefix]
88 88 use = egg:PasteDeploy#prefix
89 89 prefix = /
90 90
91 91 [app:main]
92 92 ## The %(here)s variable will be replaced with the absolute path of parent directory
93 93 ## of this file
94 94 ## In addition ENVIRONMENT variables usage is possible, e.g
95 95 ## sqlalchemy.db1.url = {ENV_RC_DB_URL}
96 96
97 97 use = egg:rhodecode-enterprise-ce
98 98
99 99 ## enable proxy prefix middleware, defined above
100 100 #filter-with = proxy-prefix
101 101
102 102 ## encryption key used to encrypt social plugin tokens,
103 103 ## remote_urls with credentials etc, if not set it defaults to
104 104 ## `beaker.session.secret`
105 105 #rhodecode.encrypted_values.secret =
106 106
107 107 ## decryption strict mode (enabled by default). It controls if decryption raises
108 108 ## `SignatureVerificationError` in case of wrong key, or damaged encryption data.
109 109 #rhodecode.encrypted_values.strict = false
110 110
111 ## Pick the algorithm used for encryption: either fernet (more secure) or aes (default).
112 ## fernet is safer, and we strongly recommend switching to it.
113 ## For backward compatibility, aes remains the default.
114 #rhodecode.encrypted_values.algorithm = fernet
115
111 116 ## return gzipped responses from RhodeCode (static files/application)
112 117 gzip_responses = false
113 118
114 119 ## auto-generate javascript routes file on startup
115 120 generate_js_files = false
116 121
117 122 ## System global default language.
118 123 ## All available languages: en(default), be, de, es, fr, it, ja, pl, pt, ru, zh
119 124 lang = en
120 125
121 126 ## Perform a full repository scan and import on each server start.
122 127 ## Setting this to true could lead to a very long startup time.
123 128 startup.import_repos = false
124 129
125 130 ## Uncomment and set this path to use archive download cache.
126 131 ## Once enabled, generated archives will be cached at this location
127 132 ## and served from the cache during subsequent requests for the same archive of
128 133 ## the repository.
129 134 #archive_cache_dir = /tmp/tarballcache
130 135
131 136 ## URL at which the application is running. This is used for Bootstrapping
132 137 ## requests in context when no web request is available. Used in ishell, or
133 138 ## SSH calls. Set this for events to receive proper url for SSH calls.
134 139 app.base_url = http://rhodecode.local
135 140
136 141 ## Unique application ID. Should be a random unique string for security.
137 142 app_instance_uuid = rc-production
138 143
139 144 ## Cut off limit for large diffs (size in bytes). If overall diff size on
140 145 ## commit, or pull request exceeds this limit this diff will be displayed
141 146 ## partially. E.g 512000 == 512Kb
142 147 cut_off_limit_diff = 512000
143 148
144 149 ## Cut off limit for large files inside diffs (size in bytes). Each individual
145 150 ## file inside diff which exceeds this limit will be displayed partially.
146 151 ## E.g 128000 == 128Kb
147 152 cut_off_limit_file = 128000
148 153
149 154 ## use cached version of vcs repositories everywhere. Recommended to be `true`
150 155 vcs_full_cache = true
151 156
152 157 ## Force https in RhodeCode, fixes https redirects, assumes it's always https.
153 158 ## Normally this is controlled by proper http flags sent from http server
154 159 force_https = false
155 160
156 161 ## use Strict-Transport-Security headers
157 162 use_htsts = false
158 163
159 164 ## git rev filter option, --all is the default filter, if you need to
160 165 ## hide all refs in changelog switch this to --branches --tags
161 166 git_rev_filter = --branches --tags
162 167
163 168 # Set to true if your repos are exposed using the dumb protocol
164 169 git_update_server_info = false
165 170
166 171 ## RSS/ATOM feed options
167 172 rss_cut_off_limit = 256000
168 173 rss_items_per_page = 10
169 174 rss_include_diff = false
170 175
171 176 ## gist URL alias, used to create nicer urls for gist. This should be an
172 177 ## url that does rewrites to _admin/gists/{gistid}.
173 178 ## example: http://gist.rhodecode.org/{gistid}. Empty means use the internal
174 179 ## RhodeCode url, ie. http[s]://rhodecode.server/_admin/gists/{gistid}
175 180 gist_alias_url =
176 181
177 182 ## List of views (using glob pattern syntax) that AUTH TOKENS could be
178 183 ## used for access.
179 184 ## Adding ?auth_token=TOKEN_HASH to the url authenticates this request as if it
180 185 ## came from the logged-in user who owns this authentication token.
181 186 ## Additionally, the @TOKEN syntax can be used to bind the view to a specific
182 187 ## authentication token. Such a view would only be accessible when used together
183 188 ## with this authentication token
184 189 ##
185 190 ## list of all views can be found under `/_admin/permissions/auth_token_access`
186 191 ## The list should be "," separated and on a single line.
187 192 ##
188 193 ## Most common views to enable:
189 194 # RepoCommitsView:repo_commit_download
190 195 # RepoCommitsView:repo_commit_patch
191 196 # RepoCommitsView:repo_commit_raw
192 197 # RepoCommitsView:repo_commit_raw@TOKEN
193 198 # RepoFilesView:repo_files_diff
194 199 # RepoFilesView:repo_archivefile
195 200 # RepoFilesView:repo_file_raw
196 201 # GistView:*
197 202 api_access_controllers_whitelist =
198 203
199 204 ## Default encoding used to convert from and to unicode
200 205 ## can also be a comma separated list of encodings in case of mixed encodings
201 206 default_encoding = UTF-8
202 207
203 208 ## instance-id prefix
204 209 ## a prefix key for this instance used for cache invalidation when running
205 210 ## multiple instances of RhodeCode, make sure it's globally unique for
206 211 ## all running RhodeCode instances. Leave empty if you don't use it
207 212 instance_id =
208 213
209 214 ## Fallback authentication plugin. Set this to a plugin ID to force the usage
210 215 ## of an authentication plugin even if it is disabled by its settings.
211 216 ## This could be useful if you are unable to log in to the system due to broken
212 217 ## authentication settings. Then you can enable e.g. the internal RhodeCode auth
213 218 ## module to log in again and fix the settings.
214 219 ##
215 220 ## Available builtin plugin IDs (hash is part of the ID):
216 221 ## egg:rhodecode-enterprise-ce#rhodecode
217 222 ## egg:rhodecode-enterprise-ce#pam
218 223 ## egg:rhodecode-enterprise-ce#ldap
219 224 ## egg:rhodecode-enterprise-ce#jasig_cas
220 225 ## egg:rhodecode-enterprise-ce#headers
221 226 ## egg:rhodecode-enterprise-ce#crowd
222 227 #rhodecode.auth_plugin_fallback = egg:rhodecode-enterprise-ce#rhodecode
223 228
224 229 ## alternative return HTTP header for failed authentication. Default HTTP
225 230 ## response is 401 HTTPUnauthorized. Currently HG clients have trouble
226 231 ## handling that, causing a series of failed authentication calls.
227 232 ## Set this variable to 403 to return HTTPForbidden, or any other HTTP code
228 233 ## This will be served instead of default 401 on bad authentication
229 234 auth_ret_code =
230 235
231 236 ## use special detection method when serving auth_ret_code, instead of serving
232 237 ## ret_code directly, use 401 initially (Which triggers credentials prompt)
233 238 ## and then serve auth_ret_code to clients
234 239 auth_ret_code_detection = false
235 240
236 241 ## locking return code. When repository is locked return this HTTP code. 2XX
237 242 ## codes don't break the transactions while 4XX codes do
238 243 lock_ret_code = 423
239 244
240 245 ## allows changing the repository location in the settings page
241 246 allow_repo_location_change = true
242 247
243 248 ## allows setting up custom hooks in the settings page
244 249 allow_custom_hooks_settings = true
245 250
246 251 ## Generated license token required for EE edition license.
247 252 ## New generated token value can be found in Admin > settings > license page.
248 253 license_token =
249 254
250 255 ## supervisor connection uri, for managing supervisor and logs.
251 256 supervisor.uri =
252 257 ## supervisord group name/id we only want this RC instance to handle
253 258 supervisor.group_id = prod
254 259
255 260 ## Display extended labs settings
256 261 labs_settings_active = true
257 262
258 263 ## Custom exception store path, defaults to TMPDIR
259 264 ## This is used to store exception from RhodeCode in shared directory
260 265 #exception_tracker.store_path =
261 266
262 267 ## File store configuration. This is used to store and serve uploaded files
263 268 file_store.enabled = true
264 269 ## Storage backend, available options are: local
265 270 file_store.backend = local
266 271 ## path to store the uploaded binaries
267 272 file_store.storage_path = %(here)s/data/file_store
268 273
269 274
270 275 ####################################
271 276 ### CELERY CONFIG ####
272 277 ####################################
273 278 ## run: /path/to/celery worker \
274 279 ## -E --beat --app rhodecode.lib.celerylib.loader \
275 280 ## --scheduler rhodecode.lib.celerylib.scheduler.RcScheduler \
276 281 ## --loglevel DEBUG --ini /path/to/rhodecode.ini
277 282
278 283 use_celery = false
279 284
280 285 ## connection url to the message broker (default rabbitmq)
281 286 celery.broker_url = amqp://rabbitmq:qweqwe@localhost:5672/rabbitmqhost
282 287
283 288 ## maximum tasks to execute before worker restart
284 289 celery.max_tasks_per_child = 100
285 290
286 291 ## tasks will never be sent to the queue, but executed locally instead.
287 292 celery.task_always_eager = false
288 293
289 294 #####################################
290 295 ### DOGPILE CACHE ####
291 296 #####################################
292 297 ## Default cache dir for caches. Putting this into a ramdisk
293 298 ## can boost performance, eg. /tmpfs/data_ramdisk, however this directory might require
294 299 ## large amount of space
295 300 cache_dir = %(here)s/data
296 301
297 302 ## `cache_perms` cache settings for permission tree, auth TTL.
298 303 rc_cache.cache_perms.backend = dogpile.cache.rc.file_namespace
299 304 rc_cache.cache_perms.expiration_time = 300
300 305
301 306 ## alternative `cache_perms` redis backend with distributed lock
302 307 #rc_cache.cache_perms.backend = dogpile.cache.rc.redis
303 308 #rc_cache.cache_perms.expiration_time = 300
304 309 ## redis_expiration_time needs to be greater than expiration_time
305 310 #rc_cache.cache_perms.arguments.redis_expiration_time = 7200
306 311 #rc_cache.cache_perms.arguments.socket_timeout = 30
307 312 #rc_cache.cache_perms.arguments.host = localhost
308 313 #rc_cache.cache_perms.arguments.port = 6379
309 314 #rc_cache.cache_perms.arguments.db = 0
310 315 ## more Redis options: https://dogpilecache.sqlalchemy.org/en/latest/api.html#redis-backends
311 316 #rc_cache.cache_perms.arguments.distributed_lock = true
312 317
313 318 ## `cache_repo` cache settings for FileTree, Readme, RSS FEEDS
314 319 rc_cache.cache_repo.backend = dogpile.cache.rc.file_namespace
315 320 rc_cache.cache_repo.expiration_time = 2592000
316 321
317 322 ## alternative `cache_repo` redis backend with distributed lock
318 323 #rc_cache.cache_repo.backend = dogpile.cache.rc.redis
319 324 #rc_cache.cache_repo.expiration_time = 2592000
320 325 ## redis_expiration_time needs to be greater than expiration_time
321 326 #rc_cache.cache_repo.arguments.redis_expiration_time = 2678400
322 327 #rc_cache.cache_repo.arguments.socket_timeout = 30
323 328 #rc_cache.cache_repo.arguments.host = localhost
324 329 #rc_cache.cache_repo.arguments.port = 6379
325 330 #rc_cache.cache_repo.arguments.db = 1
326 331 ## more Redis options: https://dogpilecache.sqlalchemy.org/en/latest/api.html#redis-backends
327 332 #rc_cache.cache_repo.arguments.distributed_lock = true
328 333
329 334 ## cache settings for SQL queries, this needs to use memory type backend
330 335 rc_cache.sql_cache_short.backend = dogpile.cache.rc.memory_lru
331 336 rc_cache.sql_cache_short.expiration_time = 30
332 337
333 338 ## `cache_repo_longterm` cache for repo object instances, this needs to use memory
334 339 ## type backend as the objects kept are not pickle serializable
335 340 rc_cache.cache_repo_longterm.backend = dogpile.cache.rc.memory_lru
336 341 ## by default we use 96H, this is using invalidation on push anyway
337 342 rc_cache.cache_repo_longterm.expiration_time = 345600
338 343 ## max items in LRU cache, reduce this number to save memory, and expire last used
339 344 ## cached objects
340 345 rc_cache.cache_repo_longterm.max_size = 10000
341 346
342 347
343 348 ####################################
344 349 ### BEAKER SESSION ####
345 350 ####################################
346 351
347 352 ## .session.type is type of storage options for the session, current allowed
348 353 ## types are file, ext:memcached, ext:redis, ext:database, and memory (default).
349 354 beaker.session.type = file
350 355 beaker.session.data_dir = %(here)s/data/sessions
351 356
352 357 ## db based session, fast, and allows easy management over logged in users
353 358 #beaker.session.type = ext:database
354 359 #beaker.session.table_name = db_session
355 360 #beaker.session.sa.url = postgresql://postgres:secret@localhost/rhodecode
356 361 #beaker.session.sa.url = mysql://root:secret@127.0.0.1/rhodecode
357 362 #beaker.session.sa.pool_recycle = 3600
358 363 #beaker.session.sa.echo = false
359 364
360 365 beaker.session.key = rhodecode
361 366 beaker.session.secret = production-rc-uytcxaz
362 367 beaker.session.lock_dir = %(here)s/data/sessions/lock
363 368
364 369 ## Secure encrypted cookie. Requires AES and AES python libraries
365 370 ## you must disable beaker.session.secret to use this
366 371 #beaker.session.encrypt_key = key_for_encryption
367 372 #beaker.session.validate_key = validation_key
368 373
369 374 ## sets session as invalid (also logging out the user) if it has not been
370 375 ## accessed for a given amount of time in seconds
371 376 beaker.session.timeout = 2592000
372 377 beaker.session.httponly = true
373 378 ## Path to use for the cookie. Set to prefix if you use prefix middleware
374 379 #beaker.session.cookie_path = /custom_prefix
375 380
376 381 ## uncomment for https secure cookie
377 382 beaker.session.secure = false
378 383
379 384 ## auto save the session so that calling .save() is not needed
380 385 beaker.session.auto = false
381 386
382 387 ## default cookie expiration time in seconds, set to `true` to set expire
383 388 ## at browser close
384 389 #beaker.session.cookie_expires = 3600
385 390
386 391 ###################################
387 392 ## SEARCH INDEXING CONFIGURATION ##
388 393 ###################################
389 394 ## Full text search indexer is available in rhodecode-tools under
390 395 ## `rhodecode-tools index` command
391 396
392 397 ## WHOOSH Backend, doesn't require additional services to run
393 398 ## it works well with a few dozen repos
394 399 search.module = rhodecode.lib.index.whoosh
395 400 search.location = %(here)s/data/index
396 401
397 402 ########################################
398 403 ### CHANNELSTREAM CONFIG ####
399 404 ########################################
400 405 ## channelstream enables persistent connections and live notification
401 406 ## in the system. It's also used by the chat system
402 407
403 408 channelstream.enabled = false
404 409
405 410 ## server address for channelstream server on the backend
406 411 channelstream.server = 127.0.0.1:9800
407 412
408 413 ## location of the channelstream server from outside world
409 414 ## use ws:// for http or wss:// for https. This address needs to be handled
410 415 ## by external HTTP server such as Nginx or Apache
411 416 ## see Nginx/Apache configuration examples in our docs
412 417 channelstream.ws_url = ws://rhodecode.yourserver.com/_channelstream
413 418 channelstream.secret = secret
414 419 channelstream.history.location = %(here)s/channelstream_history
415 420
416 421 ## Internal application path that Javascript uses to connect into.
417 422 ## If you use proxy-prefix the prefix should be added before /_channelstream
418 423 channelstream.proxy_path = /_channelstream
419 424
420 425
421 426 ###################################
422 427 ## APPENLIGHT CONFIG ##
423 428 ###################################
424 429
425 430 ## Appenlight is tailored to work with RhodeCode, see
426 431 ## http://appenlight.com for details how to obtain an account
427 432
428 433 ## Appenlight integration enabled
429 434 appenlight = false
430 435
431 436 appenlight.server_url = https://api.appenlight.com
432 437 appenlight.api_key = YOUR_API_KEY
433 438 #appenlight.transport_config = https://api.appenlight.com?threaded=1&timeout=5
434 439
435 440 ## used for JS client
436 441 appenlight.api_public_key = YOUR_API_PUBLIC_KEY
437 442
438 443 ## TWEAK AMOUNT OF INFO SENT HERE
439 444
440 445 ## enables 404 error logging (default False)
441 446 appenlight.report_404 = false
442 447
443 448 ## time in seconds after request is considered being slow (default 1)
444 449 appenlight.slow_request_time = 1
445 450
446 451 ## record slow requests in application
447 452 ## (needs to be enabled for slow datastore recording and time tracking)
448 453 appenlight.slow_requests = true
449 454
450 455 ## enable hooking to application loggers
451 456 appenlight.logging = true
452 457
453 458 ## minimum log level for log capture
454 459 appenlight.logging.level = WARNING
455 460
456 461 ## send logs only from erroneous/slow requests
457 462 ## (saves API quota for intensive logging)
458 463 appenlight.logging_on_error = false
459 464
460 465 ## list of additional keywords that should be grabbed from environ object
461 466 ## can be string with comma separated list of words in lowercase
462 467 ## (by default client will always send following info:
463 468 ## 'REMOTE_USER', 'REMOTE_ADDR', 'SERVER_NAME', 'CONTENT_TYPE' + all keys that
464 469 ## start with HTTP*). This list can be extended with additional keywords here
465 470 appenlight.environ_keys_whitelist =
466 471
467 472 ## list of keywords that should be blanked from request object
468 473 ## can be string with comma separated list of words in lowercase
469 474 ## (by default client will always blank keys that contain following words
470 475 ## 'password', 'passwd', 'pwd', 'auth_tkt', 'secret', 'csrf'
471 476 ## this list can be extended with additional keywords set here
472 477 appenlight.request_keys_blacklist =
473 478
474 479 ## list of namespaces that should be ignored when gathering log entries
475 480 ## can be string with comma separated list of namespaces
476 481 ## (by default the client ignores own entries: appenlight_client.client)
477 482 appenlight.log_namespace_blacklist =
478 483
479 484
480 485 ###########################################
481 486 ### MAIN RHODECODE DATABASE CONFIG ###
482 487 ###########################################
483 488 #sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode.db?timeout=30
484 489 #sqlalchemy.db1.url = postgresql://postgres:qweqwe@localhost/rhodecode
485 490 #sqlalchemy.db1.url = mysql://root:qweqwe@localhost/rhodecode?charset=utf8
486 491 # pymysql is an alternative driver for MySQL, use it in case of problems with the default one
487 492 #sqlalchemy.db1.url = mysql+pymysql://root:qweqwe@localhost/rhodecode
488 493
489 494 sqlalchemy.db1.url = postgresql://postgres:qweqwe@localhost/rhodecode
490 495
491 496 # see sqlalchemy docs for other advanced settings
492 497
493 498 ## print the sql statements to output
494 499 sqlalchemy.db1.echo = false
495 500 ## recycle the connections after this amount of seconds
496 501 sqlalchemy.db1.pool_recycle = 3600
497 502 sqlalchemy.db1.convert_unicode = true
498 503
499 504 ## the number of connections to keep open inside the connection pool.
500 505 ## 0 indicates no limit
501 506 #sqlalchemy.db1.pool_size = 5
502 507
503 508 ## the number of connections to allow in connection pool "overflow", that is
504 509 ## connections that can be opened above and beyond the pool_size setting,
505 510 ## which defaults to five.
506 511 #sqlalchemy.db1.max_overflow = 10
507 512
508 513 ## Connection check ping, used to detect broken database connections
509 514 ## could be enabled to better handle 'MySQL server has gone away' errors
510 515 #sqlalchemy.db1.ping_connection = true
511 516
512 517 ##################
513 518 ### VCS CONFIG ###
514 519 ##################
515 520 vcs.server.enable = true
516 521 vcs.server = localhost:9900
517 522
518 523 ## Web server connectivity protocol, responsible for web based VCS operations
519 524 ## Available protocols are:
520 525 ## `http` - use http-rpc backend (default)
521 526 vcs.server.protocol = http
522 527
523 528 ## Push/Pull operations protocol, available options are:
524 529 ## `http` - use http-rpc backend (default)
525 530 vcs.scm_app_implementation = http
526 531
527 532 ## Push/Pull operations hooks protocol, available options are:
528 533 ## `http` - use http-rpc backend (default)
529 534 vcs.hooks.protocol = http
530 535
531 536 ## Host on which this instance is listening for hooks. If the vcsserver is in another
532 537 ## location, this should be adjusted.
533 538 vcs.hooks.host = 127.0.0.1
534 539
535 540 vcs.server.log_level = info
536 541 ## Start VCSServer with this instance as a subprocess, useful for development
537 542 vcs.start_server = false
538 543
539 544 ## List of enabled VCS backends, available options are:
540 545 ## `hg` - mercurial
541 546 ## `git` - git
542 547 ## `svn` - subversion
543 548 vcs.backends = hg, git, svn
544 549
545 550 vcs.connection_timeout = 3600
546 551 ## Compatibility version when creating SVN repositories. Defaults to newest version when commented out.
547 552 ## Available options are: pre-1.4-compatible, pre-1.5-compatible, pre-1.6-compatible, pre-1.8-compatible, pre-1.9-compatible
548 553 #vcs.svn.compatible_version = pre-1.8-compatible
549 554
550 555
551 556 ############################################################
552 557 ### Subversion proxy support (mod_dav_svn) ###
553 558 ### Maps RhodeCode repo groups into SVN paths for Apache ###
554 559 ############################################################
555 560 ## Enable or disable the config file generation.
556 561 svn.proxy.generate_config = false
557 562 ## Generate config file with `SVNListParentPath` set to `On`.
558 563 svn.proxy.list_parent_path = true
559 564 ## Set location and file name of generated config file.
560 565 svn.proxy.config_file_path = %(here)s/mod_dav_svn.conf
561 566 ## alternative mod_dav config template. This needs to be a mako template
562 567 #svn.proxy.config_template = ~/.rccontrol/enterprise-1/custom_svn_conf.mako
563 568 ## Used as a prefix to the `Location` block in the generated config file.
564 569 ## In most cases it should be set to `/`.
565 570 svn.proxy.location_root = /
566 571 ## Command to reload the mod dav svn configuration on change.
567 572 ## Example: `/etc/init.d/apache2 reload` or `/home/USER/apache_reload.sh`
568 573 ## Make sure the user who runs the RhodeCode process is allowed to reload Apache
569 574 #svn.proxy.reload_cmd = /etc/init.d/apache2 reload
570 575 ## If the timeout expires before the reload command finishes, the command will
571 576 ## be killed. Setting it to zero means no timeout. Defaults to 10 seconds.
572 577 #svn.proxy.reload_timeout = 10
573 578
574 579 ############################################################
575 580 ### SSH Support Settings ###
576 581 ############################################################
577 582
578 583 ## Defines if a custom authorized_keys file should be created and written on
579 584 ## any change of user SSH keys. Setting this to false also disables the possibility
580 585 ## of users adding SSH keys from the web interface. Super admins can still
581 586 ## manage SSH keys.
582 587 ssh.generate_authorized_keyfile = false
583 588
584 589 ## Options for ssh, default is `no-pty,no-port-forwarding,no-X11-forwarding,no-agent-forwarding`
585 590 # ssh.authorized_keys_ssh_opts =
586 591
587 592 ## Path to the authorized_keys file where the generated entries are placed.
588 593 ## It is possible to have multiple key files specified in `sshd_config` e.g.
589 594 ## AuthorizedKeysFile %h/.ssh/authorized_keys %h/.ssh/authorized_keys_rhodecode
590 595 ssh.authorized_keys_file_path = ~/.ssh/authorized_keys_rhodecode
591 596
592 597 ## Command to execute the SSH wrapper. The binary is available in the
593 598 ## RhodeCode installation directory.
594 599 ## e.g. ~/.rccontrol/community-1/profile/bin/rc-ssh-wrapper
595 600 ssh.wrapper_cmd = ~/.rccontrol/community-1/rc-ssh-wrapper
596 601
597 602 ## Allow shell when executing the ssh-wrapper command
598 603 ssh.wrapper_cmd_allow_shell = false
599 604
600 605 ## Enables logging and detailed output sent back to the client during SSH
601 606 ## operations. Useful for debugging; shouldn't be used in production.
602 607 ssh.enable_debug_logging = false
603 608
604 609 ## Paths to binary executables; by default these are just the binary names, but they
605 610 ## can be overridden here if we want to use custom ones
606 611 ssh.executable.hg = ~/.rccontrol/vcsserver-1/profile/bin/hg
607 612 ssh.executable.git = ~/.rccontrol/vcsserver-1/profile/bin/git
608 613 ssh.executable.svn = ~/.rccontrol/vcsserver-1/profile/bin/svnserve
609 614
610 615 ## Enables SSH key generator web interface. Disabling this still allows users
611 616 ## to add their own keys.
612 617 ssh.enable_ui_key_generator = true
613 618
614 619
615 620 ## Dummy marker to add new entries after.
616 621 ## Add any custom entries below. Please don't remove.
617 622 custom.conf = 1
618 623
619 624
620 625 ################################
621 626 ### LOGGING CONFIGURATION ####
622 627 ################################
623 628 [loggers]
624 629 keys = root, sqlalchemy, beaker, celery, rhodecode, ssh_wrapper
625 630
626 631 [handlers]
627 632 keys = console, console_sql
628 633
629 634 [formatters]
630 635 keys = generic, color_formatter, color_formatter_sql
631 636
632 637 #############
633 638 ## LOGGERS ##
634 639 #############
635 640 [logger_root]
636 641 level = NOTSET
637 642 handlers = console
638 643
639 644 [logger_sqlalchemy]
640 645 level = INFO
641 646 handlers = console_sql
642 647 qualname = sqlalchemy.engine
643 648 propagate = 0
644 649
645 650 [logger_beaker]
646 651 level = DEBUG
647 652 handlers =
648 653 qualname = beaker.container
649 654 propagate = 1
650 655
651 656 [logger_rhodecode]
652 657 level = DEBUG
653 658 handlers =
654 659 qualname = rhodecode
655 660 propagate = 1
656 661
657 662 [logger_ssh_wrapper]
658 663 level = DEBUG
659 664 handlers =
660 665 qualname = ssh_wrapper
661 666 propagate = 1
662 667
663 668 [logger_celery]
664 669 level = DEBUG
665 670 handlers =
666 671 qualname = celery
667 672
668 673
669 674 ##############
670 675 ## HANDLERS ##
671 676 ##############
672 677
673 678 [handler_console]
674 679 class = StreamHandler
675 680 args = (sys.stderr, )
676 681 level = INFO
677 682 formatter = generic
678 683
679 684 [handler_console_sql]
680 685 # "level = DEBUG" logs SQL queries and results.
681 686 # "level = INFO" logs SQL queries.
682 687 # "level = WARN" logs neither. (Recommended for production systems.)
683 688 class = StreamHandler
684 689 args = (sys.stderr, )
685 690 level = WARN
686 691 formatter = generic
687 692
688 693 ################
689 694 ## FORMATTERS ##
690 695 ################
691 696
692 697 [formatter_generic]
693 698 class = rhodecode.lib.logging_formatter.ExceptionAwareFormatter
694 699 format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s
695 700 datefmt = %Y-%m-%d %H:%M:%S
696 701
697 702 [formatter_color_formatter]
698 703 class = rhodecode.lib.logging_formatter.ColorFormatter
699 704 format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s
700 705 datefmt = %Y-%m-%d %H:%M:%S
701 706
702 707 [formatter_color_formatter_sql]
703 708 class = rhodecode.lib.logging_formatter.ColorFormatterSql
704 709 format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s
705 710 datefmt = %Y-%m-%d %H:%M:%S
@@ -1,114 +1,138 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2014-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21
22 22 """
23 23 Generic encryption library for RhodeCode
24 24 """
25 25
26 26 import base64
27 27
28 28 from Crypto.Cipher import AES
29 29 from Crypto import Random
30 30 from Crypto.Hash import HMAC, SHA256
31 31
32 32 from rhodecode.lib.utils2 import safe_str
33 33
34 34
35 35 class SignatureVerificationError(Exception):
36 36 pass
37 37
38 38
39 39 class InvalidDecryptedValue(str):
40 40
41 41 def __new__(cls, content):
42 42 """
43 43 This will generate something like this::
44 44 <InvalidDecryptedValue(QkWusFgLJXR6m42v...)>
45 45 And represents a safe indicator that the encryption key is broken
46 46 """
47 47 content = '<{}({}...)>'.format(cls.__name__, content[:16])
48 48 return str.__new__(cls, content)
49 49
50 50
51 51 class AESCipher(object):
52 52 def __init__(self, key, hmac=False, strict_verification=True):
53 53 if not key:
54 54 raise ValueError('passed key variable is empty')
55 55 self.strict_verification = strict_verification
56 56 self.block_size = 32
57 57 self.hmac_size = 32
58 58 self.hmac = hmac
59 59
60 60 self.key = SHA256.new(safe_str(key)).digest()
61 61 self.hmac_key = SHA256.new(self.key).digest()
62 62
63 63 def verify_hmac_signature(self, raw_data):
64 64 org_hmac_signature = raw_data[-self.hmac_size:]
65 65 data_without_sig = raw_data[:-self.hmac_size]
66 66 recomputed_hmac = HMAC.new(
67 67 self.hmac_key, data_without_sig, digestmod=SHA256).digest()
68 68 return org_hmac_signature == recomputed_hmac
69 69
70 70 def encrypt(self, raw):
71 71 raw = self._pad(raw)
72 72 iv = Random.new().read(AES.block_size)
73 73 cipher = AES.new(self.key, AES.MODE_CBC, iv)
74 74 enc_value = cipher.encrypt(raw)
75 75
76 76 hmac_signature = ''
77 77 if self.hmac:
78 78 # compute HMAC-SHA256 on iv + enc text; we use the
79 79 # encrypt-then-MAC method to create the signature
80 80 hmac_signature = HMAC.new(
81 81 self.hmac_key, iv + enc_value, digestmod=SHA256).digest()
82 82
83 83 return base64.b64encode(iv + enc_value + hmac_signature)
84 84
85 85 def decrypt(self, enc):
86 86 enc_org = enc
87 87 enc = base64.b64decode(enc)
88 88
89 89 if self.hmac and len(enc) > self.hmac_size:
90 90 if self.verify_hmac_signature(enc):
91 91 # cut off the HMAC verification digest
92 92 enc = enc[:-self.hmac_size]
93 93 else:
94 94 if self.strict_verification:
95 95 raise SignatureVerificationError(
96 96 "Encryption signature verification failed. "
97 97 "Please check your secret key, and/or encrypted value. "
98 98 "Secret key is stored as "
99 99 "`rhodecode.encrypted_values.secret` or "
100 100 "`beaker.session.secret` inside .ini file")
101 101
102 102 return InvalidDecryptedValue(enc_org)
103 103
104 104 iv = enc[:AES.block_size]
105 105 cipher = AES.new(self.key, AES.MODE_CBC, iv)
106 106 return self._unpad(cipher.decrypt(enc[AES.block_size:]))
107 107
108 108 def _pad(self, s):
109 109 return (s + (self.block_size - len(s) % self.block_size)
110 110 * chr(self.block_size - len(s) % self.block_size))
111 111
112 112 @staticmethod
113 113 def _unpad(s):
114 return s[:-ord(s[len(s)-1:])] No newline at end of file
114 return s[:-ord(s[len(s)-1:])]
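# A worked sketch of the padding scheme above: with block_size = 32, _pad('abc')
# appends 29 copies of chr(29) so the length becomes a multiple of 32; _unpad()
# then reads the last character (ord 29) and strips that many characters,
# recovering the original 'abc'.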
115
116
117 def validate_and_get_enc_data(enc_data, enc_key, enc_strict_mode):
118 parts = enc_data.split('$', 3)
119 if not len(parts) == 3:
120 # probably not encrypted values
121 return enc_data
122 else:
123 if parts[0] != 'enc':
124 # parts ok but without our header ?
125 return enc_data
126
127 # at that stage we know it's our encryption
128 if parts[1] == 'aes':
129 decrypted_data = AESCipher(enc_key).decrypt(parts[2])
130 elif parts[1] == 'aes_hmac':
131 decrypted_data = AESCipher(
132 enc_key, hmac=True,
133 strict_verification=enc_strict_mode).decrypt(parts[2])
134 else:
135 raise ValueError(
136 'Encryption type part is wrong, must be `aes` '
137 'or `aes_hmac`, got `%s` instead' % (parts[1]))
138 return decrypted_data
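A minimal usage sketch of the helpers above, assuming an application secret `enc_key`; the values are illustrative only, not taken from the diff:

from rhodecode.lib.encrypt import AESCipher, validate_and_get_enc_data

enc_key = 'application-secret'  # assumed secret, normally taken from the .ini file
# encrypt with an HMAC signature; this is the payload stored after the `enc$aes_hmac$` prefix
payload = AESCipher(enc_key, hmac=True).encrypt('my-password')
stored = 'enc$aes_hmac$%s' % payload

# round-trip through the validation helper; values without the `enc$` header pass through unchanged
assert validate_and_get_enc_data(stored, enc_key, enc_strict_mode=True) == 'my-password'
assert validate_and_get_enc_data('plain-value', enc_key, enc_strict_mode=True) == 'plain-value'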
@@ -1,4972 +1,4974 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 Database Models for RhodeCode Enterprise
23 23 """
24 24
25 25 import re
26 26 import os
27 27 import time
28 28 import hashlib
29 29 import logging
30 30 import datetime
31 31 import warnings
32 32 import ipaddress
33 33 import functools
34 34 import traceback
35 35 import collections
36 36
37 37 from sqlalchemy import (
38 38 or_, and_, not_, func, TypeDecorator, event,
39 39 Index, Sequence, UniqueConstraint, ForeignKey, CheckConstraint, Column,
40 40 Boolean, String, Unicode, UnicodeText, DateTime, Integer, LargeBinary,
41 41 Text, Float, PickleType)
42 42 from sqlalchemy.sql.expression import true, false
43 43 from sqlalchemy.sql.functions import coalesce, count # pragma: no cover
44 44 from sqlalchemy.orm import (
45 45 relationship, joinedload, class_mapper, validates, aliased)
46 46 from sqlalchemy.ext.declarative import declared_attr
47 47 from sqlalchemy.ext.hybrid import hybrid_property
48 48 from sqlalchemy.exc import IntegrityError # pragma: no cover
49 49 from sqlalchemy.dialects.mysql import LONGTEXT
50 50 from zope.cachedescriptors.property import Lazy as LazyProperty
51 51 from pyramid import compat
52 52 from pyramid.threadlocal import get_current_request
53 53
54 54 from rhodecode.translation import _
55 55 from rhodecode.lib.vcs import get_vcs_instance
56 56 from rhodecode.lib.vcs.backends.base import EmptyCommit, Reference
57 57 from rhodecode.lib.utils2 import (
58 58 str2bool, safe_str, get_commit_safe, safe_unicode, sha1_safe,
59 59 time_to_datetime, aslist, Optional, safe_int, get_clone_url, AttributeDict,
60 60 glob2re, StrictAttributeDict, cleaned_uri)
61 61 from rhodecode.lib.jsonalchemy import MutationObj, MutationList, JsonType, \
62 62 JsonRaw
63 63 from rhodecode.lib.ext_json import json
64 64 from rhodecode.lib.caching_query import FromCache
65 from rhodecode.lib.encrypt import AESCipher
66
65 from rhodecode.lib.encrypt import AESCipher, validate_and_get_enc_data
66 from rhodecode.lib.encrypt2 import Encryptor
67 67 from rhodecode.model.meta import Base, Session
68 68
69 69 URL_SEP = '/'
70 70 log = logging.getLogger(__name__)
71 71
72 72 # =============================================================================
73 73 # BASE CLASSES
74 74 # =============================================================================
75 75
76 76 # this is propagated from .ini file rhodecode.encrypted_values.secret or
77 77 # beaker.session.secret if first is not set.
78 78 # and initialized at environment.py
79 79 ENCRYPTION_KEY = None
80 80
81 81 # used to sort permissions by types, '#' used here is not allowed to be in
82 82 # usernames, and it's very early in sorted string.printable table.
83 83 PERMISSION_TYPE_SORT = {
84 84 'admin': '####',
85 85 'write': '###',
86 86 'read': '##',
87 87 'none': '#',
88 88 }
89 89
90 90
91 91 def display_user_sort(obj):
92 92 """
93 93 Sort function used to sort permissions in .permissions() function of
94 94 Repository, RepoGroup, UserGroup. Also it put the default user in front
95 95 of all other resources
96 96 """
97 97
98 98 if obj.username == User.DEFAULT_USER:
99 99 return '#####'
100 100 prefix = PERMISSION_TYPE_SORT.get(obj.permission.split('.')[-1], '')
101 101 return prefix + obj.username
102 102
103 103
104 104 def display_user_group_sort(obj):
105 105 """
106 106 Sort function used to sort permissions in .permissions() function of
107 107 Repository, RepoGroup, UserGroup. Also it put the default user in front
108 108 of all other resources
109 109 """
110 110
111 111 prefix = PERMISSION_TYPE_SORT.get(obj.permission.split('.')[-1], '')
112 112 return prefix + obj.users_group_name
113 113
114 114
115 115 def _hash_key(k):
116 116 return sha1_safe(k)
117 117
118 118
119 119 def in_filter_generator(qry, items, limit=500):
120 120 """
121 121 Splits IN() into multiple with OR
122 122 e.g.::
123 123 cnt = Repository.query().filter(
124 124 or_(
125 125 *in_filter_generator(Repository.repo_id, range(100000))
126 126 )).count()
127 127 """
128 128 if not items:
129 129 # empty list will cause empty query which might cause security issues
130 130 # this can lead to hidden unpleasant results
131 131 items = [-1]
132 132
133 133 parts = []
134 134 for chunk in xrange(0, len(items), limit):
135 135 parts.append(
136 136 qry.in_(items[chunk: chunk + limit])
137 137 )
138 138
139 139 return parts
140 140
141 141
142 142 base_table_args = {
143 143 'extend_existing': True,
144 144 'mysql_engine': 'InnoDB',
145 145 'mysql_charset': 'utf8',
146 146 'sqlite_autoincrement': True
147 147 }
148 148
149 149
150 150 class EncryptedTextValue(TypeDecorator):
151 151 """
152 152 Special column for encrypted long text data, use like::
153 153
154 154 value = Column("encrypted_value", EncryptedValue(), nullable=False)
155 155
156 156 This column is intelligent, so if the value is in unencrypted form it returns the
157 157 unencrypted form, but on save it always encrypts
158 158 """
159 159 impl = Text
160 160
161 161 def process_bind_param(self, value, dialect):
162 if not value:
163 return value
164 if value.startswith('enc$aes$') or value.startswith('enc$aes_hmac$'):
165 # protect against double encrypting if someone manually starts
166 # doing
167 raise ValueError('value needs to be in unencrypted format, ie. '
168 'not starting with enc$aes')
169 return 'enc$aes_hmac$%s' % AESCipher(
170 ENCRYPTION_KEY, hmac=True).encrypt(value)
171
172 def process_result_value(self, value, dialect):
162 """
163 Setter for storing value
164 """
173 165 import rhodecode
174
175 166 if not value:
176 167 return value
177 168
178 parts = value.split('$', 3)
179 if not len(parts) == 3:
180 # probably not encrypted values
181 return value
169 # protect against double encrypting if the value is already encrypted
170 if value.startswith('enc$aes$') \
171 or value.startswith('enc$aes_hmac$') \
172 or value.startswith('enc2$'):
173 raise ValueError('value needs to be in unencrypted format, '
174 'ie. not starting with enc$ or enc2$')
175
176 algo = rhodecode.CONFIG.get('rhodecode.encrypted_values.algorithm') or 'aes'
177 if algo == 'aes':
178 return 'enc$aes_hmac$%s' % AESCipher(ENCRYPTION_KEY, hmac=True).encrypt(value)
179 elif algo == 'fernet':
180 return Encryptor(ENCRYPTION_KEY).encrypt(value)
182 181 else:
183 if parts[0] != 'enc':
184 # parts ok but without our header ?
185 return value
186 enc_strict_mode = str2bool(rhodecode.CONFIG.get(
187 'rhodecode.encrypted_values.strict') or True)
188 # at that stage we know it's our encryption
189 if parts[1] == 'aes':
190 decrypted_data = AESCipher(ENCRYPTION_KEY).decrypt(parts[2])
191 elif parts[1] == 'aes_hmac':
192 decrypted_data = AESCipher(
193 ENCRYPTION_KEY, hmac=True,
194 strict_verification=enc_strict_mode).decrypt(parts[2])
195 else:
196 raise ValueError(
197 'Encryption type part is wrong, must be `aes` '
198 'or `aes_hmac`, got `%s` instead' % (parts[1]))
199 return decrypted_data
182 raise ValueError('Bad encryption algorithm, should be fernet or aes, got: {}'.format(algo))
183
184 def process_result_value(self, value, dialect):
185 """
186 Getter for retrieving value
187 """
188
189 import rhodecode
190 if not value:
191 return value
192
193 algo = rhodecode.CONFIG.get('rhodecode.encrypted_values.algorithm') or 'aes'
194 enc_strict_mode = str2bool(rhodecode.CONFIG.get('rhodecode.encrypted_values.strict') or True)
195 if algo == 'aes':
196 decrypted_data = validate_and_get_enc_data(value, ENCRYPTION_KEY, enc_strict_mode)
197 elif algo == 'fernet':
198 return Encryptor(ENCRYPTION_KEY).decrypt(value)
199 else:
200 raise ValueError('Bad encryption algorithm, should be fernet or aes, got: {}'.format(algo))
201 return decrypted_data
200 202
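# A minimal sketch of how the column above dispatches between the two backends
# (the configuration below is an assumption for illustration; ENCRYPTION_KEY must
# already be initialized):
#
#   rhodecode.CONFIG['rhodecode.encrypted_values.algorithm'] = 'fernet'
#   col = EncryptedTextValue()
#   stored = col.process_bind_param('my-secret', dialect=None)   # -> 'enc2$salt:...$data:...'
#   col.process_result_value(stored, dialect=None)               # -> 'my-secret'
#
# With the default 'aes' algorithm the stored value starts with 'enc$aes_hmac$'
# instead, and decryption goes through validate_and_get_enc_data().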
201 203
202 204 class BaseModel(object):
203 205 """
204 206 Base Model for all classes
205 207 """
206 208
207 209 @classmethod
208 210 def _get_keys(cls):
209 211 """return column names for this model """
210 212 return class_mapper(cls).c.keys()
211 213
212 214 def get_dict(self):
213 215 """
214 216 return dict with keys and values corresponding
215 217 to this model data """
216 218
217 219 d = {}
218 220 for k in self._get_keys():
219 221 d[k] = getattr(self, k)
220 222
221 223 # also use __json__() if present to get additional fields
222 224 _json_attr = getattr(self, '__json__', None)
223 225 if _json_attr:
224 226 # update with attributes from __json__
225 227 if callable(_json_attr):
226 228 _json_attr = _json_attr()
227 229 for k, val in _json_attr.iteritems():
228 230 d[k] = val
229 231 return d
230 232
231 233 def get_appstruct(self):
232 234 """return list with keys and values tuples corresponding
233 235 to this model data """
234 236
235 237 lst = []
236 238 for k in self._get_keys():
237 239 lst.append((k, getattr(self, k),))
238 240 return lst
239 241
240 242 def populate_obj(self, populate_dict):
241 243 """populate model with data from given populate_dict"""
242 244
243 245 for k in self._get_keys():
244 246 if k in populate_dict:
245 247 setattr(self, k, populate_dict[k])
246 248
247 249 @classmethod
248 250 def query(cls):
249 251 return Session().query(cls)
250 252
251 253 @classmethod
252 254 def get(cls, id_):
253 255 if id_:
254 256 return cls.query().get(id_)
255 257
256 258 @classmethod
257 259 def get_or_404(cls, id_):
258 260 from pyramid.httpexceptions import HTTPNotFound
259 261
260 262 try:
261 263 id_ = int(id_)
262 264 except (TypeError, ValueError):
263 265 raise HTTPNotFound()
264 266
265 267 res = cls.query().get(id_)
266 268 if not res:
267 269 raise HTTPNotFound()
268 270 return res
269 271
270 272 @classmethod
271 273 def getAll(cls):
272 274 # deprecated and left for backward compatibility
273 275 return cls.get_all()
274 276
275 277 @classmethod
276 278 def get_all(cls):
277 279 return cls.query().all()
278 280
279 281 @classmethod
280 282 def delete(cls, id_):
281 283 obj = cls.query().get(id_)
282 284 Session().delete(obj)
283 285
284 286 @classmethod
285 287 def identity_cache(cls, session, attr_name, value):
286 288 exist_in_session = []
287 289 for (item_cls, pkey), instance in session.identity_map.items():
288 290 if cls == item_cls and getattr(instance, attr_name) == value:
289 291 exist_in_session.append(instance)
290 292 if exist_in_session:
291 293 if len(exist_in_session) == 1:
292 294 return exist_in_session[0]
293 295 log.exception(
294 296 'multiple objects with attr %s and '
295 297 'value %s found with same name: %r',
296 298 attr_name, value, exist_in_session)
297 299
298 300 def __repr__(self):
299 301 if hasattr(self, '__unicode__'):
300 302 # python repr needs to return str
301 303 try:
302 304 return safe_str(self.__unicode__())
303 305 except UnicodeDecodeError:
304 306 pass
305 307 return '<DB:%s>' % (self.__class__.__name__)
306 308
307 309
308 310 class RhodeCodeSetting(Base, BaseModel):
309 311 __tablename__ = 'rhodecode_settings'
310 312 __table_args__ = (
311 313 UniqueConstraint('app_settings_name'),
312 314 base_table_args
313 315 )
314 316
315 317 SETTINGS_TYPES = {
316 318 'str': safe_str,
317 319 'int': safe_int,
318 320 'unicode': safe_unicode,
319 321 'bool': str2bool,
320 322 'list': functools.partial(aslist, sep=',')
321 323 }
322 324 DEFAULT_UPDATE_URL = 'https://rhodecode.com/api/v1/info/versions'
323 325 GLOBAL_CONF_KEY = 'app_settings'
324 326
325 327 app_settings_id = Column("app_settings_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
326 328 app_settings_name = Column("app_settings_name", String(255), nullable=True, unique=None, default=None)
327 329 _app_settings_value = Column("app_settings_value", String(4096), nullable=True, unique=None, default=None)
328 330 _app_settings_type = Column("app_settings_type", String(255), nullable=True, unique=None, default=None)
329 331
330 332 def __init__(self, key='', val='', type='unicode'):
331 333 self.app_settings_name = key
332 334 self.app_settings_type = type
333 335 self.app_settings_value = val
334 336
335 337 @validates('_app_settings_value')
336 338 def validate_settings_value(self, key, val):
337 339 assert type(val) == unicode
338 340 return val
339 341
340 342 @hybrid_property
341 343 def app_settings_value(self):
342 344 v = self._app_settings_value
343 345 _type = self.app_settings_type
344 346 if _type:
345 347 _type = self.app_settings_type.split('.')[0]
346 348 # decode the encrypted value
347 349 if 'encrypted' in self.app_settings_type:
348 350 cipher = EncryptedTextValue()
349 351 v = safe_unicode(cipher.process_result_value(v, None))
350 352
351 353 converter = self.SETTINGS_TYPES.get(_type) or \
352 354 self.SETTINGS_TYPES['unicode']
353 355 return converter(v)
354 356
355 357 @app_settings_value.setter
356 358 def app_settings_value(self, val):
357 359 """
358 360 Setter that will always make sure we use unicode in app_settings_value
359 361
360 362 :param val:
361 363 """
362 364 val = safe_unicode(val)
363 365 # encode the encrypted value
364 366 if 'encrypted' in self.app_settings_type:
365 367 cipher = EncryptedTextValue()
366 368 val = safe_unicode(cipher.process_bind_param(val, None))
367 369 self._app_settings_value = val
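        # Illustrative note (the setting name below is hypothetical): a setting created as
        # RhodeCodeSetting('smtp_password', 'secret', type='unicode.encrypted') is stored
        # through EncryptedTextValue by this setter, and the getter above transparently
        # decrypts it because 'encrypted' appears in app_settings_type.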
368 370
369 371 @hybrid_property
370 372 def app_settings_type(self):
371 373 return self._app_settings_type
372 374
373 375 @app_settings_type.setter
374 376 def app_settings_type(self, val):
375 377 if val.split('.')[0] not in self.SETTINGS_TYPES:
376 378 raise Exception('type must be one of %s got %s'
377 379 % (self.SETTINGS_TYPES.keys(), val))
378 380 self._app_settings_type = val
379 381
380 382 @classmethod
381 383 def get_by_prefix(cls, prefix):
382 384 return RhodeCodeSetting.query()\
383 385 .filter(RhodeCodeSetting.app_settings_name.startswith(prefix))\
384 386 .all()
385 387
386 388 def __unicode__(self):
387 389 return u"<%s('%s:%s[%s]')>" % (
388 390 self.__class__.__name__,
389 391 self.app_settings_name, self.app_settings_value,
390 392 self.app_settings_type
391 393 )
392 394
393 395
394 396 class RhodeCodeUi(Base, BaseModel):
395 397 __tablename__ = 'rhodecode_ui'
396 398 __table_args__ = (
397 399 UniqueConstraint('ui_key'),
398 400 base_table_args
399 401 )
400 402
401 403 HOOK_REPO_SIZE = 'changegroup.repo_size'
402 404 # HG
403 405 HOOK_PRE_PULL = 'preoutgoing.pre_pull'
404 406 HOOK_PULL = 'outgoing.pull_logger'
405 407 HOOK_PRE_PUSH = 'prechangegroup.pre_push'
406 408 HOOK_PRETX_PUSH = 'pretxnchangegroup.pre_push'
407 409 HOOK_PUSH = 'changegroup.push_logger'
408 410 HOOK_PUSH_KEY = 'pushkey.key_push'
409 411
410 412 # TODO: johbo: Unify way how hooks are configured for git and hg,
411 413 # git part is currently hardcoded.
412 414
413 415 # SVN PATTERNS
414 416 SVN_BRANCH_ID = 'vcs_svn_branch'
415 417 SVN_TAG_ID = 'vcs_svn_tag'
416 418
417 419 ui_id = Column(
418 420 "ui_id", Integer(), nullable=False, unique=True, default=None,
419 421 primary_key=True)
420 422 ui_section = Column(
421 423 "ui_section", String(255), nullable=True, unique=None, default=None)
422 424 ui_key = Column(
423 425 "ui_key", String(255), nullable=True, unique=None, default=None)
424 426 ui_value = Column(
425 427 "ui_value", String(255), nullable=True, unique=None, default=None)
426 428 ui_active = Column(
427 429 "ui_active", Boolean(), nullable=True, unique=None, default=True)
428 430
429 431 def __repr__(self):
430 432 return '<%s[%s]%s=>%s]>' % (self.__class__.__name__, self.ui_section,
431 433 self.ui_key, self.ui_value)
432 434
433 435
434 436 class RepoRhodeCodeSetting(Base, BaseModel):
435 437 __tablename__ = 'repo_rhodecode_settings'
436 438 __table_args__ = (
437 439 UniqueConstraint(
438 440 'app_settings_name', 'repository_id',
439 441 name='uq_repo_rhodecode_setting_name_repo_id'),
440 442 base_table_args
441 443 )
442 444
443 445 repository_id = Column(
444 446 "repository_id", Integer(), ForeignKey('repositories.repo_id'),
445 447 nullable=False)
446 448 app_settings_id = Column(
447 449 "app_settings_id", Integer(), nullable=False, unique=True,
448 450 default=None, primary_key=True)
449 451 app_settings_name = Column(
450 452 "app_settings_name", String(255), nullable=True, unique=None,
451 453 default=None)
452 454 _app_settings_value = Column(
453 455 "app_settings_value", String(4096), nullable=True, unique=None,
454 456 default=None)
455 457 _app_settings_type = Column(
456 458 "app_settings_type", String(255), nullable=True, unique=None,
457 459 default=None)
458 460
459 461 repository = relationship('Repository')
460 462
461 463 def __init__(self, repository_id, key='', val='', type='unicode'):
462 464 self.repository_id = repository_id
463 465 self.app_settings_name = key
464 466 self.app_settings_type = type
465 467 self.app_settings_value = val
466 468
467 469 @validates('_app_settings_value')
468 470 def validate_settings_value(self, key, val):
469 471 assert type(val) == unicode
470 472 return val
471 473
472 474 @hybrid_property
473 475 def app_settings_value(self):
474 476 v = self._app_settings_value
475 477 type_ = self.app_settings_type
476 478 SETTINGS_TYPES = RhodeCodeSetting.SETTINGS_TYPES
477 479 converter = SETTINGS_TYPES.get(type_) or SETTINGS_TYPES['unicode']
478 480 return converter(v)
479 481
480 482 @app_settings_value.setter
481 483 def app_settings_value(self, val):
482 484 """
483 485 Setter that will always make sure we use unicode in app_settings_value
484 486
485 487 :param val:
486 488 """
487 489 self._app_settings_value = safe_unicode(val)
488 490
489 491 @hybrid_property
490 492 def app_settings_type(self):
491 493 return self._app_settings_type
492 494
493 495 @app_settings_type.setter
494 496 def app_settings_type(self, val):
495 497 SETTINGS_TYPES = RhodeCodeSetting.SETTINGS_TYPES
496 498 if val not in SETTINGS_TYPES:
497 499 raise Exception('type must be one of %s got %s'
498 500 % (SETTINGS_TYPES.keys(), val))
499 501 self._app_settings_type = val
500 502
501 503 def __unicode__(self):
502 504 return u"<%s('%s:%s:%s[%s]')>" % (
503 505 self.__class__.__name__, self.repository.repo_name,
504 506 self.app_settings_name, self.app_settings_value,
505 507 self.app_settings_type
506 508 )
507 509
508 510
509 511 class RepoRhodeCodeUi(Base, BaseModel):
510 512 __tablename__ = 'repo_rhodecode_ui'
511 513 __table_args__ = (
512 514 UniqueConstraint(
513 515 'repository_id', 'ui_section', 'ui_key',
514 516 name='uq_repo_rhodecode_ui_repository_id_section_key'),
515 517 base_table_args
516 518 )
517 519
518 520 repository_id = Column(
519 521 "repository_id", Integer(), ForeignKey('repositories.repo_id'),
520 522 nullable=False)
521 523 ui_id = Column(
522 524 "ui_id", Integer(), nullable=False, unique=True, default=None,
523 525 primary_key=True)
524 526 ui_section = Column(
525 527 "ui_section", String(255), nullable=True, unique=None, default=None)
526 528 ui_key = Column(
527 529 "ui_key", String(255), nullable=True, unique=None, default=None)
528 530 ui_value = Column(
529 531 "ui_value", String(255), nullable=True, unique=None, default=None)
530 532 ui_active = Column(
531 533 "ui_active", Boolean(), nullable=True, unique=None, default=True)
532 534
533 535 repository = relationship('Repository')
534 536
535 537 def __repr__(self):
536 538 return '<%s[%s:%s]%s=>%s]>' % (
537 539 self.__class__.__name__, self.repository.repo_name,
538 540 self.ui_section, self.ui_key, self.ui_value)
539 541
540 542
541 543 class User(Base, BaseModel):
542 544 __tablename__ = 'users'
543 545 __table_args__ = (
544 546 UniqueConstraint('username'), UniqueConstraint('email'),
545 547 Index('u_username_idx', 'username'),
546 548 Index('u_email_idx', 'email'),
547 549 base_table_args
548 550 )
549 551
550 552 DEFAULT_USER = 'default'
551 553 DEFAULT_USER_EMAIL = 'anonymous@rhodecode.org'
552 554 DEFAULT_GRAVATAR_URL = 'https://secure.gravatar.com/avatar/{md5email}?d=identicon&s={size}'
553 555
554 556 user_id = Column("user_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
555 557 username = Column("username", String(255), nullable=True, unique=None, default=None)
556 558 password = Column("password", String(255), nullable=True, unique=None, default=None)
557 559 active = Column("active", Boolean(), nullable=True, unique=None, default=True)
558 560 admin = Column("admin", Boolean(), nullable=True, unique=None, default=False)
559 561 name = Column("firstname", String(255), nullable=True, unique=None, default=None)
560 562 lastname = Column("lastname", String(255), nullable=True, unique=None, default=None)
561 563 _email = Column("email", String(255), nullable=True, unique=None, default=None)
562 564 last_login = Column("last_login", DateTime(timezone=False), nullable=True, unique=None, default=None)
563 565 last_activity = Column('last_activity', DateTime(timezone=False), nullable=True, unique=None, default=None)
564 566
565 567 extern_type = Column("extern_type", String(255), nullable=True, unique=None, default=None)
566 568 extern_name = Column("extern_name", String(255), nullable=True, unique=None, default=None)
567 569 _api_key = Column("api_key", String(255), nullable=True, unique=None, default=None)
568 570 inherit_default_permissions = Column("inherit_default_permissions", Boolean(), nullable=False, unique=None, default=True)
569 571 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
570 572 _user_data = Column("user_data", LargeBinary(), nullable=True) # JSON data
571 573
572 574 user_log = relationship('UserLog')
573 575 user_perms = relationship('UserToPerm', primaryjoin="User.user_id==UserToPerm.user_id", cascade='all')
574 576
575 577 repositories = relationship('Repository')
576 578 repository_groups = relationship('RepoGroup')
577 579 user_groups = relationship('UserGroup')
578 580
579 581 user_followers = relationship('UserFollowing', primaryjoin='UserFollowing.follows_user_id==User.user_id', cascade='all')
580 582 followings = relationship('UserFollowing', primaryjoin='UserFollowing.user_id==User.user_id', cascade='all')
581 583
582 584 repo_to_perm = relationship('UserRepoToPerm', primaryjoin='UserRepoToPerm.user_id==User.user_id', cascade='all')
583 585 repo_group_to_perm = relationship('UserRepoGroupToPerm', primaryjoin='UserRepoGroupToPerm.user_id==User.user_id', cascade='all')
584 586 user_group_to_perm = relationship('UserUserGroupToPerm', primaryjoin='UserUserGroupToPerm.user_id==User.user_id', cascade='all')
585 587
586 588 group_member = relationship('UserGroupMember', cascade='all')
587 589
588 590 notifications = relationship('UserNotification', cascade='all')
589 591 # notifications assigned to this user
590 592 user_created_notifications = relationship('Notification', cascade='all')
591 593 # comments created by this user
592 594 user_comments = relationship('ChangesetComment', cascade='all')
593 595 # user profile extra info
594 596 user_emails = relationship('UserEmailMap', cascade='all')
595 597 user_ip_map = relationship('UserIpMap', cascade='all')
596 598 user_auth_tokens = relationship('UserApiKeys', cascade='all')
597 599 user_ssh_keys = relationship('UserSshKeys', cascade='all')
598 600
599 601 # gists
600 602 user_gists = relationship('Gist', cascade='all')
601 603 # user pull requests
602 604 user_pull_requests = relationship('PullRequest', cascade='all')
603 605 # external identities
604 606 extenal_identities = relationship(
605 607 'ExternalIdentity',
606 608 primaryjoin="User.user_id==ExternalIdentity.local_user_id",
607 609 cascade='all')
608 610 # review rules
609 611 user_review_rules = relationship('RepoReviewRuleUser', cascade='all')
610 612
611 613 def __unicode__(self):
612 614 return u"<%s('id:%s:%s')>" % (self.__class__.__name__,
613 615 self.user_id, self.username)
614 616
615 617 @hybrid_property
616 618 def email(self):
617 619 return self._email
618 620
619 621 @email.setter
620 622 def email(self, val):
621 623 self._email = val.lower() if val else None
622 624
623 625 @hybrid_property
624 626 def first_name(self):
625 627 from rhodecode.lib import helpers as h
626 628 if self.name:
627 629 return h.escape(self.name)
628 630 return self.name
629 631
630 632 @hybrid_property
631 633 def last_name(self):
632 634 from rhodecode.lib import helpers as h
633 635 if self.lastname:
634 636 return h.escape(self.lastname)
635 637 return self.lastname
636 638
637 639 @hybrid_property
638 640 def api_key(self):
639 641 """
640 642 Fetch if exist an auth-token with role ALL connected to this user
641 643 """
642 644 user_auth_token = UserApiKeys.query()\
643 645 .filter(UserApiKeys.user_id == self.user_id)\
644 646 .filter(or_(UserApiKeys.expires == -1,
645 647 UserApiKeys.expires >= time.time()))\
646 648 .filter(UserApiKeys.role == UserApiKeys.ROLE_ALL).first()
647 649 if user_auth_token:
648 650 user_auth_token = user_auth_token.api_key
649 651
650 652 return user_auth_token
651 653
652 654 @api_key.setter
653 655 def api_key(self, val):
654 656 # don't allow setting the API key; this is deprecated for now
655 657 self._api_key = None
656 658
657 659 @property
658 660 def reviewer_pull_requests(self):
659 661 return PullRequestReviewers.query() \
660 662 .options(joinedload(PullRequestReviewers.pull_request)) \
661 663 .filter(PullRequestReviewers.user_id == self.user_id) \
662 664 .all()
663 665
664 666 @property
665 667 def firstname(self):
666 668 # alias for future
667 669 return self.name
668 670
669 671 @property
670 672 def emails(self):
671 673 other = UserEmailMap.query()\
672 674 .filter(UserEmailMap.user == self) \
673 675 .order_by(UserEmailMap.email_id.asc()) \
674 676 .all()
675 677 return [self.email] + [x.email for x in other]
676 678
677 679 @property
678 680 def auth_tokens(self):
679 681 auth_tokens = self.get_auth_tokens()
680 682 return [x.api_key for x in auth_tokens]
681 683
682 684 def get_auth_tokens(self):
683 685 return UserApiKeys.query()\
684 686 .filter(UserApiKeys.user == self)\
685 687 .order_by(UserApiKeys.user_api_key_id.asc())\
686 688 .all()
687 689
688 690 @LazyProperty
689 691 def feed_token(self):
690 692 return self.get_feed_token()
691 693
692 694 def get_feed_token(self, cache=True):
693 695 feed_tokens = UserApiKeys.query()\
694 696 .filter(UserApiKeys.user == self)\
695 697 .filter(UserApiKeys.role == UserApiKeys.ROLE_FEED)
696 698 if cache:
697 699 feed_tokens = feed_tokens.options(
698 700 FromCache("sql_cache_short", "get_user_feed_token_%s" % self.user_id))
699 701
700 702 feed_tokens = feed_tokens.all()
701 703 if feed_tokens:
702 704 return feed_tokens[0].api_key
703 705 return 'NO_FEED_TOKEN_AVAILABLE'
704 706
705 707 @classmethod
706 708 def get(cls, user_id, cache=False):
707 709 if not user_id:
708 710 return
709 711
710 712 user = cls.query()
711 713 if cache:
712 714 user = user.options(
713 715 FromCache("sql_cache_short", "get_users_%s" % user_id))
714 716 return user.get(user_id)
715 717
716 718 @classmethod
717 719 def extra_valid_auth_tokens(cls, user, role=None):
718 720 tokens = UserApiKeys.query().filter(UserApiKeys.user == user)\
719 721 .filter(or_(UserApiKeys.expires == -1,
720 722 UserApiKeys.expires >= time.time()))
721 723 if role:
722 724 tokens = tokens.filter(or_(UserApiKeys.role == role,
723 725 UserApiKeys.role == UserApiKeys.ROLE_ALL))
724 726 return tokens.all()
725 727
726 728 def authenticate_by_token(self, auth_token, roles=None, scope_repo_id=None):
727 729 from rhodecode.lib import auth
728 730
729 731 log.debug('Trying to authenticate user: %s via auth-token, '
730 732 'and roles: %s', self, roles)
731 733
732 734 if not auth_token:
733 735 return False
734 736
735 737 roles = (roles or []) + [UserApiKeys.ROLE_ALL]
736 738 tokens_q = UserApiKeys.query()\
737 739 .filter(UserApiKeys.user_id == self.user_id)\
738 740 .filter(or_(UserApiKeys.expires == -1,
739 741 UserApiKeys.expires >= time.time()))
740 742
741 743 tokens_q = tokens_q.filter(UserApiKeys.role.in_(roles))
742 744
743 745 crypto_backend = auth.crypto_backend()
744 746 enc_token_map = {}
745 747 plain_token_map = {}
746 748 for token in tokens_q:
747 749 if token.api_key.startswith(crypto_backend.ENC_PREF):
748 750 enc_token_map[token.api_key] = token
749 751 else:
750 752 plain_token_map[token.api_key] = token
751 753 log.debug(
752 754 'Found %s plain and %s encrypted user tokens to check for authentication',
753 755 len(plain_token_map), len(enc_token_map))
754 756
755 757 # plain token match comes first
756 758 match = plain_token_map.get(auth_token)
757 759
758 760 # check encrypted tokens now
759 761 if not match:
760 762 for token_hash, token in enc_token_map.items():
761 763 # NOTE(marcink): this is expensive to calculate, but most secure
762 764 if crypto_backend.hash_check(auth_token, token_hash):
763 765 match = token
764 766 break
765 767
766 768 if match:
767 769 log.debug('Found matching token %s', match)
768 770 if match.repo_id:
769 771 log.debug('Found scope, checking for scope match of token %s', match)
770 772 if match.repo_id == scope_repo_id:
771 773 return True
772 774 else:
773 775 log.debug(
774 776 'AUTH_TOKEN: scope mismatch, token has a set repo scope: %s, '
775 777 'and calling scope is:%s, skipping further checks',
776 778 match.repo, scope_repo_id)
777 779 return False
778 780 else:
779 781 return True
780 782
781 783 return False
782 784
783 785 @property
784 786 def ip_addresses(self):
785 787 ret = UserIpMap.query().filter(UserIpMap.user == self).all()
786 788 return [x.ip_addr for x in ret]
787 789
788 790 @property
789 791 def username_and_name(self):
790 792 return '%s (%s %s)' % (self.username, self.first_name, self.last_name)
791 793
792 794 @property
793 795 def username_or_name_or_email(self):
794 796 full_name = self.full_name if self.full_name != ' ' else None
795 797 return self.username or full_name or self.email
796 798
797 799 @property
798 800 def full_name(self):
799 801 return '%s %s' % (self.first_name, self.last_name)
800 802
801 803 @property
802 804 def full_name_or_username(self):
803 805 return ('%s %s' % (self.first_name, self.last_name)
804 806 if (self.first_name and self.last_name) else self.username)
805 807
806 808 @property
807 809 def full_contact(self):
808 810 return '%s %s <%s>' % (self.first_name, self.last_name, self.email)
809 811
810 812 @property
811 813 def short_contact(self):
812 814 return '%s %s' % (self.first_name, self.last_name)
813 815
814 816 @property
815 817 def is_admin(self):
816 818 return self.admin
817 819
818 820 def AuthUser(self, **kwargs):
819 821 """
820 822 Returns instance of AuthUser for this user
821 823 """
822 824 from rhodecode.lib.auth import AuthUser
823 825 return AuthUser(user_id=self.user_id, username=self.username, **kwargs)
824 826
825 827 @hybrid_property
826 828 def user_data(self):
827 829 if not self._user_data:
828 830 return {}
829 831
830 832 try:
831 833 return json.loads(self._user_data)
832 834 except TypeError:
833 835 return {}
834 836
835 837 @user_data.setter
836 838 def user_data(self, val):
837 839 if not isinstance(val, dict):
838 840 raise Exception('user_data must be dict, got %s' % type(val))
839 841 try:
840 842 self._user_data = json.dumps(val)
841 843 except Exception:
842 844 log.error(traceback.format_exc())
843 845
844 846 @classmethod
845 847 def get_by_username(cls, username, case_insensitive=False,
846 848 cache=False, identity_cache=False):
847 849 session = Session()
848 850
849 851 if case_insensitive:
850 852 q = cls.query().filter(
851 853 func.lower(cls.username) == func.lower(username))
852 854 else:
853 855 q = cls.query().filter(cls.username == username)
854 856
855 857 if cache:
856 858 if identity_cache:
857 859 val = cls.identity_cache(session, 'username', username)
858 860 if val:
859 861 return val
860 862 else:
861 863 cache_key = "get_user_by_name_%s" % _hash_key(username)
862 864 q = q.options(
863 865 FromCache("sql_cache_short", cache_key))
864 866
865 867 return q.scalar()
866 868
867 869 @classmethod
868 870 def get_by_auth_token(cls, auth_token, cache=False):
869 871 q = UserApiKeys.query()\
870 872 .filter(UserApiKeys.api_key == auth_token)\
871 873 .filter(or_(UserApiKeys.expires == -1,
872 874 UserApiKeys.expires >= time.time()))
873 875 if cache:
874 876 q = q.options(
875 877 FromCache("sql_cache_short", "get_auth_token_%s" % auth_token))
876 878
877 879 match = q.first()
878 880 if match:
879 881 return match.user
880 882
881 883 @classmethod
882 884 def get_by_email(cls, email, case_insensitive=False, cache=False):
883 885
884 886 if case_insensitive:
885 887 q = cls.query().filter(func.lower(cls.email) == func.lower(email))
886 888
887 889 else:
888 890 q = cls.query().filter(cls.email == email)
889 891
890 892 email_key = _hash_key(email)
891 893 if cache:
892 894 q = q.options(
893 895 FromCache("sql_cache_short", "get_email_key_%s" % email_key))
894 896
895 897 ret = q.scalar()
896 898 if ret is None:
897 899 q = UserEmailMap.query()
898 900 # try fetching in alternate email map
899 901 if case_insensitive:
900 902 q = q.filter(func.lower(UserEmailMap.email) == func.lower(email))
901 903 else:
902 904 q = q.filter(UserEmailMap.email == email)
903 905 q = q.options(joinedload(UserEmailMap.user))
904 906 if cache:
905 907 q = q.options(
906 908 FromCache("sql_cache_short", "get_email_map_key_%s" % email_key))
907 909 ret = getattr(q.scalar(), 'user', None)
908 910
909 911 return ret
910 912
911 913 @classmethod
912 914 def get_from_cs_author(cls, author):
913 915 """
914 916 Tries to get User objects out of commit author string
915 917
916 918 :param author:
917 919 """
918 920 from rhodecode.lib.helpers import email, author_name
919 921 # Valid email in the attribute passed, see if they're in the system
920 922 _email = email(author)
921 923 if _email:
922 924 user = cls.get_by_email(_email, case_insensitive=True)
923 925 if user:
924 926 return user
925 927 # Maybe we can match by username?
926 928 _author = author_name(author)
927 929 user = cls.get_by_username(_author, case_insensitive=True)
928 930 if user:
929 931 return user
930 932
931 933 def update_userdata(self, **kwargs):
932 934 usr = self
933 935 old = usr.user_data
934 936 old.update(**kwargs)
935 937 usr.user_data = old
936 938 Session().add(usr)
937 939 log.debug('updated userdata with %s', kwargs)
938 940
939 941 def update_lastlogin(self):
940 942 """Update user lastlogin"""
941 943 self.last_login = datetime.datetime.now()
942 944 Session().add(self)
943 945 log.debug('updated user %s lastlogin', self.username)
944 946
945 947 def update_password(self, new_password):
946 948 from rhodecode.lib.auth import get_crypt_password
947 949
948 950 self.password = get_crypt_password(new_password)
949 951 Session().add(self)
950 952
951 953 @classmethod
952 954 def get_first_super_admin(cls):
953 955 user = User.query()\
954 956 .filter(User.admin == true()) \
955 957 .order_by(User.user_id.asc()) \
956 958 .first()
957 959
958 960 if user is None:
959 961 raise Exception('FATAL: Missing administrative account!')
960 962 return user
961 963
962 964 @classmethod
963 965 def get_all_super_admins(cls, only_active=False):
964 966 """
965 967 Returns all admin accounts sorted by username
966 968 """
967 969 qry = User.query().filter(User.admin == true()).order_by(User.username.asc())
968 970 if only_active:
969 971 qry = qry.filter(User.active == true())
970 972 return qry.all()
971 973
972 974 @classmethod
973 975 def get_default_user(cls, cache=False, refresh=False):
974 976 user = User.get_by_username(User.DEFAULT_USER, cache=cache)
975 977 if user is None:
976 978 raise Exception('FATAL: Missing default account!')
977 979 if refresh:
978 980 # The default user might be based on outdated state which
979 981 # has been loaded from the cache.
980 982 # A call to refresh() ensures that the
981 983 # latest state from the database is used.
982 984 Session().refresh(user)
983 985 return user
984 986
985 987 def _get_default_perms(self, user, suffix=''):
986 988 from rhodecode.model.permission import PermissionModel
987 989 return PermissionModel().get_default_perms(user.user_perms, suffix)
988 990
989 991 def get_default_perms(self, suffix=''):
990 992 return self._get_default_perms(self, suffix)
991 993
992 994 def get_api_data(self, include_secrets=False, details='full'):
993 995 """
994 996 Common function for generating user related data for API
995 997
996 998 :param include_secrets: By default secrets in the API data will be replaced
997 999 by a placeholder value to prevent exposing this data by accident. In case
998 1000 this data shall be exposed, set this flag to ``True``.
999 1001
1000 1002 :param details: details can be 'basic|full' basic gives only a subset of
1001 1003 the available user information that includes user_id, name and emails.
1002 1004 """
1003 1005 user = self
1004 1006 user_data = self.user_data
1005 1007 data = {
1006 1008 'user_id': user.user_id,
1007 1009 'username': user.username,
1008 1010 'firstname': user.name,
1009 1011 'lastname': user.lastname,
1010 1012 'email': user.email,
1011 1013 'emails': user.emails,
1012 1014 }
1013 1015 if details == 'basic':
1014 1016 return data
1015 1017
1016 1018 auth_token_length = 40
1017 1019 auth_token_replacement = '*' * auth_token_length
1018 1020
1019 1021 extras = {
1020 1022 'auth_tokens': [auth_token_replacement],
1021 1023 'active': user.active,
1022 1024 'admin': user.admin,
1023 1025 'extern_type': user.extern_type,
1024 1026 'extern_name': user.extern_name,
1025 1027 'last_login': user.last_login,
1026 1028 'last_activity': user.last_activity,
1027 1029 'ip_addresses': user.ip_addresses,
1028 1030 'language': user_data.get('language')
1029 1031 }
1030 1032 data.update(extras)
1031 1033
1032 1034 if include_secrets:
1033 1035 data['auth_tokens'] = user.auth_tokens
1034 1036 return data
1035 1037
1036 1038 def __json__(self):
1037 1039 data = {
1038 1040 'full_name': self.full_name,
1039 1041 'full_name_or_username': self.full_name_or_username,
1040 1042 'short_contact': self.short_contact,
1041 1043 'full_contact': self.full_contact,
1042 1044 }
1043 1045 data.update(self.get_api_data())
1044 1046 return data
1045 1047
1046 1048
1047 1049 class UserApiKeys(Base, BaseModel):
1048 1050 __tablename__ = 'user_api_keys'
1049 1051 __table_args__ = (
1050 1052 Index('uak_api_key_idx', 'api_key', unique=True),
1051 1053 Index('uak_api_key_expires_idx', 'api_key', 'expires'),
1052 1054 base_table_args
1053 1055 )
1054 1056 __mapper_args__ = {}
1055 1057
1056 1058 # ApiKey role
1057 1059 ROLE_ALL = 'token_role_all'
1058 1060 ROLE_HTTP = 'token_role_http'
1059 1061 ROLE_VCS = 'token_role_vcs'
1060 1062 ROLE_API = 'token_role_api'
1061 1063 ROLE_FEED = 'token_role_feed'
1062 1064 ROLE_PASSWORD_RESET = 'token_password_reset'
1063 1065
1064 1066 ROLES = [ROLE_ALL, ROLE_HTTP, ROLE_VCS, ROLE_API, ROLE_FEED]
1065 1067
1066 1068 user_api_key_id = Column("user_api_key_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1067 1069 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
1068 1070 api_key = Column("api_key", String(255), nullable=False, unique=True)
1069 1071 description = Column('description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))
1070 1072 expires = Column('expires', Float(53), nullable=False)
1071 1073 role = Column('role', String(255), nullable=True)
1072 1074 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
1073 1075
1074 1076 # scope columns
1075 1077 repo_id = Column(
1076 1078 'repo_id', Integer(), ForeignKey('repositories.repo_id'),
1077 1079 nullable=True, unique=None, default=None)
1078 1080 repo = relationship('Repository', lazy='joined')
1079 1081
1080 1082 repo_group_id = Column(
1081 1083 'repo_group_id', Integer(), ForeignKey('groups.group_id'),
1082 1084 nullable=True, unique=None, default=None)
1083 1085 repo_group = relationship('RepoGroup', lazy='joined')
1084 1086
1085 1087 user = relationship('User', lazy='joined')
1086 1088
1087 1089 def __unicode__(self):
1088 1090 return u"<%s('%s')>" % (self.__class__.__name__, self.role)
1089 1091
1090 1092 def __json__(self):
1091 1093 data = {
1092 1094 'auth_token': self.api_key,
1093 1095 'role': self.role,
1094 1096 'scope': self.scope_humanized,
1095 1097 'expired': self.expired
1096 1098 }
1097 1099 return data
1098 1100
1099 1101 def get_api_data(self, include_secrets=False):
1100 1102 data = self.__json__()
1101 1103 if include_secrets:
1102 1104 return data
1103 1105 else:
1104 1106 data['auth_token'] = self.token_obfuscated
1105 1107 return data
1106 1108
1107 1109 @hybrid_property
1108 1110 def description_safe(self):
1109 1111 from rhodecode.lib import helpers as h
1110 1112 return h.escape(self.description)
1111 1113
1112 1114 @property
1113 1115 def expired(self):
1114 1116 if self.expires == -1:
1115 1117 return False
1116 1118 return time.time() > self.expires
1117 1119
1118 1120 @classmethod
1119 1121 def _get_role_name(cls, role):
1120 1122 return {
1121 1123 cls.ROLE_ALL: _('all'),
1122 1124 cls.ROLE_HTTP: _('http/web interface'),
1123 1125 cls.ROLE_VCS: _('vcs (git/hg/svn protocol)'),
1124 1126 cls.ROLE_API: _('api calls'),
1125 1127 cls.ROLE_FEED: _('feed access'),
1126 1128 }.get(role, role)
1127 1129
1128 1130 @property
1129 1131 def role_humanized(self):
1130 1132 return self._get_role_name(self.role)
1131 1133
1132 1134 def _get_scope(self):
1133 1135 if self.repo:
1134 1136 return 'Repository: {}'.format(self.repo.repo_name)
1135 1137 if self.repo_group:
1136 1138 return 'RepositoryGroup: {} (recursive)'.format(self.repo_group.group_name)
1137 1139 return 'Global'
1138 1140
1139 1141 @property
1140 1142 def scope_humanized(self):
1141 1143 return self._get_scope()
1142 1144
1143 1145 @property
1144 1146 def token_obfuscated(self):
1145 1147 if self.api_key:
1146 1148 return self.api_key[:4] + "****"
1147 1149
1148 1150
1149 1151 class UserEmailMap(Base, BaseModel):
1150 1152 __tablename__ = 'user_email_map'
1151 1153 __table_args__ = (
1152 1154 Index('uem_email_idx', 'email'),
1153 1155 UniqueConstraint('email'),
1154 1156 base_table_args
1155 1157 )
1156 1158 __mapper_args__ = {}
1157 1159
1158 1160 email_id = Column("email_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1159 1161 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
1160 1162 _email = Column("email", String(255), nullable=True, unique=False, default=None)
1161 1163 user = relationship('User', lazy='joined')
1162 1164
1163 1165 @validates('_email')
1164 1166 def validate_email(self, key, email):
1165 1167 # check that this email is not already the user's main email
1166 1168 main_email = Session().query(User).filter(User.email == email).scalar()
1167 1169 if main_email is not None:
1168 1170 raise AttributeError('email %s is already present in the user table' % email)
1169 1171 return email
1170 1172
1171 1173 @hybrid_property
1172 1174 def email(self):
1173 1175 return self._email
1174 1176
1175 1177 @email.setter
1176 1178 def email(self, val):
1177 1179 self._email = val.lower() if val else None
1178 1180
1179 1181
1180 1182 class UserIpMap(Base, BaseModel):
1181 1183 __tablename__ = 'user_ip_map'
1182 1184 __table_args__ = (
1183 1185 UniqueConstraint('user_id', 'ip_addr'),
1184 1186 base_table_args
1185 1187 )
1186 1188 __mapper_args__ = {}
1187 1189
1188 1190 ip_id = Column("ip_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1189 1191 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
1190 1192 ip_addr = Column("ip_addr", String(255), nullable=True, unique=False, default=None)
1191 1193 active = Column("active", Boolean(), nullable=True, unique=None, default=True)
1192 1194 description = Column("description", String(10000), nullable=True, unique=None, default=None)
1193 1195 user = relationship('User', lazy='joined')
1194 1196
1195 1197 @hybrid_property
1196 1198 def description_safe(self):
1197 1199 from rhodecode.lib import helpers as h
1198 1200 return h.escape(self.description)
1199 1201
1200 1202 @classmethod
1201 1203 def _get_ip_range(cls, ip_addr):
1202 1204 net = ipaddress.ip_network(safe_unicode(ip_addr), strict=False)
1203 1205 return [str(net.network_address), str(net.broadcast_address)]
1204 1206
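# A standalone sketch of what `_get_ip_range` above computes: for a CIDR such
# as 192.168.1.0/24 it yields the first and last address of the network. The
# `ipaddress` module is stdlib on Python 3 (a backport package of the same
# name on Python 2); `safe_unicode` is replaced by a plain text conversion.
import ipaddress

def ip_range(ip_addr):
    net = ipaddress.ip_network(u'%s' % ip_addr, strict=False)
    return [str(net.network_address), str(net.broadcast_address)]

# ip_range('192.168.1.0/24') -> ['192.168.1.0', '192.168.1.255']
# ip_range('10.0.0.5')       -> ['10.0.0.5', '10.0.0.5']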
1205 1207 def __json__(self):
1206 1208 return {
1207 1209 'ip_addr': self.ip_addr,
1208 1210 'ip_range': self._get_ip_range(self.ip_addr),
1209 1211 }
1210 1212
1211 1213 def __unicode__(self):
1212 1214 return u"<%s('user_id:%s=>%s')>" % (self.__class__.__name__,
1213 1215 self.user_id, self.ip_addr)
1214 1216
1215 1217
1216 1218 class UserSshKeys(Base, BaseModel):
1217 1219 __tablename__ = 'user_ssh_keys'
1218 1220 __table_args__ = (
1219 1221 Index('usk_ssh_key_fingerprint_idx', 'ssh_key_fingerprint'),
1220 1222
1221 1223 UniqueConstraint('ssh_key_fingerprint'),
1222 1224
1223 1225 base_table_args
1224 1226 )
1225 1227 __mapper_args__ = {}
1226 1228
1227 1229 ssh_key_id = Column('ssh_key_id', Integer(), nullable=False, unique=True, default=None, primary_key=True)
1228 1230 ssh_key_data = Column('ssh_key_data', String(10240), nullable=False, unique=None, default=None)
1229 1231 ssh_key_fingerprint = Column('ssh_key_fingerprint', String(255), nullable=False, unique=None, default=None)
1230 1232
1231 1233 description = Column('description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))
1232 1234
1233 1235 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
1234 1236 accessed_on = Column('accessed_on', DateTime(timezone=False), nullable=True, default=None)
1235 1237 user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
1236 1238
1237 1239 user = relationship('User', lazy='joined')
1238 1240
1239 1241 def __json__(self):
1240 1242 data = {
1241 1243 'ssh_fingerprint': self.ssh_key_fingerprint,
1242 1244 'description': self.description,
1243 1245 'created_on': self.created_on
1244 1246 }
1245 1247 return data
1246 1248
1247 1249 def get_api_data(self):
1248 1250 data = self.__json__()
1249 1251 return data
1250 1252
1251 1253
1252 1254 class UserLog(Base, BaseModel):
1253 1255 __tablename__ = 'user_logs'
1254 1256 __table_args__ = (
1255 1257 base_table_args,
1256 1258 )
1257 1259
1258 1260 VERSION_1 = 'v1'
1259 1261 VERSION_2 = 'v2'
1260 1262 VERSIONS = [VERSION_1, VERSION_2]
1261 1263
1262 1264 user_log_id = Column("user_log_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1263 1265 user_id = Column("user_id", Integer(), ForeignKey('users.user_id', ondelete='SET NULL'), nullable=True, unique=None, default=None)
1264 1266 username = Column("username", String(255), nullable=True, unique=None, default=None)
1265 1267 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id', ondelete='SET NULL'), nullable=True, unique=None, default=None)
1266 1268 repository_name = Column("repository_name", String(255), nullable=True, unique=None, default=None)
1267 1269 user_ip = Column("user_ip", String(255), nullable=True, unique=None, default=None)
1268 1270 action = Column("action", Text().with_variant(Text(1200000), 'mysql'), nullable=True, unique=None, default=None)
1269 1271 action_date = Column("action_date", DateTime(timezone=False), nullable=True, unique=None, default=None)
1270 1272
1271 1273 version = Column("version", String(255), nullable=True, default=VERSION_1)
1272 1274 user_data = Column('user_data_json', MutationObj.as_mutable(JsonType(dialect_map=dict(mysql=LONGTEXT()))))
1273 1275 action_data = Column('action_data_json', MutationObj.as_mutable(JsonType(dialect_map=dict(mysql=LONGTEXT()))))
1274 1276
1275 1277 def __unicode__(self):
1276 1278 return u"<%s('id:%s:%s')>" % (
1277 1279 self.__class__.__name__, self.repository_name, self.action)
1278 1280
1279 1281 def __json__(self):
1280 1282 return {
1281 1283 'user_id': self.user_id,
1282 1284 'username': self.username,
1283 1285 'repository_id': self.repository_id,
1284 1286 'repository_name': self.repository_name,
1285 1287 'user_ip': self.user_ip,
1286 1288 'action_date': self.action_date,
1287 1289 'action': self.action,
1288 1290 }
1289 1291
1290 1292 @hybrid_property
1291 1293 def entry_id(self):
1292 1294 return self.user_log_id
1293 1295
1294 1296 @property
1295 1297 def action_as_day(self):
1296 1298 return datetime.date(*self.action_date.timetuple()[:3])
1297 1299
1298 1300 user = relationship('User')
1299 1301 repository = relationship('Repository', cascade='')
1300 1302
1301 1303
1302 1304 class UserGroup(Base, BaseModel):
1303 1305 __tablename__ = 'users_groups'
1304 1306 __table_args__ = (
1305 1307 base_table_args,
1306 1308 )
1307 1309
1308 1310 users_group_id = Column("users_group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1309 1311 users_group_name = Column("users_group_name", String(255), nullable=False, unique=True, default=None)
1310 1312 user_group_description = Column("user_group_description", String(10000), nullable=True, unique=None, default=None)
1311 1313 users_group_active = Column("users_group_active", Boolean(), nullable=True, unique=None, default=None)
1312 1314 inherit_default_permissions = Column("users_group_inherit_default_permissions", Boolean(), nullable=False, unique=None, default=True)
1313 1315 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None)
1314 1316 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
1315 1317 _group_data = Column("group_data", LargeBinary(), nullable=True) # JSON data
1316 1318
1317 1319 members = relationship('UserGroupMember', cascade="all, delete, delete-orphan", lazy="joined")
1318 1320 users_group_to_perm = relationship('UserGroupToPerm', cascade='all')
1319 1321 users_group_repo_to_perm = relationship('UserGroupRepoToPerm', cascade='all')
1320 1322 users_group_repo_group_to_perm = relationship('UserGroupRepoGroupToPerm', cascade='all')
1321 1323 user_user_group_to_perm = relationship('UserUserGroupToPerm', cascade='all')
1322 1324 user_group_user_group_to_perm = relationship('UserGroupUserGroupToPerm', primaryjoin="UserGroupUserGroupToPerm.target_user_group_id==UserGroup.users_group_id", cascade='all')
1323 1325
1324 1326 user_group_review_rules = relationship('RepoReviewRuleUserGroup', cascade='all')
1325 1327 user = relationship('User', primaryjoin="User.user_id==UserGroup.user_id")
1326 1328
1327 1329 @classmethod
1328 1330 def _load_group_data(cls, column):
1329 1331 if not column:
1330 1332 return {}
1331 1333
1332 1334 try:
1333 1335 return json.loads(column) or {}
1334 1336 except TypeError:
1335 1337 return {}
1336 1338
1337 1339 @hybrid_property
1338 1340 def description_safe(self):
1339 1341 from rhodecode.lib import helpers as h
1340 1342 return h.escape(self.user_group_description)
1341 1343
1342 1344 @hybrid_property
1343 1345 def group_data(self):
1344 1346 return self._load_group_data(self._group_data)
1345 1347
1346 1348 @group_data.expression
1347 1349 def group_data(self, **kwargs):
1348 1350 return self._group_data
1349 1351
1350 1352 @group_data.setter
1351 1353 def group_data(self, val):
1352 1354 try:
1353 1355 self._group_data = json.dumps(val)
1354 1356 except Exception:
1355 1357 log.error(traceback.format_exc())
1356 1358
1357 1359 @classmethod
1358 1360 def _load_sync(cls, group_data):
1359 1361 if group_data:
1360 1362 return group_data.get('extern_type')
1361 1363
1362 1364 @property
1363 1365 def sync(self):
1364 1366 return self._load_sync(self.group_data)
1365 1367
1366 1368 def __unicode__(self):
1367 1369 return u"<%s('id:%s:%s')>" % (self.__class__.__name__,
1368 1370 self.users_group_id,
1369 1371 self.users_group_name)
1370 1372
1371 1373 @classmethod
1372 1374 def get_by_group_name(cls, group_name, cache=False,
1373 1375 case_insensitive=False):
1374 1376 if case_insensitive:
1375 1377 q = cls.query().filter(func.lower(cls.users_group_name) ==
1376 1378 func.lower(group_name))
1377 1379
1378 1380 else:
1379 1381 q = cls.query().filter(cls.users_group_name == group_name)
1380 1382 if cache:
1381 1383 q = q.options(
1382 1384 FromCache("sql_cache_short", "get_group_%s" % _hash_key(group_name)))
1383 1385 return q.scalar()
1384 1386
1385 1387 @classmethod
1386 1388 def get(cls, user_group_id, cache=False):
1387 1389 if not user_group_id:
1388 1390 return
1389 1391
1390 1392 user_group = cls.query()
1391 1393 if cache:
1392 1394 user_group = user_group.options(
1393 1395 FromCache("sql_cache_short", "get_users_group_%s" % user_group_id))
1394 1396 return user_group.get(user_group_id)
1395 1397
1396 1398 def permissions(self, with_admins=True, with_owner=True,
1397 1399 expand_from_user_groups=False):
1398 1400 """
1399 1401 Permissions for user groups
1400 1402 """
1401 1403 _admin_perm = 'usergroup.admin'
1402 1404
1403 1405 owner_row = []
1404 1406 if with_owner:
1405 1407 usr = AttributeDict(self.user.get_dict())
1406 1408 usr.owner_row = True
1407 1409 usr.permission = _admin_perm
1408 1410 owner_row.append(usr)
1409 1411
1410 1412 super_admin_ids = []
1411 1413 super_admin_rows = []
1412 1414 if with_admins:
1413 1415 for usr in User.get_all_super_admins():
1414 1416 super_admin_ids.append(usr.user_id)
1415 1417 # if this admin is also owner, don't double the record
1416 1418 if usr.user_id == owner_row[0].user_id:
1417 1419 owner_row[0].admin_row = True
1418 1420 else:
1419 1421 usr = AttributeDict(usr.get_dict())
1420 1422 usr.admin_row = True
1421 1423 usr.permission = _admin_perm
1422 1424 super_admin_rows.append(usr)
1423 1425
1424 1426 q = UserUserGroupToPerm.query().filter(UserUserGroupToPerm.user_group == self)
1425 1427 q = q.options(joinedload(UserUserGroupToPerm.user_group),
1426 1428 joinedload(UserUserGroupToPerm.user),
1427 1429 joinedload(UserUserGroupToPerm.permission),)
1428 1430
1429 1431 # get owners, admins and their permissions. We rewrite the sqlalchemy
1430 1432 # objects into plain AttributeDict copies, because the sqlalchemy session
1431 1433 # keeps a global reference and changing one object propagates to all
1432 1434 # others. Without the copy, marking an admin who is also the owner with
1433 1435 # admin_row would change both objects at once.
1434 1436 perm_rows = []
1435 1437 for _usr in q.all():
1436 1438 usr = AttributeDict(_usr.user.get_dict())
1437 1439 # if this user is also owner/admin, mark as duplicate record
1438 1440 if usr.user_id == owner_row[0].user_id or usr.user_id in super_admin_ids:
1439 1441 usr.duplicate_perm = True
1440 1442 usr.permission = _usr.permission.permission_name
1441 1443 perm_rows.append(usr)
1442 1444
1443 1445 # filter the perm rows by 'default' first and then sort them by
1444 1446 # admin, write, read, none permissions, sorted again alphabetically
1445 1447 # within each group
1446 1448 perm_rows = sorted(perm_rows, key=display_user_sort)
1447 1449
1448 1450 user_groups_rows = []
1449 1451 if expand_from_user_groups:
1450 1452 for ug in self.permission_user_groups(with_members=True):
1451 1453 for user_data in ug.members:
1452 1454 user_groups_rows.append(user_data)
1453 1455
1454 1456 return super_admin_rows + owner_row + perm_rows + user_groups_rows
1455 1457
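# A standalone sketch of the "detach into AttributeDict" trick used in
# permissions() above: per-row flags (owner_row, admin_row, duplicate_perm)
# are set on plain dict copies, so the session-bound sqlalchemy objects are
# never mutated. AttributeDict here is a minimal stand-in for the rhodecode
# helper of the same name.

class AttributeDict(dict):
    __getattr__ = dict.get
    __setattr__ = dict.__setitem__

orm_user_dict = {'user_id': 2, 'username': 'admin'}
usr = AttributeDict(orm_user_dict)   # independent copy of the ORM dict
usr.admin_row = True                 # flag lives only on the copy
usr.permission = 'usergroup.admin'
assert 'admin_row' not in orm_user_dict  # the original mapping is untouched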
1456 1458 def permission_user_groups(self, with_members=False):
1457 1459 q = UserGroupUserGroupToPerm.query()\
1458 1460 .filter(UserGroupUserGroupToPerm.target_user_group == self)
1459 1461 q = q.options(joinedload(UserGroupUserGroupToPerm.user_group),
1460 1462 joinedload(UserGroupUserGroupToPerm.target_user_group),
1461 1463 joinedload(UserGroupUserGroupToPerm.permission),)
1462 1464
1463 1465 perm_rows = []
1464 1466 for _user_group in q.all():
1465 1467 entry = AttributeDict(_user_group.user_group.get_dict())
1466 1468 entry.permission = _user_group.permission.permission_name
1467 1469 if with_members:
1468 1470 entry.members = [x.user.get_dict()
1469 1471 for x in _user_group.users_group.members]
1470 1472 perm_rows.append(entry)
1471 1473
1472 1474 perm_rows = sorted(perm_rows, key=display_user_group_sort)
1473 1475 return perm_rows
1474 1476
1475 1477 def _get_default_perms(self, user_group, suffix=''):
1476 1478 from rhodecode.model.permission import PermissionModel
1477 1479 return PermissionModel().get_default_perms(user_group.users_group_to_perm, suffix)
1478 1480
1479 1481 def get_default_perms(self, suffix=''):
1480 1482 return self._get_default_perms(self, suffix)
1481 1483
1482 1484 def get_api_data(self, with_group_members=True, include_secrets=False):
1483 1485 """
1484 1486 :param include_secrets: See :meth:`User.get_api_data`, this parameter is
1485 1487 basically forwarded.
1486 1488
1487 1489 """
1488 1490 user_group = self
1489 1491 data = {
1490 1492 'users_group_id': user_group.users_group_id,
1491 1493 'group_name': user_group.users_group_name,
1492 1494 'group_description': user_group.user_group_description,
1493 1495 'active': user_group.users_group_active,
1494 1496 'owner': user_group.user.username,
1495 1497 'sync': user_group.sync,
1496 1498 'owner_email': user_group.user.email,
1497 1499 }
1498 1500
1499 1501 if with_group_members:
1500 1502 users = []
1501 1503 for user in user_group.members:
1502 1504 user = user.user
1503 1505 users.append(user.get_api_data(include_secrets=include_secrets))
1504 1506 data['users'] = users
1505 1507
1506 1508 return data
1507 1509
1508 1510
1509 1511 class UserGroupMember(Base, BaseModel):
1510 1512 __tablename__ = 'users_groups_members'
1511 1513 __table_args__ = (
1512 1514 base_table_args,
1513 1515 )
1514 1516
1515 1517 users_group_member_id = Column("users_group_member_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1516 1518 users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
1517 1519 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
1518 1520
1519 1521 user = relationship('User', lazy='joined')
1520 1522 users_group = relationship('UserGroup')
1521 1523
1522 1524 def __init__(self, gr_id='', u_id=''):
1523 1525 self.users_group_id = gr_id
1524 1526 self.user_id = u_id
1525 1527
1526 1528
1527 1529 class RepositoryField(Base, BaseModel):
1528 1530 __tablename__ = 'repositories_fields'
1529 1531 __table_args__ = (
1530 1532 UniqueConstraint('repository_id', 'field_key'), # no-multi field
1531 1533 base_table_args,
1532 1534 )
1533 1535
1534 1536 PREFIX = 'ex_' # prefix used in form to not conflict with already existing fields
1535 1537
1536 1538 repo_field_id = Column("repo_field_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1537 1539 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
1538 1540 field_key = Column("field_key", String(250))
1539 1541 field_label = Column("field_label", String(1024), nullable=False)
1540 1542 field_value = Column("field_value", String(10000), nullable=False)
1541 1543 field_desc = Column("field_desc", String(1024), nullable=False)
1542 1544 field_type = Column("field_type", String(255), nullable=False, unique=None)
1543 1545 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
1544 1546
1545 1547 repository = relationship('Repository')
1546 1548
1547 1549 @property
1548 1550 def field_key_prefixed(self):
1549 1551 return 'ex_%s' % self.field_key
1550 1552
1551 1553 @classmethod
1552 1554 def un_prefix_key(cls, key):
1553 1555 if key.startswith(cls.PREFIX):
1554 1556 return key[len(cls.PREFIX):]
1555 1557 return key
1556 1558
1557 1559 @classmethod
1558 1560 def get_by_key_name(cls, key, repo):
1559 1561 row = cls.query()\
1560 1562 .filter(cls.repository == repo)\
1561 1563 .filter(cls.field_key == key).scalar()
1562 1564 return row
1563 1565
1564 1566
1565 1567 class Repository(Base, BaseModel):
1566 1568 __tablename__ = 'repositories'
1567 1569 __table_args__ = (
1568 1570 Index('r_repo_name_idx', 'repo_name', mysql_length=255),
1569 1571 base_table_args,
1570 1572 )
1571 1573 DEFAULT_CLONE_URI = '{scheme}://{user}@{netloc}/{repo}'
1572 1574 DEFAULT_CLONE_URI_ID = '{scheme}://{user}@{netloc}/_{repoid}'
1573 1575 DEFAULT_CLONE_URI_SSH = 'ssh://{sys_user}@{hostname}/{repo}'
1574 1576
1575 1577 STATE_CREATED = 'repo_state_created'
1576 1578 STATE_PENDING = 'repo_state_pending'
1577 1579 STATE_ERROR = 'repo_state_error'
1578 1580
1579 1581 LOCK_AUTOMATIC = 'lock_auto'
1580 1582 LOCK_API = 'lock_api'
1581 1583 LOCK_WEB = 'lock_web'
1582 1584 LOCK_PULL = 'lock_pull'
1583 1585
1584 1586 NAME_SEP = URL_SEP
1585 1587
1586 1588 repo_id = Column(
1587 1589 "repo_id", Integer(), nullable=False, unique=True, default=None,
1588 1590 primary_key=True)
1589 1591 _repo_name = Column(
1590 1592 "repo_name", Text(), nullable=False, default=None)
1591 1593 _repo_name_hash = Column(
1592 1594 "repo_name_hash", String(255), nullable=False, unique=True)
1593 1595 repo_state = Column("repo_state", String(255), nullable=True)
1594 1596
1595 1597 clone_uri = Column(
1596 1598 "clone_uri", EncryptedTextValue(), nullable=True, unique=False,
1597 1599 default=None)
1598 1600 push_uri = Column(
1599 1601 "push_uri", EncryptedTextValue(), nullable=True, unique=False,
1600 1602 default=None)
1601 1603 repo_type = Column(
1602 1604 "repo_type", String(255), nullable=False, unique=False, default=None)
1603 1605 user_id = Column(
1604 1606 "user_id", Integer(), ForeignKey('users.user_id'), nullable=False,
1605 1607 unique=False, default=None)
1606 1608 private = Column(
1607 1609 "private", Boolean(), nullable=True, unique=None, default=None)
1608 1610 archived = Column(
1609 1611 "archived", Boolean(), nullable=True, unique=None, default=None)
1610 1612 enable_statistics = Column(
1611 1613 "statistics", Boolean(), nullable=True, unique=None, default=True)
1612 1614 enable_downloads = Column(
1613 1615 "downloads", Boolean(), nullable=True, unique=None, default=True)
1614 1616 description = Column(
1615 1617 "description", String(10000), nullable=True, unique=None, default=None)
1616 1618 created_on = Column(
1617 1619 'created_on', DateTime(timezone=False), nullable=True, unique=None,
1618 1620 default=datetime.datetime.now)
1619 1621 updated_on = Column(
1620 1622 'updated_on', DateTime(timezone=False), nullable=True, unique=None,
1621 1623 default=datetime.datetime.now)
1622 1624 _landing_revision = Column(
1623 1625 "landing_revision", String(255), nullable=False, unique=False,
1624 1626 default=None)
1625 1627 enable_locking = Column(
1626 1628 "enable_locking", Boolean(), nullable=False, unique=None,
1627 1629 default=False)
1628 1630 _locked = Column(
1629 1631 "locked", String(255), nullable=True, unique=False, default=None)
1630 1632 _changeset_cache = Column(
1631 1633 "changeset_cache", LargeBinary(), nullable=True) # JSON data
1632 1634
1633 1635 fork_id = Column(
1634 1636 "fork_id", Integer(), ForeignKey('repositories.repo_id'),
1635 1637 nullable=True, unique=False, default=None)
1636 1638 group_id = Column(
1637 1639 "group_id", Integer(), ForeignKey('groups.group_id'), nullable=True,
1638 1640 unique=False, default=None)
1639 1641
1640 1642 user = relationship('User', lazy='joined')
1641 1643 fork = relationship('Repository', remote_side=repo_id, lazy='joined')
1642 1644 group = relationship('RepoGroup', lazy='joined')
1643 1645 repo_to_perm = relationship(
1644 1646 'UserRepoToPerm', cascade='all',
1645 1647 order_by='UserRepoToPerm.repo_to_perm_id')
1646 1648 users_group_to_perm = relationship('UserGroupRepoToPerm', cascade='all')
1647 1649 stats = relationship('Statistics', cascade='all', uselist=False)
1648 1650
1649 1651 followers = relationship(
1650 1652 'UserFollowing',
1651 1653 primaryjoin='UserFollowing.follows_repo_id==Repository.repo_id',
1652 1654 cascade='all')
1653 1655 extra_fields = relationship(
1654 1656 'RepositoryField', cascade="all, delete, delete-orphan")
1655 1657 logs = relationship('UserLog')
1656 1658 comments = relationship(
1657 1659 'ChangesetComment', cascade="all, delete, delete-orphan")
1658 1660 pull_requests_source = relationship(
1659 1661 'PullRequest',
1660 1662 primaryjoin='PullRequest.source_repo_id==Repository.repo_id',
1661 1663 cascade="all, delete, delete-orphan")
1662 1664 pull_requests_target = relationship(
1663 1665 'PullRequest',
1664 1666 primaryjoin='PullRequest.target_repo_id==Repository.repo_id',
1665 1667 cascade="all, delete, delete-orphan")
1666 1668 ui = relationship('RepoRhodeCodeUi', cascade="all")
1667 1669 settings = relationship('RepoRhodeCodeSetting', cascade="all")
1668 1670 integrations = relationship('Integration',
1669 1671 cascade="all, delete, delete-orphan")
1670 1672
1671 1673 scoped_tokens = relationship('UserApiKeys', cascade="all")
1672 1674
1673 1675 def __unicode__(self):
1674 1676 return u"<%s('%s:%s')>" % (self.__class__.__name__, self.repo_id,
1675 1677 safe_unicode(self.repo_name))
1676 1678
1677 1679 @hybrid_property
1678 1680 def description_safe(self):
1679 1681 from rhodecode.lib import helpers as h
1680 1682 return h.escape(self.description)
1681 1683
1682 1684 @hybrid_property
1683 1685 def landing_rev(self):
1684 1686 # always should return [rev_type, rev]
1685 1687 if self._landing_revision:
1686 1688 _rev_info = self._landing_revision.split(':')
1687 1689 if len(_rev_info) < 2:
1688 1690 _rev_info.insert(0, 'rev')
1689 1691 return [_rev_info[0], _rev_info[1]]
1690 1692 return [None, None]
1691 1693
1692 1694 @landing_rev.setter
1693 1695 def landing_rev(self, val):
1694 1696 if ':' not in val:
1695 1697 raise ValueError('value must be delimited with `:` and consist '
1696 1698 'of <rev_type>:<rev>, got %s instead' % val)
1697 1699 self._landing_revision = val
1698 1700
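# A standalone sketch of the `<rev_type>:<rev>` convention handled by the
# landing_rev property above: values without a type prefix fall back to a
# plain 'rev' type, and the setter rejects values missing the ':' separator.

def parse_landing_rev(value):
    if not value:
        return [None, None]
    rev_info = value.split(':')
    if len(rev_info) < 2:
        rev_info.insert(0, 'rev')
    return [rev_info[0], rev_info[1]]

# parse_landing_rev('branch:default') -> ['branch', 'default']
# parse_landing_rev('deadbeef')       -> ['rev', 'deadbeef']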
1699 1701 @hybrid_property
1700 1702 def locked(self):
1701 1703 if self._locked:
1702 1704 user_id, timelocked, reason = self._locked.split(':')
1703 1705 lock_values = int(user_id), timelocked, reason
1704 1706 else:
1705 1707 lock_values = [None, None, None]
1706 1708 return lock_values
1707 1709
1708 1710 @locked.setter
1709 1711 def locked(self, val):
1710 1712 if val and isinstance(val, (list, tuple)):
1711 1713 self._locked = ':'.join(map(str, val))
1712 1714 else:
1713 1715 self._locked = None
1714 1716
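# The lock state above is stored as a single 'user_id:unix_time:reason'
# string; a minimal sketch of the round-trip (helper names are illustrative):
import time

def pack_lock(user_id, lock_time=None, reason='lock_auto'):
    return ':'.join(map(str, [user_id, lock_time or time.time(), reason]))

def unpack_lock(value):
    if not value:
        return [None, None, None]
    user_id, lock_time, reason = value.split(':')
    return int(user_id), lock_time, reason

# unpack_lock(pack_lock(42, 1553000000.0)) -> (42, '1553000000.0', 'lock_auto')
# unpack_lock(None)                        -> [None, None, None]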
1715 1717 @hybrid_property
1716 1718 def changeset_cache(self):
1717 1719 from rhodecode.lib.vcs.backends.base import EmptyCommit
1718 1720 dummy = EmptyCommit().__json__()
1719 1721 if not self._changeset_cache:
1720 1722 return dummy
1721 1723 try:
1722 1724 return json.loads(self._changeset_cache)
1723 1725 except TypeError:
1724 1726 return dummy
1725 1727 except Exception:
1726 1728 log.error(traceback.format_exc())
1727 1729 return dummy
1728 1730
1729 1731 @changeset_cache.setter
1730 1732 def changeset_cache(self, val):
1731 1733 try:
1732 1734 self._changeset_cache = json.dumps(val)
1733 1735 except Exception:
1734 1736 log.error(traceback.format_exc())
1735 1737
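# changeset_cache above stores the last-commit metadata as JSON text and
# falls back to an empty-commit stub when the column is NULL or unreadable.
# A standalone sketch of that tolerant round-trip (the stub fields are a
# simplified stand-in for EmptyCommit().__json__()):
import json

EMPTY_COMMIT_STUB = {'raw_id': '0' * 40, 'revision': -1}

def load_changeset_cache(raw):
    if not raw:
        return EMPTY_COMMIT_STUB
    try:
        return json.loads(raw)
    except (TypeError, ValueError):
        return EMPTY_COMMIT_STUB

# load_changeset_cache(None) -> stub
# load_changeset_cache(json.dumps({'raw_id': 'abc123', 'revision': 7}))
#   -> {'raw_id': 'abc123', 'revision': 7}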
1736 1738 @hybrid_property
1737 1739 def repo_name(self):
1738 1740 return self._repo_name
1739 1741
1740 1742 @repo_name.setter
1741 1743 def repo_name(self, value):
1742 1744 self._repo_name = value
1743 1745 self._repo_name_hash = hashlib.sha1(safe_str(value)).hexdigest()
1744 1746
1745 1747 @classmethod
1746 1748 def normalize_repo_name(cls, repo_name):
1747 1749 """
1748 1750 Normalizes an OS-specific repo_name to the format stored internally in
1749 1751 the database, using URL_SEP
1750 1752
1751 1753 :param cls:
1752 1754 :param repo_name:
1753 1755 """
1754 1756 return cls.NAME_SEP.join(repo_name.split(os.sep))
1755 1757
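# Two helpers above boil down to simple string transforms: the repo_name
# setter keeps a SHA1 of the name for the unique repo_name_hash column, and
# normalize_repo_name() converts OS-specific paths to the '/'-separated form
# stored in the database. A standalone sketch (URL_SEP assumed to be '/'):
import os
import hashlib

URL_SEP = '/'

def repo_name_hash(repo_name):
    return hashlib.sha1(repo_name.encode('utf-8')).hexdigest()

def normalize_repo_name(repo_name):
    return URL_SEP.join(repo_name.split(os.sep))

# normalize_repo_name(os.path.join('group', 'sub', 'repo')) -> 'group/sub/repo'
# repo_name_hash('group/sub/repo') -> 40-character hex digest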
1756 1758 @classmethod
1757 1759 def get_by_repo_name(cls, repo_name, cache=False, identity_cache=False):
1758 1760 session = Session()
1759 1761 q = session.query(cls).filter(cls.repo_name == repo_name)
1760 1762
1761 1763 if cache:
1762 1764 if identity_cache:
1763 1765 val = cls.identity_cache(session, 'repo_name', repo_name)
1764 1766 if val:
1765 1767 return val
1766 1768 else:
1767 1769 cache_key = "get_repo_by_name_%s" % _hash_key(repo_name)
1768 1770 q = q.options(
1769 1771 FromCache("sql_cache_short", cache_key))
1770 1772
1771 1773 return q.scalar()
1772 1774
1773 1775 @classmethod
1774 1776 def get_by_id_or_repo_name(cls, repoid):
1775 1777 if isinstance(repoid, (int, long)):
1776 1778 try:
1777 1779 repo = cls.get(repoid)
1778 1780 except ValueError:
1779 1781 repo = None
1780 1782 else:
1781 1783 repo = cls.get_by_repo_name(repoid)
1782 1784 return repo
1783 1785
1784 1786 @classmethod
1785 1787 def get_by_full_path(cls, repo_full_path):
1786 1788 repo_name = repo_full_path.split(cls.base_path(), 1)[-1]
1787 1789 repo_name = cls.normalize_repo_name(repo_name)
1788 1790 return cls.get_by_repo_name(repo_name.strip(URL_SEP))
1789 1791
1790 1792 @classmethod
1791 1793 def get_repo_forks(cls, repo_id):
1792 1794 return cls.query().filter(Repository.fork_id == repo_id)
1793 1795
1794 1796 @classmethod
1795 1797 def base_path(cls):
1796 1798 """
1797 1799 Returns the base path where all repos are stored
1798 1800
1799 1801 :param cls:
1800 1802 """
1801 1803 q = Session().query(RhodeCodeUi)\
1802 1804 .filter(RhodeCodeUi.ui_key == cls.NAME_SEP)
1803 1805 q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
1804 1806 return q.one().ui_value
1805 1807
1806 1808 @classmethod
1807 1809 def get_all_repos(cls, user_id=Optional(None), group_id=Optional(None),
1808 1810 case_insensitive=True, archived=False):
1809 1811 q = Repository.query()
1810 1812
1811 1813 if not archived:
1812 1814 q = q.filter(Repository.archived.isnot(true()))
1813 1815
1814 1816 if not isinstance(user_id, Optional):
1815 1817 q = q.filter(Repository.user_id == user_id)
1816 1818
1817 1819 if not isinstance(group_id, Optional):
1818 1820 q = q.filter(Repository.group_id == group_id)
1819 1821
1820 1822 if case_insensitive:
1821 1823 q = q.order_by(func.lower(Repository.repo_name))
1822 1824 else:
1823 1825 q = q.order_by(Repository.repo_name)
1824 1826
1825 1827 return q.all()
1826 1828
1827 1829 @property
1828 1830 def forks(self):
1829 1831 """
1830 1832 Return forks of this repo
1831 1833 """
1832 1834 return Repository.get_repo_forks(self.repo_id)
1833 1835
1834 1836 @property
1835 1837 def parent(self):
1836 1838 """
1837 1839 Returns fork parent
1838 1840 """
1839 1841 return self.fork
1840 1842
1841 1843 @property
1842 1844 def just_name(self):
1843 1845 return self.repo_name.split(self.NAME_SEP)[-1]
1844 1846
1845 1847 @property
1846 1848 def groups_with_parents(self):
1847 1849 groups = []
1848 1850 if self.group is None:
1849 1851 return groups
1850 1852
1851 1853 cur_gr = self.group
1852 1854 groups.insert(0, cur_gr)
1853 1855 while 1:
1854 1856 gr = getattr(cur_gr, 'parent_group', None)
1855 1857 cur_gr = cur_gr.parent_group
1856 1858 if gr is None:
1857 1859 break
1858 1860 groups.insert(0, gr)
1859 1861
1860 1862 return groups
1861 1863
1862 1864 @property
1863 1865 def groups_and_repo(self):
1864 1866 return self.groups_with_parents, self
1865 1867
1866 1868 @LazyProperty
1867 1869 def repo_path(self):
1868 1870 """
1869 1871 Returns the full base path for this repository, i.e. where it actually
1870 1872 exists on the filesystem
1871 1873 """
1872 1874 q = Session().query(RhodeCodeUi).filter(
1873 1875 RhodeCodeUi.ui_key == self.NAME_SEP)
1874 1876 q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
1875 1877 return q.one().ui_value
1876 1878
1877 1879 @property
1878 1880 def repo_full_path(self):
1879 1881 p = [self.repo_path]
1880 1882 # we need to split the name by / since this is how we store the
1881 1883 # names in the database, but that eventually needs to be converted
1882 1884 # into a valid system path
1883 1885 p += self.repo_name.split(self.NAME_SEP)
1884 1886 return os.path.join(*map(safe_unicode, p))
1885 1887
1886 1888 @property
1887 1889 def cache_keys(self):
1888 1890 """
1889 1891 Returns associated cache keys for that repo
1890 1892 """
1891 1893 invalidation_namespace = CacheKey.REPO_INVALIDATION_NAMESPACE.format(
1892 1894 repo_id=self.repo_id)
1893 1895 return CacheKey.query()\
1894 1896 .filter(CacheKey.cache_args == invalidation_namespace)\
1895 1897 .order_by(CacheKey.cache_key)\
1896 1898 .all()
1897 1899
1898 1900 @property
1899 1901 def cached_diffs_relative_dir(self):
1900 1902 """
1901 1903 Return the cached diffs directory as a path relative to the repository
1902 1904 store, safe to display to users who shouldn't see the absolute store
1903 1905 path
1904 1906 """
1905 1907 return os.path.join(
1906 1908 os.path.dirname(self.repo_name),
1907 1909 self.cached_diffs_dir.split(os.path.sep)[-1])
1908 1910
1909 1911 @property
1910 1912 def cached_diffs_dir(self):
1911 1913 path = self.repo_full_path
1912 1914 return os.path.join(
1913 1915 os.path.dirname(path),
1914 1916 '.__shadow_diff_cache_repo_{}'.format(self.repo_id))
1915 1917
1916 1918 def cached_diffs(self):
1917 1919 diff_cache_dir = self.cached_diffs_dir
1918 1920 if os.path.isdir(diff_cache_dir):
1919 1921 return os.listdir(diff_cache_dir)
1920 1922 return []
1921 1923
1922 1924 def shadow_repos(self):
1923 1925 shadow_repos_pattern = '.__shadow_repo_{}'.format(self.repo_id)
1924 1926 return [
1925 1927 x for x in os.listdir(os.path.dirname(self.repo_full_path))
1926 1928 if x.startswith(shadow_repos_pattern)]
1927 1929
1928 1930 def get_new_name(self, repo_name):
1929 1931 """
1930 1932 returns the new full repository name based on the assigned group and the new name
1931 1933
1932 1934 :param repo_name:
1933 1935 """
1934 1936 path_prefix = self.group.full_path_splitted if self.group else []
1935 1937 return self.NAME_SEP.join(path_prefix + [repo_name])
1936 1938
1937 1939 @property
1938 1940 def _config(self):
1939 1941 """
1940 1942 Returns db based config object.
1941 1943 """
1942 1944 from rhodecode.lib.utils import make_db_config
1943 1945 return make_db_config(clear_session=False, repo=self)
1944 1946
1945 1947 def permissions(self, with_admins=True, with_owner=True,
1946 1948 expand_from_user_groups=False):
1947 1949 """
1948 1950 Permissions for repositories
1949 1951 """
1950 1952 _admin_perm = 'repository.admin'
1951 1953
1952 1954 owner_row = []
1953 1955 if with_owner:
1954 1956 usr = AttributeDict(self.user.get_dict())
1955 1957 usr.owner_row = True
1956 1958 usr.permission = _admin_perm
1957 1959 usr.permission_id = None
1958 1960 owner_row.append(usr)
1959 1961
1960 1962 super_admin_ids = []
1961 1963 super_admin_rows = []
1962 1964 if with_admins:
1963 1965 for usr in User.get_all_super_admins():
1964 1966 super_admin_ids.append(usr.user_id)
1965 1967 # if this admin is also owner, don't double the record
1966 1968 if usr.user_id == owner_row[0].user_id:
1967 1969 owner_row[0].admin_row = True
1968 1970 else:
1969 1971 usr = AttributeDict(usr.get_dict())
1970 1972 usr.admin_row = True
1971 1973 usr.permission = _admin_perm
1972 1974 usr.permission_id = None
1973 1975 super_admin_rows.append(usr)
1974 1976
1975 1977 q = UserRepoToPerm.query().filter(UserRepoToPerm.repository == self)
1976 1978 q = q.options(joinedload(UserRepoToPerm.repository),
1977 1979 joinedload(UserRepoToPerm.user),
1978 1980 joinedload(UserRepoToPerm.permission),)
1979 1981
1980 1982 # get owners, admins and their permissions. We rewrite the sqlalchemy
1981 1983 # objects into plain AttributeDict copies, because the sqlalchemy session
1982 1984 # keeps a global reference and changing one object propagates to all
1983 1985 # others. Without the copy, marking an admin who is also the owner with
1984 1986 # admin_row would change both objects at once.
1985 1987 perm_rows = []
1986 1988 for _usr in q.all():
1987 1989 usr = AttributeDict(_usr.user.get_dict())
1988 1990 # if this user is also owner/admin, mark as duplicate record
1989 1991 if usr.user_id == owner_row[0].user_id or usr.user_id in super_admin_ids:
1990 1992 usr.duplicate_perm = True
1991 1993 # also check if this permission is maybe used by branch_permissions
1992 1994 if _usr.branch_perm_entry:
1993 1995 usr.branch_rules = [x.branch_rule_id for x in _usr.branch_perm_entry]
1994 1996
1995 1997 usr.permission = _usr.permission.permission_name
1996 1998 usr.permission_id = _usr.repo_to_perm_id
1997 1999 perm_rows.append(usr)
1998 2000
1999 2001 # filter the perm rows by 'default' first and then sort them by
2000 2002 # admin, write, read, none permissions, sorted again alphabetically
2001 2003 # within each group
2002 2004 perm_rows = sorted(perm_rows, key=display_user_sort)
2003 2005
2004 2006 user_groups_rows = []
2005 2007 if expand_from_user_groups:
2006 2008 for ug in self.permission_user_groups(with_members=True):
2007 2009 for user_data in ug.members:
2008 2010 user_groups_rows.append(user_data)
2009 2011
2010 2012 return super_admin_rows + owner_row + perm_rows + user_groups_rows
2011 2013
2012 2014 def permission_user_groups(self, with_members=True):
2013 2015 q = UserGroupRepoToPerm.query()\
2014 2016 .filter(UserGroupRepoToPerm.repository == self)
2015 2017 q = q.options(joinedload(UserGroupRepoToPerm.repository),
2016 2018 joinedload(UserGroupRepoToPerm.users_group),
2017 2019 joinedload(UserGroupRepoToPerm.permission),)
2018 2020
2019 2021 perm_rows = []
2020 2022 for _user_group in q.all():
2021 2023 entry = AttributeDict(_user_group.users_group.get_dict())
2022 2024 entry.permission = _user_group.permission.permission_name
2023 2025 if with_members:
2024 2026 entry.members = [x.user.get_dict()
2025 2027 for x in _user_group.users_group.members]
2026 2028 perm_rows.append(entry)
2027 2029
2028 2030 perm_rows = sorted(perm_rows, key=display_user_group_sort)
2029 2031 return perm_rows
2030 2032
2031 2033 def get_api_data(self, include_secrets=False):
2032 2034 """
2033 2035 Common function for generating repo api data
2034 2036
2035 2037 :param include_secrets: See :meth:`User.get_api_data`.
2036 2038
2037 2039 """
2038 2040 # TODO: mikhail: Here there is an anti-pattern, we probably need to
2039 2041 # move this methods on models level.
2040 2042 from rhodecode.model.settings import SettingsModel
2041 2043 from rhodecode.model.repo import RepoModel
2042 2044
2043 2045 repo = self
2044 2046 _user_id, _time, _reason = self.locked
2045 2047
2046 2048 data = {
2047 2049 'repo_id': repo.repo_id,
2048 2050 'repo_name': repo.repo_name,
2049 2051 'repo_type': repo.repo_type,
2050 2052 'clone_uri': repo.clone_uri or '',
2051 2053 'push_uri': repo.push_uri or '',
2052 2054 'url': RepoModel().get_url(self),
2053 2055 'private': repo.private,
2054 2056 'created_on': repo.created_on,
2055 2057 'description': repo.description_safe,
2056 2058 'landing_rev': repo.landing_rev,
2057 2059 'owner': repo.user.username,
2058 2060 'fork_of': repo.fork.repo_name if repo.fork else None,
2059 2061 'fork_of_id': repo.fork.repo_id if repo.fork else None,
2060 2062 'enable_statistics': repo.enable_statistics,
2061 2063 'enable_locking': repo.enable_locking,
2062 2064 'enable_downloads': repo.enable_downloads,
2063 2065 'last_changeset': repo.changeset_cache,
2064 2066 'locked_by': User.get(_user_id).get_api_data(
2065 2067 include_secrets=include_secrets) if _user_id else None,
2066 2068 'locked_date': time_to_datetime(_time) if _time else None,
2067 2069 'lock_reason': _reason if _reason else None,
2068 2070 }
2069 2071
2070 2072 # TODO: mikhail: should be per-repo settings here
2071 2073 rc_config = SettingsModel().get_all_settings()
2072 2074 repository_fields = str2bool(
2073 2075 rc_config.get('rhodecode_repository_fields'))
2074 2076 if repository_fields:
2075 2077 for f in self.extra_fields:
2076 2078 data[f.field_key_prefixed] = f.field_value
2077 2079
2078 2080 return data
2079 2081
2080 2082 @classmethod
2081 2083 def lock(cls, repo, user_id, lock_time=None, lock_reason=None):
2082 2084 if not lock_time:
2083 2085 lock_time = time.time()
2084 2086 if not lock_reason:
2085 2087 lock_reason = cls.LOCK_AUTOMATIC
2086 2088 repo.locked = [user_id, lock_time, lock_reason]
2087 2089 Session().add(repo)
2088 2090 Session().commit()
2089 2091
2090 2092 @classmethod
2091 2093 def unlock(cls, repo):
2092 2094 repo.locked = None
2093 2095 Session().add(repo)
2094 2096 Session().commit()
2095 2097
2096 2098 @classmethod
2097 2099 def getlock(cls, repo):
2098 2100 return repo.locked
2099 2101
2100 2102 def is_user_lock(self, user_id):
2101 2103 if self.lock[0]:
2102 2104 lock_user_id = safe_int(self.lock[0])
2103 2105 user_id = safe_int(user_id)
2104 2106 # both are ints, and they are equal
2105 2107 return all([lock_user_id, user_id]) and lock_user_id == user_id
2106 2108
2107 2109 return False
2108 2110
2109 2111 def get_locking_state(self, action, user_id, only_when_enabled=True):
2110 2112 """
2111 2113 Checks locking on this repository. If locking is enabled and a lock is
2112 2114 present, returns a tuple of make_lock, locked, locked_by.
2113 2115 make_lock can have 3 states: None (do nothing), True (make a lock) and
2114 2116 False (release the lock). This value is later propagated to the hooks,
2115 2117 which do the actual locking; think of it as a signal telling hooks what to do.
2116 2118
2117 2119 """
2118 2120 # TODO: johbo: This is part of the business logic and should be moved
2119 2121 # into the RepositoryModel.
2120 2122
2121 2123 if action not in ('push', 'pull'):
2122 2124 raise ValueError("Invalid action value: %s" % repr(action))
2123 2125
2124 2126 # defines if locked error should be thrown to user
2125 2127 currently_locked = False
2126 2128 # defines if new lock should be made, tri-state
2127 2129 make_lock = None
2128 2130 repo = self
2129 2131 user = User.get(user_id)
2130 2132
2131 2133 lock_info = repo.locked
2132 2134
2133 2135 if repo and (repo.enable_locking or not only_when_enabled):
2134 2136 if action == 'push':
2135 2137 # check if it's already locked; if it is, compare users
2136 2138 locked_by_user_id = lock_info[0]
2137 2139 if user.user_id == locked_by_user_id:
2138 2140 log.debug(
2139 2141 'Got `push` action from user %s, now unlocking', user)
2140 2142 # unlock if we have push from user who locked
2141 2143 make_lock = False
2142 2144 else:
2143 2145 # we're not the user who locked it; reject with the status
2144 2146 # code defined in settings (default is 423 HTTP Locked)
2145 2147 log.debug('Repo %s is currently locked by %s', repo, user)
2146 2148 currently_locked = True
2147 2149 elif action == 'pull':
2148 2150 # [0] user [1] date
2149 2151 if lock_info[0] and lock_info[1]:
2150 2152 log.debug('Repo %s is currently locked by %s', repo, user)
2151 2153 currently_locked = True
2152 2154 else:
2153 2155 log.debug('Setting lock on repo %s by %s', repo, user)
2154 2156 make_lock = True
2155 2157
2156 2158 else:
2157 2159 log.debug('Repository %s does not have locking enabled', repo)
2158 2160
2159 2161 log.debug('FINAL locking values make_lock:%s,locked:%s,locked_by:%s',
2160 2162 make_lock, currently_locked, lock_info)
2161 2163
2162 2164 from rhodecode.lib.auth import HasRepoPermissionAny
2163 2165 perm_check = HasRepoPermissionAny('repository.write', 'repository.admin')
2164 2166 if make_lock and not perm_check(repo_name=repo.repo_name, user=user):
2165 2167 # if we don't have at least write permission we cannot make a lock
2166 2168 log.debug('lock state reset back to FALSE due to lack '
2167 2169 'of at least write permission')
2168 2170 make_lock = False
2169 2171
2170 2172 return make_lock, currently_locked, lock_info
2171 2173
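# A simplified standalone sketch of the decision table implemented by
# get_locking_state() above, without the permission check and logging.
# make_lock is tri-state: None = leave as is, True = set the lock,
# False = release it.

def locking_decision(action, user_id, locked_by_user_id, locked_date=None,
                     locking_enabled=True):
    make_lock, currently_locked = None, False
    if locking_enabled:
        if action == 'push':
            if user_id == locked_by_user_id:
                make_lock = False      # push from the lock holder releases it
            else:
                currently_locked = True
        elif action == 'pull':
            if locked_by_user_id and locked_date:
                currently_locked = True
            else:
                make_lock = True       # pull takes the lock
    return make_lock, currently_locked

# locking_decision('pull', 2, None)                  -> (True, False)
# locking_decision('push', 2, 3, locked_date=1553.0) -> (None, True)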
2172 2174 @property
2173 2175 def last_db_change(self):
2174 2176 return self.updated_on
2175 2177
2176 2178 @property
2177 2179 def clone_uri_hidden(self):
2178 2180 clone_uri = self.clone_uri
2179 2181 if clone_uri:
2180 2182 import urlobject
2181 2183 url_obj = urlobject.URLObject(cleaned_uri(clone_uri))
2182 2184 if url_obj.password:
2183 2185 clone_uri = url_obj.with_password('*****')
2184 2186 return clone_uri
2185 2187
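# clone_uri_hidden above masks any password embedded in the clone URI before
# display. A standalone sketch of the same idea using only the stdlib (the
# model itself relies on the urlobject package):
try:
    from urllib.parse import urlsplit, urlunsplit   # Python 3
except ImportError:
    from urlparse import urlsplit, urlunsplit       # Python 2

def hide_password(uri):
    parts = urlsplit(uri)
    if parts.password:
        netloc = '%s:*****@%s' % (parts.username, parts.hostname)
        if parts.port:
            netloc += ':%d' % parts.port
        parts = parts._replace(netloc=netloc)
    return urlunsplit(parts)

# hide_password('https://bob:secret@code.example.com/repo')
#   -> 'https://bob:*****@code.example.com/repo'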
2186 2188 @property
2187 2189 def push_uri_hidden(self):
2188 2190 push_uri = self.push_uri
2189 2191 if push_uri:
2190 2192 import urlobject
2191 2193 url_obj = urlobject.URLObject(cleaned_uri(push_uri))
2192 2194 if url_obj.password:
2193 2195 push_uri = url_obj.with_password('*****')
2194 2196 return push_uri
2195 2197
2196 2198 def clone_url(self, **override):
2197 2199 from rhodecode.model.settings import SettingsModel
2198 2200
2199 2201 uri_tmpl = None
2200 2202 if 'with_id' in override:
2201 2203 uri_tmpl = self.DEFAULT_CLONE_URI_ID
2202 2204 del override['with_id']
2203 2205
2204 2206 if 'uri_tmpl' in override:
2205 2207 uri_tmpl = override['uri_tmpl']
2206 2208 del override['uri_tmpl']
2207 2209
2208 2210 ssh = False
2209 2211 if 'ssh' in override:
2210 2212 ssh = True
2211 2213 del override['ssh']
2212 2214
2213 2215 # we didn't override our tmpl from **overrides
2214 2216 if not uri_tmpl:
2215 2217 rc_config = SettingsModel().get_all_settings(cache=True)
2216 2218 if ssh:
2217 2219 uri_tmpl = rc_config.get(
2218 2220 'rhodecode_clone_uri_ssh_tmpl') or self.DEFAULT_CLONE_URI_SSH
2219 2221 else:
2220 2222 uri_tmpl = rc_config.get(
2221 2223 'rhodecode_clone_uri_tmpl') or self.DEFAULT_CLONE_URI
2222 2224
2223 2225 request = get_current_request()
2224 2226 return get_clone_url(request=request,
2225 2227 uri_tmpl=uri_tmpl,
2226 2228 repo_name=self.repo_name,
2227 2229 repo_id=self.repo_id, **override)
2228 2230
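# clone_url() above fills one of the clone URI templates declared at the top
# of Repository (e.g. '{scheme}://{user}@{netloc}/{repo}') with request data;
# the actual substitution is done by get_clone_url(). A standalone sketch of
# just the template expansion, with made-up values:

DEFAULT_CLONE_URI = '{scheme}://{user}@{netloc}/{repo}'
DEFAULT_CLONE_URI_ID = '{scheme}://{user}@{netloc}/_{repoid}'

def render_clone_url(uri_tmpl, **data):
    return uri_tmpl.format(**data)

# render_clone_url(DEFAULT_CLONE_URI, scheme='https', user='bob',
#                  netloc='code.example.com', repo='group/repo')
#   -> 'https://bob@code.example.com/group/repo'
# render_clone_url(DEFAULT_CLONE_URI_ID, scheme='https', user='bob',
#                  netloc='code.example.com', repoid=7)
#   -> 'https://bob@code.example.com/_7'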
2229 2231 def set_state(self, state):
2230 2232 self.repo_state = state
2231 2233 Session().add(self)
2232 2234 #==========================================================================
2233 2235 # SCM PROPERTIES
2234 2236 #==========================================================================
2235 2237
2236 2238 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None):
2237 2239 return get_commit_safe(
2238 2240 self.scm_instance(), commit_id, commit_idx, pre_load=pre_load)
2239 2241
2240 2242 def get_changeset(self, rev=None, pre_load=None):
2241 2243 warnings.warn("Use get_commit", DeprecationWarning)
2242 2244 commit_id = None
2243 2245 commit_idx = None
2244 2246 if isinstance(rev, compat.string_types):
2245 2247 commit_id = rev
2246 2248 else:
2247 2249 commit_idx = rev
2248 2250 return self.get_commit(commit_id=commit_id, commit_idx=commit_idx,
2249 2251 pre_load=pre_load)
2250 2252
2251 2253 def get_landing_commit(self):
2252 2254 """
2253 2255 Returns the landing commit or, if that doesn't exist, the tip
2254 2256 """
2255 2257 _rev_type, _rev = self.landing_rev
2256 2258 commit = self.get_commit(_rev)
2257 2259 if isinstance(commit, EmptyCommit):
2258 2260 return self.get_commit()
2259 2261 return commit
2260 2262
2261 2263 def update_commit_cache(self, cs_cache=None, config=None):
2262 2264 """
2263 2265 Update cache of last changeset for repository, keys should be::
2264 2266
2265 2267 short_id
2266 2268 raw_id
2267 2269 revision
2268 2270 parents
2269 2271 message
2270 2272 date
2271 2273 author
2272 2274
2273 2275 :param cs_cache:
2274 2276 """
2275 2277 from rhodecode.lib.vcs.backends.base import BaseChangeset
2276 2278 if cs_cache is None:
2277 2279 # use no-cache version here
2278 2280 scm_repo = self.scm_instance(cache=False, config=config)
2279 2281
2280 2282 empty = not scm_repo or scm_repo.is_empty()
2281 2283 if not empty:
2282 2284 cs_cache = scm_repo.get_commit(
2283 2285 pre_load=["author", "date", "message", "parents"])
2284 2286 else:
2285 2287 cs_cache = EmptyCommit()
2286 2288
2287 2289 if isinstance(cs_cache, BaseChangeset):
2288 2290 cs_cache = cs_cache.__json__()
2289 2291
2290 2292 def is_outdated(new_cs_cache):
2291 2293 if (new_cs_cache['raw_id'] != self.changeset_cache['raw_id'] or
2292 2294 new_cs_cache['revision'] != self.changeset_cache['revision']):
2293 2295 return True
2294 2296 return False
2295 2297
2296 2298 # check if we maybe already have the latest cached revision
2297 2299 if is_outdated(cs_cache) or not self.changeset_cache:
2298 2300 _default = datetime.datetime.utcnow()
2299 2301 last_change = cs_cache.get('date') or _default
2300 2302 if self.updated_on and self.updated_on > last_change:
2301 2303 # we check if last update is newer than the new value
2302 2304 # if yes, we use the current timestamp instead. Imagine you get
2303 2305 # an old commit pushed 1y ago; we'd set the last update to 1y ago.
2304 2306 last_change = _default
2305 2307 log.debug('updated repo %s with new cs cache %s',
2306 2308 self.repo_name, cs_cache)
2307 2309 self.updated_on = last_change
2308 2310 self.changeset_cache = cs_cache
2309 2311 Session().add(self)
2310 2312 Session().commit()
2311 2313 else:
2312 2314 log.debug('Skipping update_commit_cache for repo:`%s` '
2313 2315 'commit already with latest changes', self.repo_name)
2314 2316
2315 2317 @property
2316 2318 def tip(self):
2317 2319 return self.get_commit('tip')
2318 2320
2319 2321 @property
2320 2322 def author(self):
2321 2323 return self.tip.author
2322 2324
2323 2325 @property
2324 2326 def last_change(self):
2325 2327 return self.scm_instance().last_change
2326 2328
2327 2329 def get_comments(self, revisions=None):
2328 2330 """
2329 2331 Returns comments for this repository grouped by revisions
2330 2332
2331 2333 :param revisions: filter query by revisions only
2332 2334 """
2333 2335 cmts = ChangesetComment.query()\
2334 2336 .filter(ChangesetComment.repo == self)
2335 2337 if revisions:
2336 2338 cmts = cmts.filter(ChangesetComment.revision.in_(revisions))
2337 2339 grouped = collections.defaultdict(list)
2338 2340 for cmt in cmts.all():
2339 2341 grouped[cmt.revision].append(cmt)
2340 2342 return grouped
2341 2343
2342 2344 def statuses(self, revisions=None):
2343 2345 """
2344 2346 Returns statuses for this repository
2345 2347
2346 2348 :param revisions: list of revisions to get statuses for
2347 2349 """
2348 2350 statuses = ChangesetStatus.query()\
2349 2351 .filter(ChangesetStatus.repo == self)\
2350 2352 .filter(ChangesetStatus.version == 0)
2351 2353
2352 2354 if revisions:
2353 2355 # Try doing the filtering in chunks to avoid hitting limits
2354 2356 size = 500
2355 2357 status_results = []
2356 2358 for chunk in xrange(0, len(revisions), size):
2357 2359 status_results += statuses.filter(
2358 2360 ChangesetStatus.revision.in_(
2359 2361 revisions[chunk: chunk+size])
2360 2362 ).all()
2361 2363 else:
2362 2364 status_results = statuses.all()
2363 2365
2364 2366 grouped = {}
2365 2367
2366 2368 # maybe we have an open pull request without a status yet?
2367 2369 stat = ChangesetStatus.STATUS_UNDER_REVIEW
2368 2370 status_lbl = ChangesetStatus.get_status_lbl(stat)
2369 2371 for pr in PullRequest.query().filter(PullRequest.source_repo == self).all():
2370 2372 for rev in pr.revisions:
2371 2373 pr_id = pr.pull_request_id
2372 2374 pr_repo = pr.target_repo.repo_name
2373 2375 grouped[rev] = [stat, status_lbl, pr_id, pr_repo]
2374 2376
2375 2377 for stat in status_results:
2376 2378 pr_id = pr_repo = None
2377 2379 if stat.pull_request:
2378 2380 pr_id = stat.pull_request.pull_request_id
2379 2381 pr_repo = stat.pull_request.target_repo.repo_name
2380 2382 grouped[stat.revision] = [str(stat.status), stat.status_lbl,
2381 2383 pr_id, pr_repo]
2382 2384 return grouped
2383 2385
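# statuses() above filters `revision IN (...)` in chunks of 500 so that very
# large revision lists do not exceed database parameter limits. The chunking
# itself is plain list slicing; a standalone sketch:

def chunked(items, size=500):
    """Yield consecutive slices of at most `size` items."""
    for start in range(0, len(items), size):
        yield items[start:start + size]

revisions = ['rev%d' % i for i in range(1200)]
batches = list(chunked(revisions, 500))
# -> batches of 500, 500 and 200 revisions; each batch would feed one
#    ChangesetStatus.revision.in_(batch) filter whose results are combined.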
2384 2386 # ==========================================================================
2385 2387 # SCM CACHE INSTANCE
2386 2388 # ==========================================================================
2387 2389
2388 2390 def scm_instance(self, **kwargs):
2389 2391 import rhodecode
2390 2392
2391 2393 # Passing a config will not hit the cache; currently this is only used
2392 2394 # by repo2dbmapper
2393 2395 config = kwargs.pop('config', None)
2394 2396 cache = kwargs.pop('cache', None)
2395 2397 full_cache = str2bool(rhodecode.CONFIG.get('vcs_full_cache'))
2396 2398 # if cache is NOT defined, use the global default; otherwise we have full
2397 2399 # control over the cache behaviour
2398 2400 if cache is None and full_cache and not config:
2399 2401 return self._get_instance_cached()
2400 2402 return self._get_instance(cache=bool(cache), config=config)
2401 2403
2402 2404 def _get_instance_cached(self):
2403 2405 from rhodecode.lib import rc_cache
2404 2406
2405 2407 cache_namespace_uid = 'cache_repo_instance.{}'.format(self.repo_id)
2406 2408 invalidation_namespace = CacheKey.REPO_INVALIDATION_NAMESPACE.format(
2407 2409 repo_id=self.repo_id)
2408 2410 region = rc_cache.get_or_create_region('cache_repo_longterm', cache_namespace_uid)
2409 2411
2410 2412 @region.conditional_cache_on_arguments(namespace=cache_namespace_uid)
2411 2413 def get_instance_cached(repo_id, context_id):
2412 2414 return self._get_instance()
2413 2415
2414 2416 # we must use thread scoped cache here,
2415 2417 # because each gevent thread needs its own, non-shared connection and cache;
2416 2418 # we also alter `args` so the cache key is individual for every green thread.
2417 2419 inv_context_manager = rc_cache.InvalidationContext(
2418 2420 uid=cache_namespace_uid, invalidation_namespace=invalidation_namespace,
2419 2421 thread_scoped=True)
2420 2422 with inv_context_manager as invalidation_context:
2421 2423 args = (self.repo_id, inv_context_manager.cache_key)
2422 2424 # re-compute and store cache if we get invalidate signal
2423 2425 if invalidation_context.should_invalidate():
2424 2426 instance = get_instance_cached.refresh(*args)
2425 2427 else:
2426 2428 instance = get_instance_cached(*args)
2427 2429
2428 2430 log.debug(
2429 2431 'Repo instance fetched in %.3fs', inv_context_manager.compute_time)
2430 2432 return instance
2431 2433
2432 2434 def _get_instance(self, cache=True, config=None):
2433 2435 config = config or self._config
2434 2436 custom_wire = {
2435 2437 'cache': cache # controls the vcs.remote cache
2436 2438 }
2437 2439 repo = get_vcs_instance(
2438 2440 repo_path=safe_str(self.repo_full_path),
2439 2441 config=config,
2440 2442 with_wire=custom_wire,
2441 2443 create=False,
2442 2444 _vcs_alias=self.repo_type)
2443 2445
2444 2446 return repo
2445 2447
2446 2448 def __json__(self):
2447 2449 return {'landing_rev': self.landing_rev}
2448 2450
2449 2451 def get_dict(self):
2450 2452
2451 2453 # Since we transformed `repo_name` to a hybrid property, we need to
2452 2454 # keep compatibility with the code which uses `repo_name` field.
2453 2455
2454 2456 result = super(Repository, self).get_dict()
2455 2457 result['repo_name'] = result.pop('_repo_name', None)
2456 2458 return result
2457 2459
2458 2460
2459 2461 class RepoGroup(Base, BaseModel):
2460 2462 __tablename__ = 'groups'
2461 2463 __table_args__ = (
2462 2464 UniqueConstraint('group_name', 'group_parent_id'),
2463 2465 base_table_args,
2464 2466 )
2465 2467 __mapper_args__ = {'order_by': 'group_name'}
2466 2468
2467 2469 CHOICES_SEPARATOR = '/' # used to generate select2 choices for nested groups
2468 2470
2469 2471 group_id = Column("group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2470 2472 group_name = Column("group_name", String(255), nullable=False, unique=True, default=None)
2471 2473 group_parent_id = Column("group_parent_id", Integer(), ForeignKey('groups.group_id'), nullable=True, unique=None, default=None)
2472 2474 group_description = Column("group_description", String(10000), nullable=True, unique=None, default=None)
2473 2475 enable_locking = Column("enable_locking", Boolean(), nullable=False, unique=None, default=False)
2474 2476 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None)
2475 2477 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
2476 2478 updated_on = Column('updated_on', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now)
2477 2479 personal = Column('personal', Boolean(), nullable=True, unique=None, default=None)
2478 2480
2479 2481 repo_group_to_perm = relationship('UserRepoGroupToPerm', cascade='all', order_by='UserRepoGroupToPerm.group_to_perm_id')
2480 2482 users_group_to_perm = relationship('UserGroupRepoGroupToPerm', cascade='all')
2481 2483 parent_group = relationship('RepoGroup', remote_side=group_id)
2482 2484 user = relationship('User')
2483 2485 integrations = relationship('Integration', cascade="all, delete, delete-orphan")
2484 2486
2485 2487 def __init__(self, group_name='', parent_group=None):
2486 2488 self.group_name = group_name
2487 2489 self.parent_group = parent_group
2488 2490
2489 2491 def __unicode__(self):
2490 2492 return u"<%s('id:%s:%s')>" % (
2491 2493 self.__class__.__name__, self.group_id, self.group_name)
2492 2494
2493 2495 @validates('group_parent_id')
2494 2496 def validate_group_parent_id(self, key, val):
2495 2497 """
2496 2498 Check for cyclic references, i.e. a parent group pointing back to self
2497 2499 """
2498 2500 if self.group_id and val:
2499 2501 assert val != self.group_id
2500 2502
2501 2503 return val
2502 2504
2503 2505 @hybrid_property
2504 2506 def description_safe(self):
2505 2507 from rhodecode.lib import helpers as h
2506 2508 return h.escape(self.group_description)
2507 2509
2508 2510 @classmethod
2509 2511 def _generate_choice(cls, repo_group):
2510 2512 from webhelpers.html import literal as _literal
2511 2513 _name = lambda k: _literal(cls.CHOICES_SEPARATOR.join(k))
2512 2514 return repo_group.group_id, _name(repo_group.full_path_splitted)
2513 2515
2514 2516 @classmethod
2515 2517 def groups_choices(cls, groups=None, show_empty_group=True):
2516 2518 if not groups:
2517 2519 groups = cls.query().all()
2518 2520
2519 2521 repo_groups = []
2520 2522 if show_empty_group:
2521 2523 repo_groups = [(-1, u'-- %s --' % _('No parent'))]
2522 2524
2523 2525 repo_groups.extend([cls._generate_choice(x) for x in groups])
2524 2526
2525 2527 repo_groups = sorted(
2526 2528 repo_groups, key=lambda t: t[1].split(cls.CHOICES_SEPARATOR)[0])
2527 2529 return repo_groups
2528 2530
2529 2531 @classmethod
2530 2532 def url_sep(cls):
2531 2533 return URL_SEP
2532 2534
2533 2535 @classmethod
2534 2536 def get_by_group_name(cls, group_name, cache=False, case_insensitive=False):
2535 2537 if case_insensitive:
2536 2538 gr = cls.query().filter(func.lower(cls.group_name)
2537 2539 == func.lower(group_name))
2538 2540 else:
2539 2541 gr = cls.query().filter(cls.group_name == group_name)
2540 2542 if cache:
2541 2543 name_key = _hash_key(group_name)
2542 2544 gr = gr.options(
2543 2545 FromCache("sql_cache_short", "get_group_%s" % name_key))
2544 2546 return gr.scalar()
2545 2547
2546 2548 @classmethod
2547 2549 def get_user_personal_repo_group(cls, user_id):
2548 2550 user = User.get(user_id)
2549 2551 if user.username == User.DEFAULT_USER:
2550 2552 return None
2551 2553
2552 2554 return cls.query()\
2553 2555 .filter(cls.personal == true()) \
2554 2556 .filter(cls.user == user) \
2555 2557 .order_by(cls.group_id.asc()) \
2556 2558 .first()
2557 2559
2558 2560 @classmethod
2559 2561 def get_all_repo_groups(cls, user_id=Optional(None), group_id=Optional(None),
2560 2562 case_insensitive=True):
2561 2563 q = RepoGroup.query()
2562 2564
2563 2565 if not isinstance(user_id, Optional):
2564 2566 q = q.filter(RepoGroup.user_id == user_id)
2565 2567
2566 2568 if not isinstance(group_id, Optional):
2567 2569 q = q.filter(RepoGroup.group_parent_id == group_id)
2568 2570
2569 2571 if case_insensitive:
2570 2572 q = q.order_by(func.lower(RepoGroup.group_name))
2571 2573 else:
2572 2574 q = q.order_by(RepoGroup.group_name)
2573 2575 return q.all()
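# Usage sketch (illustrative only): the Optional() defaults mean "no filter";
# passing a concrete user_id narrows the listing to groups owned by that user.
# The user id below is hypothetical.
all_groups = RepoGroup.get_all_repo_groups()
owned = RepoGroup.get_all_repo_groups(user_id=2)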
2574 2576
2575 2577 @property
2576 2578 def parents(self):
2577 2579 parents_recursion_limit = 10
2578 2580 groups = []
2579 2581 if self.parent_group is None:
2580 2582 return groups
2581 2583 cur_gr = self.parent_group
2582 2584 groups.insert(0, cur_gr)
2583 2585 cnt = 0
2584 2586 while 1:
2585 2587 cnt += 1
2586 2588 gr = getattr(cur_gr, 'parent_group', None)
2587 2589 cur_gr = cur_gr.parent_group
2588 2590 if gr is None:
2589 2591 break
2590 2592 if cnt == parents_recursion_limit:
2591 2593 # this will prevent accidental infinite loops
2592 2594 log.error('more than %s parents found for group %s, stopping '
2593 2595 'recursive parent fetching', parents_recursion_limit, self)
2594 2596 break
2595 2597
2596 2598 groups.insert(0, gr)
2597 2599 return groups
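# Usage sketch (illustrative only): building a breadcrumb from the parents
# chain; `group` is assumed to be a RepoGroup loaded elsewhere, and the chain
# is capped at 10 levels by the property above.
breadcrumb = [g.name for g in group.parents] + [group.name]
print(RepoGroup.url_sep().join(breadcrumb))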
2598 2600
2599 2601 @property
2600 2602 def last_db_change(self):
2601 2603 return self.updated_on
2602 2604
2603 2605 @property
2604 2606 def children(self):
2605 2607 return RepoGroup.query().filter(RepoGroup.parent_group == self)
2606 2608
2607 2609 @property
2608 2610 def name(self):
2609 2611 return self.group_name.split(RepoGroup.url_sep())[-1]
2610 2612
2611 2613 @property
2612 2614 def full_path(self):
2613 2615 return self.group_name
2614 2616
2615 2617 @property
2616 2618 def full_path_splitted(self):
2617 2619 return self.group_name.split(RepoGroup.url_sep())
2618 2620
2619 2621 @property
2620 2622 def repositories(self):
2621 2623 return Repository.query()\
2622 2624 .filter(Repository.group == self)\
2623 2625 .order_by(Repository.repo_name)
2624 2626
2625 2627 @property
2626 2628 def repositories_recursive_count(self):
2627 2629 cnt = self.repositories.count()
2628 2630
2629 2631 def children_count(group):
2630 2632 cnt = 0
2631 2633 for child in group.children:
2632 2634 cnt += child.repositories.count()
2633 2635 cnt += children_count(child)
2634 2636 return cnt
2635 2637
2636 2638 return cnt + children_count(self)
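# Usage sketch (illustrative only): the recursive count equals the group's own
# repositories plus those of every descendant group; `group` is assumed to be
# a loaded RepoGroup.
assert group.repositories_recursive_count >= group.repositories.count()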
2637 2639
2638 2640 def _recursive_objects(self, include_repos=True):
2639 2641 all_ = []
2640 2642
2641 2643 def _get_members(root_gr):
2642 2644 if include_repos:
2643 2645 for r in root_gr.repositories:
2644 2646 all_.append(r)
2645 2647 childs = root_gr.children.all()
2646 2648 if childs:
2647 2649 for gr in childs:
2648 2650 all_.append(gr)
2649 2651 _get_members(gr)
2650 2652
2651 2653 _get_members(self)
2652 2654 return [self] + all_
2653 2655
2654 2656 def recursive_groups_and_repos(self):
2655 2657 """
2656 2658 Recursively return all groups, with the repositories in those groups
2657 2659 """
2658 2660 return self._recursive_objects()
2659 2661
2660 2662 def recursive_groups(self):
2661 2663 """
2662 2664 Returns all child groups of this group, including children of children
2663 2665 """
2664 2666 return self._recursive_objects(include_repos=False)
2665 2667
2666 2668 def get_new_name(self, group_name):
2667 2669 """
2668 2670 Returns the new full group name based on the parent and the new name
2669 2671
2670 2672 :param group_name:
2671 2673 """
2672 2674 path_prefix = (self.parent_group.full_path_splitted if
2673 2675 self.parent_group else [])
2674 2676 return RepoGroup.url_sep().join(path_prefix + [group_name])
2675 2677
2676 2678 def permissions(self, with_admins=True, with_owner=True,
2677 2679 expand_from_user_groups=False):
2678 2680 """
2679 2681 Permissions for repository groups
2680 2682 """
2681 2683 _admin_perm = 'group.admin'
2682 2684
2683 2685 owner_row = []
2684 2686 if with_owner:
2685 2687 usr = AttributeDict(self.user.get_dict())
2686 2688 usr.owner_row = True
2687 2689 usr.permission = _admin_perm
2688 2690 owner_row.append(usr)
2689 2691
2690 2692 super_admin_ids = []
2691 2693 super_admin_rows = []
2692 2694 if with_admins:
2693 2695 for usr in User.get_all_super_admins():
2694 2696 super_admin_ids.append(usr.user_id)
2695 2697 # if this admin is also owner, don't double the record
2696 2698 if usr.user_id == owner_row[0].user_id:
2697 2699 owner_row[0].admin_row = True
2698 2700 else:
2699 2701 usr = AttributeDict(usr.get_dict())
2700 2702 usr.admin_row = True
2701 2703 usr.permission = _admin_perm
2702 2704 super_admin_rows.append(usr)
2703 2705
2704 2706 q = UserRepoGroupToPerm.query().filter(UserRepoGroupToPerm.group == self)
2705 2707 q = q.options(joinedload(UserRepoGroupToPerm.group),
2706 2708 joinedload(UserRepoGroupToPerm.user),
2707 2709 joinedload(UserRepoGroupToPerm.permission),)
2708 2710
2709 2711 # get owners, admins and permissions. We do a trick of re-writing
2710 2712 # objects from sqlalchemy to named-tuples because the sqlalchemy session
2711 2713 # holds a global reference and changing one object propagates to all
2712 2714 # others. This means that if an admin is also the owner, an admin_row
2713 2715 # change would propagate to both objects
2714 2716 perm_rows = []
2715 2717 for _usr in q.all():
2716 2718 usr = AttributeDict(_usr.user.get_dict())
2717 2719 # if this user is also owner/admin, mark as duplicate record
2718 2720 if usr.user_id == owner_row[0].user_id or usr.user_id in super_admin_ids:
2719 2721 usr.duplicate_perm = True
2720 2722 usr.permission = _usr.permission.permission_name
2721 2723 perm_rows.append(usr)
2722 2724
2723 2725 # filter the perm rows by 'default' first and then sort them by
2724 2726 # admin, write, read, none permissions, sorted again alphabetically
2725 2727 # within each group
2726 2728 perm_rows = sorted(perm_rows, key=display_user_sort)
2727 2729
2728 2730 user_groups_rows = []
2729 2731 if expand_from_user_groups:
2730 2732 for ug in self.permission_user_groups(with_members=True):
2731 2733 for user_data in ug.members:
2732 2734 user_groups_rows.append(user_data)
2733 2735
2734 2736 return super_admin_rows + owner_row + perm_rows + user_groups_rows
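# Usage sketch (illustrative only): listing the effective permission rows of a
# group, super-admins and owner included; `group` is assumed to be loaded.
for row in group.permissions(with_admins=True, with_owner=True):
    print(row.permission)   # e.g. 'group.admin' for the owner row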
2735 2737
2736 2738 def permission_user_groups(self, with_members=False):
2737 2739 q = UserGroupRepoGroupToPerm.query()\
2738 2740 .filter(UserGroupRepoGroupToPerm.group == self)
2739 2741 q = q.options(joinedload(UserGroupRepoGroupToPerm.group),
2740 2742 joinedload(UserGroupRepoGroupToPerm.users_group),
2741 2743 joinedload(UserGroupRepoGroupToPerm.permission),)
2742 2744
2743 2745 perm_rows = []
2744 2746 for _user_group in q.all():
2745 2747 entry = AttributeDict(_user_group.users_group.get_dict())
2746 2748 entry.permission = _user_group.permission.permission_name
2747 2749 if with_members:
2748 2750 entry.members = [x.user.get_dict()
2749 2751 for x in _user_group.users_group.members]
2750 2752 perm_rows.append(entry)
2751 2753
2752 2754 perm_rows = sorted(perm_rows, key=display_user_group_sort)
2753 2755 return perm_rows
2754 2756
2755 2757 def get_api_data(self):
2756 2758 """
2757 2759 Common function for generating api data
2758 2760
2759 2761 """
2760 2762 group = self
2761 2763 data = {
2762 2764 'group_id': group.group_id,
2763 2765 'group_name': group.group_name,
2764 2766 'group_description': group.description_safe,
2765 2767 'parent_group': group.parent_group.group_name if group.parent_group else None,
2766 2768 'repositories': [x.repo_name for x in group.repositories],
2767 2769 'owner': group.user.username,
2768 2770 }
2769 2771 return data
2770 2772
2771 2773
2772 2774 class Permission(Base, BaseModel):
2773 2775 __tablename__ = 'permissions'
2774 2776 __table_args__ = (
2775 2777 Index('p_perm_name_idx', 'permission_name'),
2776 2778 base_table_args,
2777 2779 )
2778 2780
2779 2781 PERMS = [
2780 2782 ('hg.admin', _('RhodeCode Super Administrator')),
2781 2783
2782 2784 ('repository.none', _('Repository no access')),
2783 2785 ('repository.read', _('Repository read access')),
2784 2786 ('repository.write', _('Repository write access')),
2785 2787 ('repository.admin', _('Repository admin access')),
2786 2788
2787 2789 ('group.none', _('Repository group no access')),
2788 2790 ('group.read', _('Repository group read access')),
2789 2791 ('group.write', _('Repository group write access')),
2790 2792 ('group.admin', _('Repository group admin access')),
2791 2793
2792 2794 ('usergroup.none', _('User group no access')),
2793 2795 ('usergroup.read', _('User group read access')),
2794 2796 ('usergroup.write', _('User group write access')),
2795 2797 ('usergroup.admin', _('User group admin access')),
2796 2798
2797 2799 ('branch.none', _('Branch no permissions')),
2798 2800 ('branch.merge', _('Branch access by web merge')),
2799 2801 ('branch.push', _('Branch access by push')),
2800 2802 ('branch.push_force', _('Branch access by push with force')),
2801 2803
2802 2804 ('hg.repogroup.create.false', _('Repository Group creation disabled')),
2803 2805 ('hg.repogroup.create.true', _('Repository Group creation enabled')),
2804 2806
2805 2807 ('hg.usergroup.create.false', _('User Group creation disabled')),
2806 2808 ('hg.usergroup.create.true', _('User Group creation enabled')),
2807 2809
2808 2810 ('hg.create.none', _('Repository creation disabled')),
2809 2811 ('hg.create.repository', _('Repository creation enabled')),
2810 2812 ('hg.create.write_on_repogroup.true', _('Repository creation enabled with write permission to a repository group')),
2811 2813 ('hg.create.write_on_repogroup.false', _('Repository creation disabled with write permission to a repository group')),
2812 2814
2813 2815 ('hg.fork.none', _('Repository forking disabled')),
2814 2816 ('hg.fork.repository', _('Repository forking enabled')),
2815 2817
2816 2818 ('hg.register.none', _('Registration disabled')),
2817 2819 ('hg.register.manual_activate', _('User Registration with manual account activation')),
2818 2820 ('hg.register.auto_activate', _('User Registration with automatic account activation')),
2819 2821
2820 2822 ('hg.password_reset.enabled', _('Password reset enabled')),
2821 2823 ('hg.password_reset.hidden', _('Password reset hidden')),
2822 2824 ('hg.password_reset.disabled', _('Password reset disabled')),
2823 2825
2824 2826 ('hg.extern_activate.manual', _('Manual activation of external account')),
2825 2827 ('hg.extern_activate.auto', _('Automatic activation of external account')),
2826 2828
2827 2829 ('hg.inherit_default_perms.false', _('Inherit object permissions from default user disabled')),
2828 2830 ('hg.inherit_default_perms.true', _('Inherit object permissions from default user enabled')),
2829 2831 ]
2830 2832
2831 2833 # definition of system default permissions for DEFAULT user, created on
2832 2834 # system setup
2833 2835 DEFAULT_USER_PERMISSIONS = [
2834 2836 # object perms
2835 2837 'repository.read',
2836 2838 'group.read',
2837 2839 'usergroup.read',
2838 2840 # branch, for backward compat we need the same value as before, so force push is allowed
2839 2841 'branch.push_force',
2840 2842 # global
2841 2843 'hg.create.repository',
2842 2844 'hg.repogroup.create.false',
2843 2845 'hg.usergroup.create.false',
2844 2846 'hg.create.write_on_repogroup.true',
2845 2847 'hg.fork.repository',
2846 2848 'hg.register.manual_activate',
2847 2849 'hg.password_reset.enabled',
2848 2850 'hg.extern_activate.auto',
2849 2851 'hg.inherit_default_perms.true',
2850 2852 ]
2851 2853
2852 2854 # Weight defines which permissions are more important.
2853 2855 # The higher the number, the more important the permission.
2855 2857 PERM_WEIGHTS = {
2856 2858 'repository.none': 0,
2857 2859 'repository.read': 1,
2858 2860 'repository.write': 3,
2859 2861 'repository.admin': 4,
2860 2862
2861 2863 'group.none': 0,
2862 2864 'group.read': 1,
2863 2865 'group.write': 3,
2864 2866 'group.admin': 4,
2865 2867
2866 2868 'usergroup.none': 0,
2867 2869 'usergroup.read': 1,
2868 2870 'usergroup.write': 3,
2869 2871 'usergroup.admin': 4,
2870 2872
2871 2873 'branch.none': 0,
2872 2874 'branch.merge': 1,
2873 2875 'branch.push': 3,
2874 2876 'branch.push_force': 4,
2875 2877
2876 2878 'hg.repogroup.create.false': 0,
2877 2879 'hg.repogroup.create.true': 1,
2878 2880
2879 2881 'hg.usergroup.create.false': 0,
2880 2882 'hg.usergroup.create.true': 1,
2881 2883
2882 2884 'hg.fork.none': 0,
2883 2885 'hg.fork.repository': 1,
2884 2886 'hg.create.none': 0,
2885 2887 'hg.create.repository': 1
2886 2888 }
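# Usage sketch (illustrative only): resolving the strongest of several
# candidate permissions by weight; the candidate list is hypothetical.
candidates = ['repository.read', 'repository.write']
strongest = max(candidates, key=Permission.PERM_WEIGHTS.get)   # 'repository.write'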
2887 2889
2888 2890 permission_id = Column("permission_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2889 2891 permission_name = Column("permission_name", String(255), nullable=True, unique=None, default=None)
2890 2892 permission_longname = Column("permission_longname", String(255), nullable=True, unique=None, default=None)
2891 2893
2892 2894 def __unicode__(self):
2893 2895 return u"<%s('%s:%s')>" % (
2894 2896 self.__class__.__name__, self.permission_id, self.permission_name
2895 2897 )
2896 2898
2897 2899 @classmethod
2898 2900 def get_by_key(cls, key):
2899 2901 return cls.query().filter(cls.permission_name == key).scalar()
2900 2902
2901 2903 @classmethod
2902 2904 def get_default_repo_perms(cls, user_id, repo_id=None):
2903 2905 q = Session().query(UserRepoToPerm, Repository, Permission)\
2904 2906 .join((Permission, UserRepoToPerm.permission_id == Permission.permission_id))\
2905 2907 .join((Repository, UserRepoToPerm.repository_id == Repository.repo_id))\
2906 2908 .filter(UserRepoToPerm.user_id == user_id)
2907 2909 if repo_id:
2908 2910 q = q.filter(UserRepoToPerm.repository_id == repo_id)
2909 2911 return q.all()
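# Usage sketch (illustrative only): each returned row is a
# (UserRepoToPerm, Repository, Permission) tuple; the user id is hypothetical.
for repo_to_perm, repo, perm in Permission.get_default_repo_perms(user_id=2):
    print(repo.repo_name, perm.permission_name)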
2910 2912
2911 2913 @classmethod
2912 2914 def get_default_repo_branch_perms(cls, user_id, repo_id=None):
2913 2915 q = Session().query(UserToRepoBranchPermission, UserRepoToPerm, Permission) \
2914 2916 .join(
2915 2917 Permission,
2916 2918 UserToRepoBranchPermission.permission_id == Permission.permission_id) \
2917 2919 .join(
2918 2920 UserRepoToPerm,
2919 2921 UserToRepoBranchPermission.rule_to_perm_id == UserRepoToPerm.repo_to_perm_id) \
2920 2922 .filter(UserRepoToPerm.user_id == user_id)
2921 2923
2922 2924 if repo_id:
2923 2925 q = q.filter(UserToRepoBranchPermission.repository_id == repo_id)
2924 2926 return q.order_by(UserToRepoBranchPermission.rule_order).all()
2925 2927
2926 2928 @classmethod
2927 2929 def get_default_repo_perms_from_user_group(cls, user_id, repo_id=None):
2928 2930 q = Session().query(UserGroupRepoToPerm, Repository, Permission)\
2929 2931 .join(
2930 2932 Permission,
2931 2933 UserGroupRepoToPerm.permission_id == Permission.permission_id)\
2932 2934 .join(
2933 2935 Repository,
2934 2936 UserGroupRepoToPerm.repository_id == Repository.repo_id)\
2935 2937 .join(
2936 2938 UserGroup,
2937 2939 UserGroupRepoToPerm.users_group_id ==
2938 2940 UserGroup.users_group_id)\
2939 2941 .join(
2940 2942 UserGroupMember,
2941 2943 UserGroupRepoToPerm.users_group_id ==
2942 2944 UserGroupMember.users_group_id)\
2943 2945 .filter(
2944 2946 UserGroupMember.user_id == user_id,
2945 2947 UserGroup.users_group_active == true())
2946 2948 if repo_id:
2947 2949 q = q.filter(UserGroupRepoToPerm.repository_id == repo_id)
2948 2950 return q.all()
2949 2951
2950 2952 @classmethod
2951 2953 def get_default_repo_branch_perms_from_user_group(cls, user_id, repo_id=None):
2952 2954 q = Session().query(UserGroupToRepoBranchPermission, UserGroupRepoToPerm, Permission) \
2953 2955 .join(
2954 2956 Permission,
2955 2957 UserGroupToRepoBranchPermission.permission_id == Permission.permission_id) \
2956 2958 .join(
2957 2959 UserGroupRepoToPerm,
2958 2960 UserGroupToRepoBranchPermission.rule_to_perm_id == UserGroupRepoToPerm.users_group_to_perm_id) \
2959 2961 .join(
2960 2962 UserGroup,
2961 2963 UserGroupRepoToPerm.users_group_id == UserGroup.users_group_id) \
2962 2964 .join(
2963 2965 UserGroupMember,
2964 2966 UserGroupRepoToPerm.users_group_id == UserGroupMember.users_group_id) \
2965 2967 .filter(
2966 2968 UserGroupMember.user_id == user_id,
2967 2969 UserGroup.users_group_active == true())
2968 2970
2969 2971 if repo_id:
2970 2972 q = q.filter(UserGroupToRepoBranchPermission.repository_id == repo_id)
2971 2973 return q.order_by(UserGroupToRepoBranchPermission.rule_order).all()
2972 2974
2973 2975 @classmethod
2974 2976 def get_default_group_perms(cls, user_id, repo_group_id=None):
2975 2977 q = Session().query(UserRepoGroupToPerm, RepoGroup, Permission)\
2976 2978 .join(
2977 2979 Permission,
2978 2980 UserRepoGroupToPerm.permission_id == Permission.permission_id)\
2979 2981 .join(
2980 2982 RepoGroup,
2981 2983 UserRepoGroupToPerm.group_id == RepoGroup.group_id)\
2982 2984 .filter(UserRepoGroupToPerm.user_id == user_id)
2983 2985 if repo_group_id:
2984 2986 q = q.filter(UserRepoGroupToPerm.group_id == repo_group_id)
2985 2987 return q.all()
2986 2988
2987 2989 @classmethod
2988 2990 def get_default_group_perms_from_user_group(
2989 2991 cls, user_id, repo_group_id=None):
2990 2992 q = Session().query(UserGroupRepoGroupToPerm, RepoGroup, Permission)\
2991 2993 .join(
2992 2994 Permission,
2993 2995 UserGroupRepoGroupToPerm.permission_id ==
2994 2996 Permission.permission_id)\
2995 2997 .join(
2996 2998 RepoGroup,
2997 2999 UserGroupRepoGroupToPerm.group_id == RepoGroup.group_id)\
2998 3000 .join(
2999 3001 UserGroup,
3000 3002 UserGroupRepoGroupToPerm.users_group_id ==
3001 3003 UserGroup.users_group_id)\
3002 3004 .join(
3003 3005 UserGroupMember,
3004 3006 UserGroupRepoGroupToPerm.users_group_id ==
3005 3007 UserGroupMember.users_group_id)\
3006 3008 .filter(
3007 3009 UserGroupMember.user_id == user_id,
3008 3010 UserGroup.users_group_active == true())
3009 3011 if repo_group_id:
3010 3012 q = q.filter(UserGroupRepoGroupToPerm.group_id == repo_group_id)
3011 3013 return q.all()
3012 3014
3013 3015 @classmethod
3014 3016 def get_default_user_group_perms(cls, user_id, user_group_id=None):
3015 3017 q = Session().query(UserUserGroupToPerm, UserGroup, Permission)\
3016 3018 .join((Permission, UserUserGroupToPerm.permission_id == Permission.permission_id))\
3017 3019 .join((UserGroup, UserUserGroupToPerm.user_group_id == UserGroup.users_group_id))\
3018 3020 .filter(UserUserGroupToPerm.user_id == user_id)
3019 3021 if user_group_id:
3020 3022 q = q.filter(UserUserGroupToPerm.user_group_id == user_group_id)
3021 3023 return q.all()
3022 3024
3023 3025 @classmethod
3024 3026 def get_default_user_group_perms_from_user_group(
3025 3027 cls, user_id, user_group_id=None):
3026 3028 TargetUserGroup = aliased(UserGroup, name='target_user_group')
3027 3029 q = Session().query(UserGroupUserGroupToPerm, UserGroup, Permission)\
3028 3030 .join(
3029 3031 Permission,
3030 3032 UserGroupUserGroupToPerm.permission_id ==
3031 3033 Permission.permission_id)\
3032 3034 .join(
3033 3035 TargetUserGroup,
3034 3036 UserGroupUserGroupToPerm.target_user_group_id ==
3035 3037 TargetUserGroup.users_group_id)\
3036 3038 .join(
3037 3039 UserGroup,
3038 3040 UserGroupUserGroupToPerm.user_group_id ==
3039 3041 UserGroup.users_group_id)\
3040 3042 .join(
3041 3043 UserGroupMember,
3042 3044 UserGroupUserGroupToPerm.user_group_id ==
3043 3045 UserGroupMember.users_group_id)\
3044 3046 .filter(
3045 3047 UserGroupMember.user_id == user_id,
3046 3048 UserGroup.users_group_active == true())
3047 3049 if user_group_id:
3048 3050 q = q.filter(
3049 3051 UserGroupUserGroupToPerm.user_group_id == user_group_id)
3050 3052
3051 3053 return q.all()
3052 3054
3053 3055
3054 3056 class UserRepoToPerm(Base, BaseModel):
3055 3057 __tablename__ = 'repo_to_perm'
3056 3058 __table_args__ = (
3057 3059 UniqueConstraint('user_id', 'repository_id', 'permission_id'),
3058 3060 base_table_args
3059 3061 )
3060 3062
3061 3063 repo_to_perm_id = Column("repo_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3062 3064 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
3063 3065 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
3064 3066 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
3065 3067
3066 3068 user = relationship('User')
3067 3069 repository = relationship('Repository')
3068 3070 permission = relationship('Permission')
3069 3071
3070 3072 branch_perm_entry = relationship('UserToRepoBranchPermission', cascade="all, delete, delete-orphan", lazy='joined')
3071 3073
3072 3074 @classmethod
3073 3075 def create(cls, user, repository, permission):
3074 3076 n = cls()
3075 3077 n.user = user
3076 3078 n.repository = repository
3077 3079 n.permission = permission
3078 3080 Session().add(n)
3079 3081 return n
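# Usage sketch (illustrative only): granting a user write access to a
# repository; `user` and `repo` are assumed to be loaded instances, and
# create() only adds to the session, so an explicit commit is still needed.
perm = Permission.get_by_key('repository.write')
UserRepoToPerm.create(user, repo, perm)
Session().commit()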
3080 3082
3081 3083 def __unicode__(self):
3082 3084 return u'<%s => %s >' % (self.user, self.repository)
3083 3085
3084 3086
3085 3087 class UserUserGroupToPerm(Base, BaseModel):
3086 3088 __tablename__ = 'user_user_group_to_perm'
3087 3089 __table_args__ = (
3088 3090 UniqueConstraint('user_id', 'user_group_id', 'permission_id'),
3089 3091 base_table_args
3090 3092 )
3091 3093
3092 3094 user_user_group_to_perm_id = Column("user_user_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3093 3095 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
3094 3096 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
3095 3097 user_group_id = Column("user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
3096 3098
3097 3099 user = relationship('User')
3098 3100 user_group = relationship('UserGroup')
3099 3101 permission = relationship('Permission')
3100 3102
3101 3103 @classmethod
3102 3104 def create(cls, user, user_group, permission):
3103 3105 n = cls()
3104 3106 n.user = user
3105 3107 n.user_group = user_group
3106 3108 n.permission = permission
3107 3109 Session().add(n)
3108 3110 return n
3109 3111
3110 3112 def __unicode__(self):
3111 3113 return u'<%s => %s >' % (self.user, self.user_group)
3112 3114
3113 3115
3114 3116 class UserToPerm(Base, BaseModel):
3115 3117 __tablename__ = 'user_to_perm'
3116 3118 __table_args__ = (
3117 3119 UniqueConstraint('user_id', 'permission_id'),
3118 3120 base_table_args
3119 3121 )
3120 3122
3121 3123 user_to_perm_id = Column("user_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3122 3124 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
3123 3125 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
3124 3126
3125 3127 user = relationship('User')
3126 3128 permission = relationship('Permission', lazy='joined')
3127 3129
3128 3130 def __unicode__(self):
3129 3131 return u'<%s => %s >' % (self.user, self.permission)
3130 3132
3131 3133
3132 3134 class UserGroupRepoToPerm(Base, BaseModel):
3133 3135 __tablename__ = 'users_group_repo_to_perm'
3134 3136 __table_args__ = (
3135 3137 UniqueConstraint('repository_id', 'users_group_id', 'permission_id'),
3136 3138 base_table_args
3137 3139 )
3138 3140
3139 3141 users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3140 3142 users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
3141 3143 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
3142 3144 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
3143 3145
3144 3146 users_group = relationship('UserGroup')
3145 3147 permission = relationship('Permission')
3146 3148 repository = relationship('Repository')
3147 3149 user_group_branch_perms = relationship('UserGroupToRepoBranchPermission', cascade='all')
3148 3150
3149 3151 @classmethod
3150 3152 def create(cls, users_group, repository, permission):
3151 3153 n = cls()
3152 3154 n.users_group = users_group
3153 3155 n.repository = repository
3154 3156 n.permission = permission
3155 3157 Session().add(n)
3156 3158 return n
3157 3159
3158 3160 def __unicode__(self):
3159 3161 return u'<UserGroupRepoToPerm:%s => %s >' % (self.users_group, self.repository)
3160 3162
3161 3163
3162 3164 class UserGroupUserGroupToPerm(Base, BaseModel):
3163 3165 __tablename__ = 'user_group_user_group_to_perm'
3164 3166 __table_args__ = (
3165 3167 UniqueConstraint('target_user_group_id', 'user_group_id', 'permission_id'),
3166 3168 CheckConstraint('target_user_group_id != user_group_id'),
3167 3169 base_table_args
3168 3170 )
3169 3171
3170 3172 user_group_user_group_to_perm_id = Column("user_group_user_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3171 3173 target_user_group_id = Column("target_user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
3172 3174 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
3173 3175 user_group_id = Column("user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
3174 3176
3175 3177 target_user_group = relationship('UserGroup', primaryjoin='UserGroupUserGroupToPerm.target_user_group_id==UserGroup.users_group_id')
3176 3178 user_group = relationship('UserGroup', primaryjoin='UserGroupUserGroupToPerm.user_group_id==UserGroup.users_group_id')
3177 3179 permission = relationship('Permission')
3178 3180
3179 3181 @classmethod
3180 3182 def create(cls, target_user_group, user_group, permission):
3181 3183 n = cls()
3182 3184 n.target_user_group = target_user_group
3183 3185 n.user_group = user_group
3184 3186 n.permission = permission
3185 3187 Session().add(n)
3186 3188 return n
3187 3189
3188 3190 def __unicode__(self):
3189 3191 return u'<UserGroupUserGroup:%s => %s >' % (self.target_user_group, self.user_group)
3190 3192
3191 3193
3192 3194 class UserGroupToPerm(Base, BaseModel):
3193 3195 __tablename__ = 'users_group_to_perm'
3194 3196 __table_args__ = (
3195 3197 UniqueConstraint('users_group_id', 'permission_id',),
3196 3198 base_table_args
3197 3199 )
3198 3200
3199 3201 users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3200 3202 users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
3201 3203 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
3202 3204
3203 3205 users_group = relationship('UserGroup')
3204 3206 permission = relationship('Permission')
3205 3207
3206 3208
3207 3209 class UserRepoGroupToPerm(Base, BaseModel):
3208 3210 __tablename__ = 'user_repo_group_to_perm'
3209 3211 __table_args__ = (
3210 3212 UniqueConstraint('user_id', 'group_id', 'permission_id'),
3211 3213 base_table_args
3212 3214 )
3213 3215
3214 3216 group_to_perm_id = Column("group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3215 3217 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
3216 3218 group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None)
3217 3219 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
3218 3220
3219 3221 user = relationship('User')
3220 3222 group = relationship('RepoGroup')
3221 3223 permission = relationship('Permission')
3222 3224
3223 3225 @classmethod
3224 3226 def create(cls, user, repository_group, permission):
3225 3227 n = cls()
3226 3228 n.user = user
3227 3229 n.group = repository_group
3228 3230 n.permission = permission
3229 3231 Session().add(n)
3230 3232 return n
3231 3233
3232 3234
3233 3235 class UserGroupRepoGroupToPerm(Base, BaseModel):
3234 3236 __tablename__ = 'users_group_repo_group_to_perm'
3235 3237 __table_args__ = (
3236 3238 UniqueConstraint('users_group_id', 'group_id'),
3237 3239 base_table_args
3238 3240 )
3239 3241
3240 3242 users_group_repo_group_to_perm_id = Column("users_group_repo_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3241 3243 users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
3242 3244 group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None)
3243 3245 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
3244 3246
3245 3247 users_group = relationship('UserGroup')
3246 3248 permission = relationship('Permission')
3247 3249 group = relationship('RepoGroup')
3248 3250
3249 3251 @classmethod
3250 3252 def create(cls, user_group, repository_group, permission):
3251 3253 n = cls()
3252 3254 n.users_group = user_group
3253 3255 n.group = repository_group
3254 3256 n.permission = permission
3255 3257 Session().add(n)
3256 3258 return n
3257 3259
3258 3260 def __unicode__(self):
3259 3261 return u'<UserGroupRepoGroupToPerm:%s => %s >' % (self.users_group, self.group)
3260 3262
3261 3263
3262 3264 class Statistics(Base, BaseModel):
3263 3265 __tablename__ = 'statistics'
3264 3266 __table_args__ = (
3265 3267 base_table_args
3266 3268 )
3267 3269
3268 3270 stat_id = Column("stat_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3269 3271 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=True, default=None)
3270 3272 stat_on_revision = Column("stat_on_revision", Integer(), nullable=False)
3271 3273 commit_activity = Column("commit_activity", LargeBinary(1000000), nullable=False)#JSON data
3272 3274 commit_activity_combined = Column("commit_activity_combined", LargeBinary(), nullable=False)#JSON data
3273 3275 languages = Column("languages", LargeBinary(1000000), nullable=False)#JSON data
3274 3276
3275 3277 repository = relationship('Repository', single_parent=True)
3276 3278
3277 3279
3278 3280 class UserFollowing(Base, BaseModel):
3279 3281 __tablename__ = 'user_followings'
3280 3282 __table_args__ = (
3281 3283 UniqueConstraint('user_id', 'follows_repository_id'),
3282 3284 UniqueConstraint('user_id', 'follows_user_id'),
3283 3285 base_table_args
3284 3286 )
3285 3287
3286 3288 user_following_id = Column("user_following_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3287 3289 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
3288 3290 follows_repo_id = Column("follows_repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=True, unique=None, default=None)
3289 3291 follows_user_id = Column("follows_user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
3290 3292 follows_from = Column('follows_from', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now)
3291 3293
3292 3294 user = relationship('User', primaryjoin='User.user_id==UserFollowing.user_id')
3293 3295
3294 3296 follows_user = relationship('User', primaryjoin='User.user_id==UserFollowing.follows_user_id')
3295 3297 follows_repository = relationship('Repository', order_by='Repository.repo_name')
3296 3298
3297 3299 @classmethod
3298 3300 def get_repo_followers(cls, repo_id):
3299 3301 return cls.query().filter(cls.follows_repo_id == repo_id)
3300 3302
3301 3303
3302 3304 class CacheKey(Base, BaseModel):
3303 3305 __tablename__ = 'cache_invalidation'
3304 3306 __table_args__ = (
3305 3307 UniqueConstraint('cache_key'),
3306 3308 Index('key_idx', 'cache_key'),
3307 3309 base_table_args,
3308 3310 )
3309 3311
3310 3312 CACHE_TYPE_FEED = 'FEED'
3311 3313 CACHE_TYPE_README = 'README'
3312 3314 # namespaces used to register process/thread aware caches
3313 3315 REPO_INVALIDATION_NAMESPACE = 'repo_cache:{repo_id}'
3314 3316 SETTINGS_INVALIDATION_NAMESPACE = 'system_settings'
3315 3317
3316 3318 cache_id = Column("cache_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3317 3319 cache_key = Column("cache_key", String(255), nullable=True, unique=None, default=None)
3318 3320 cache_args = Column("cache_args", String(255), nullable=True, unique=None, default=None)
3319 3321 cache_active = Column("cache_active", Boolean(), nullable=True, unique=None, default=False)
3320 3322
3321 3323 def __init__(self, cache_key, cache_args=''):
3322 3324 self.cache_key = cache_key
3323 3325 self.cache_args = cache_args
3324 3326 self.cache_active = False
3325 3327
3326 3328 def __unicode__(self):
3327 3329 return u"<%s('%s:%s[%s]')>" % (
3328 3330 self.__class__.__name__,
3329 3331 self.cache_id, self.cache_key, self.cache_active)
3330 3332
3331 3333 def _cache_key_partition(self):
3332 3334 prefix, repo_name, suffix = self.cache_key.partition(self.cache_args)
3333 3335 return prefix, repo_name, suffix
3334 3336
3335 3337 def get_prefix(self):
3336 3338 """
3337 3339 Try to extract prefix from existing cache key. The key could consist
3338 3340 of prefix, repo_name, suffix
3339 3341 """
3340 3342 # this returns prefix, repo_name, suffix
3341 3343 return self._cache_key_partition()[0]
3342 3344
3343 3345 def get_suffix(self):
3344 3346 """
3345 3347 get suffix that might have been used in _get_cache_key to
3346 3348 generate self.cache_key. Only used for informational purposes
3347 3349 in repo_edit.mako.
3348 3350 """
3349 3351 # prefix, repo_name, suffix
3350 3352 return self._cache_key_partition()[2]
3351 3353
3352 3354 @classmethod
3353 3355 def delete_all_cache(cls):
3354 3356 """
3355 3357 Delete all cache keys from database.
3356 3358 Should only be run when all instances are down and all entries
3357 3359 thus stale.
3358 3360 """
3359 3361 cls.query().delete()
3360 3362 Session().commit()
3361 3363
3362 3364 @classmethod
3363 3365 def set_invalidate(cls, cache_uid, delete=False):
3364 3366 """
3365 3367 Mark all caches of a repo as invalid in the database.
3366 3368 """
3367 3369
3368 3370 try:
3369 3371 qry = Session().query(cls).filter(cls.cache_args == cache_uid)
3370 3372 if delete:
3371 3373 qry.delete()
3372 3374 log.debug('cache objects deleted for cache args %s',
3373 3375 safe_str(cache_uid))
3374 3376 else:
3375 3377 qry.update({"cache_active": False})
3376 3378 log.debug('cache objects marked as invalid for cache args %s',
3377 3379 safe_str(cache_uid))
3378 3380
3379 3381 Session().commit()
3380 3382 except Exception:
3381 3383 log.exception(
3382 3384 'Cache key invalidation failed for cache args %s',
3383 3385 safe_str(cache_uid))
3384 3386 Session().rollback()
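# Usage sketch (illustrative only): invalidating every cache entry registered
# under a repository namespace, assuming entries were stored with this
# namespace as their cache_args; the repo id is hypothetical.
namespace = CacheKey.REPO_INVALIDATION_NAMESPACE.format(repo_id=42)
CacheKey.set_invalidate(namespace)               # mark entries inactive
CacheKey.set_invalidate(namespace, delete=True)  # or remove them entirely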
3385 3387
3386 3388 @classmethod
3387 3389 def get_active_cache(cls, cache_key):
3388 3390 inv_obj = cls.query().filter(cls.cache_key == cache_key).scalar()
3389 3391 if inv_obj:
3390 3392 return inv_obj
3391 3393 return None
3392 3394
3393 3395
3394 3396 class ChangesetComment(Base, BaseModel):
3395 3397 __tablename__ = 'changeset_comments'
3396 3398 __table_args__ = (
3397 3399 Index('cc_revision_idx', 'revision'),
3398 3400 base_table_args,
3399 3401 )
3400 3402
3401 3403 COMMENT_OUTDATED = u'comment_outdated'
3402 3404 COMMENT_TYPE_NOTE = u'note'
3403 3405 COMMENT_TYPE_TODO = u'todo'
3404 3406 COMMENT_TYPES = [COMMENT_TYPE_NOTE, COMMENT_TYPE_TODO]
3405 3407
3406 3408 comment_id = Column('comment_id', Integer(), nullable=False, primary_key=True)
3407 3409 repo_id = Column('repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
3408 3410 revision = Column('revision', String(40), nullable=True)
3409 3411 pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=True)
3410 3412 pull_request_version_id = Column("pull_request_version_id", Integer(), ForeignKey('pull_request_versions.pull_request_version_id'), nullable=True)
3411 3413 line_no = Column('line_no', Unicode(10), nullable=True)
3412 3414 hl_lines = Column('hl_lines', Unicode(512), nullable=True)
3413 3415 f_path = Column('f_path', Unicode(1000), nullable=True)
3414 3416 user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=False)
3415 3417 text = Column('text', UnicodeText().with_variant(UnicodeText(25000), 'mysql'), nullable=False)
3416 3418 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
3417 3419 modified_at = Column('modified_at', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
3418 3420 renderer = Column('renderer', Unicode(64), nullable=True)
3419 3421 display_state = Column('display_state', Unicode(128), nullable=True)
3420 3422
3421 3423 comment_type = Column('comment_type', Unicode(128), nullable=True, default=COMMENT_TYPE_NOTE)
3422 3424 resolved_comment_id = Column('resolved_comment_id', Integer(), ForeignKey('changeset_comments.comment_id'), nullable=True)
3423 3425
3424 3426 resolved_comment = relationship('ChangesetComment', remote_side=comment_id, back_populates='resolved_by')
3425 3427 resolved_by = relationship('ChangesetComment', back_populates='resolved_comment')
3426 3428
3427 3429 author = relationship('User', lazy='joined')
3428 3430 repo = relationship('Repository')
3429 3431 status_change = relationship('ChangesetStatus', cascade="all, delete, delete-orphan", lazy='joined')
3430 3432 pull_request = relationship('PullRequest', lazy='joined')
3431 3433 pull_request_version = relationship('PullRequestVersion')
3432 3434
3433 3435 @classmethod
3434 3436 def get_users(cls, revision=None, pull_request_id=None):
3435 3437 """
3436 3438 Returns users associated with this ChangesetComment, i.e. those
3437 3439 who actually commented
3438 3440
3439 3441 :param cls:
3440 3442 :param revision:
3441 3443 """
3442 3444 q = Session().query(User)\
3443 3445 .join(ChangesetComment.author)
3444 3446 if revision:
3445 3447 q = q.filter(cls.revision == revision)
3446 3448 elif pull_request_id:
3447 3449 q = q.filter(cls.pull_request_id == pull_request_id)
3448 3450 return q.all()
3449 3451
3450 3452 @classmethod
3451 3453 def get_index_from_version(cls, pr_version, versions):
3452 3454 num_versions = [x.pull_request_version_id for x in versions]
3453 3455 try:
3454 3456 return num_versions.index(pr_version) + 1
3455 3457 except (IndexError, ValueError):
3456 3458 return
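# Usage sketch (illustrative only): mapping a comment to its 1-based pull
# request version index (None when not found); `pull_request` and `comment`
# are assumed to be loaded instances.
versions = pull_request.versions.order_by(
    PullRequestVersion.pull_request_version_id).all()
idx = ChangesetComment.get_index_from_version(
    comment.pull_request_version_id, versions)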
3457 3459
3458 3460 @property
3459 3461 def outdated(self):
3460 3462 return self.display_state == self.COMMENT_OUTDATED
3461 3463
3462 3464 def outdated_at_version(self, version):
3463 3465 """
3464 3466 Checks if the comment is outdated for the given pull request version
3465 3467 """
3466 3468 return self.outdated and self.pull_request_version_id != version
3467 3469
3468 3470 def older_than_version(self, version):
3469 3471 """
3470 3472 Checks if the comment was made in an earlier version than the given one
3471 3473 """
3472 3474 if version is None:
3473 3475 return self.pull_request_version_id is not None
3474 3476
3475 3477 return self.pull_request_version_id < version
3476 3478
3477 3479 @property
3478 3480 def resolved(self):
3479 3481 return self.resolved_by[0] if self.resolved_by else None
3480 3482
3481 3483 @property
3482 3484 def is_todo(self):
3483 3485 return self.comment_type == self.COMMENT_TYPE_TODO
3484 3486
3485 3487 @property
3486 3488 def is_inline(self):
3487 3489 return self.line_no and self.f_path
3488 3490
3489 3491 def get_index_version(self, versions):
3490 3492 return self.get_index_from_version(
3491 3493 self.pull_request_version_id, versions)
3492 3494
3493 3495 def __repr__(self):
3494 3496 if self.comment_id:
3495 3497 return '<DB:Comment #%s>' % self.comment_id
3496 3498 else:
3497 3499 return '<DB:Comment at %#x>' % id(self)
3498 3500
3499 3501 def get_api_data(self):
3500 3502 comment = self
3501 3503 data = {
3502 3504 'comment_id': comment.comment_id,
3503 3505 'comment_type': comment.comment_type,
3504 3506 'comment_text': comment.text,
3505 3507 'comment_status': comment.status_change,
3506 3508 'comment_f_path': comment.f_path,
3507 3509 'comment_lineno': comment.line_no,
3508 3510 'comment_author': comment.author,
3509 3511 'comment_created_on': comment.created_on,
3510 3512 'comment_resolved_by': self.resolved
3511 3513 }
3512 3514 return data
3513 3515
3514 3516 def __json__(self):
3515 3517 data = dict()
3516 3518 data.update(self.get_api_data())
3517 3519 return data
3518 3520
3519 3521
3520 3522 class ChangesetStatus(Base, BaseModel):
3521 3523 __tablename__ = 'changeset_statuses'
3522 3524 __table_args__ = (
3523 3525 Index('cs_revision_idx', 'revision'),
3524 3526 Index('cs_version_idx', 'version'),
3525 3527 UniqueConstraint('repo_id', 'revision', 'version'),
3526 3528 base_table_args
3527 3529 )
3528 3530
3529 3531 STATUS_NOT_REVIEWED = DEFAULT = 'not_reviewed'
3530 3532 STATUS_APPROVED = 'approved'
3531 3533 STATUS_REJECTED = 'rejected'
3532 3534 STATUS_UNDER_REVIEW = 'under_review'
3533 3535
3534 3536 STATUSES = [
3535 3537 (STATUS_NOT_REVIEWED, _("Not Reviewed")), # (no icon) and default
3536 3538 (STATUS_APPROVED, _("Approved")),
3537 3539 (STATUS_REJECTED, _("Rejected")),
3538 3540 (STATUS_UNDER_REVIEW, _("Under Review")),
3539 3541 ]
3540 3542
3541 3543 changeset_status_id = Column('changeset_status_id', Integer(), nullable=False, primary_key=True)
3542 3544 repo_id = Column('repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
3543 3545 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None)
3544 3546 revision = Column('revision', String(40), nullable=False)
3545 3547 status = Column('status', String(128), nullable=False, default=DEFAULT)
3546 3548 changeset_comment_id = Column('changeset_comment_id', Integer(), ForeignKey('changeset_comments.comment_id'))
3547 3549 modified_at = Column('modified_at', DateTime(), nullable=False, default=datetime.datetime.now)
3548 3550 version = Column('version', Integer(), nullable=False, default=0)
3549 3551 pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=True)
3550 3552
3551 3553 author = relationship('User', lazy='joined')
3552 3554 repo = relationship('Repository')
3553 3555 comment = relationship('ChangesetComment', lazy='joined')
3554 3556 pull_request = relationship('PullRequest', lazy='joined')
3555 3557
3556 3558 def __unicode__(self):
3557 3559 return u"<%s('%s[v%s]:%s')>" % (
3558 3560 self.__class__.__name__,
3559 3561 self.status, self.version, self.author
3560 3562 )
3561 3563
3562 3564 @classmethod
3563 3565 def get_status_lbl(cls, value):
3564 3566 return dict(cls.STATUSES).get(value)
3565 3567
3566 3568 @property
3567 3569 def status_lbl(self):
3568 3570 return ChangesetStatus.get_status_lbl(self.status)
3569 3571
3570 3572 def get_api_data(self):
3571 3573 status = self
3572 3574 data = {
3573 3575 'status_id': status.changeset_status_id,
3574 3576 'status': status.status,
3575 3577 }
3576 3578 return data
3577 3579
3578 3580 def __json__(self):
3579 3581 data = dict()
3580 3582 data.update(self.get_api_data())
3581 3583 return data
3582 3584
3583 3585
3584 3586 class _SetState(object):
3585 3587 """
3586 3588 Context manager allowing state changes for sensitive operations such as
3587 3589 pull request update or merge
3588 3590 """
3589 3591
3590 3592 def __init__(self, pull_request, pr_state, back_state=None):
3591 3593 self._pr = pull_request
3592 3594 self._org_state = back_state or pull_request.pull_request_state
3593 3595 self._pr_state = pr_state
3594 3596
3595 3597 def __enter__(self):
3596 3598 log.debug('StateLock: entering set state context, setting state to: `%s`',
3597 3599 self._pr_state)
3598 3600 self._pr.pull_request_state = self._pr_state
3599 3601 Session().add(self._pr)
3600 3602 Session().commit()
3601 3603
3602 3604 def __exit__(self, exc_type, exc_val, exc_tb):
3603 3605 log.debug('StateLock: exiting set state context, setting state to: `%s`',
3604 3606 self._org_state)
3605 3607 self._pr.pull_request_state = self._org_state
3606 3608 Session().add(self._pr)
3607 3609 Session().commit()
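# Usage sketch (illustrative only): _SetState is normally entered through
# PullRequest.set_state() defined below; the original state is restored on
# exit, also when the body raises. `pull_request` is assumed to be loaded and
# do_update() is a hypothetical sensitive operation.
with pull_request.set_state(PullRequest.STATE_UPDATING):
    do_update(pull_request)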
3608 3610
3609 3611
3610 3612 class _PullRequestBase(BaseModel):
3611 3613 """
3612 3614 Common attributes of pull request and version entries.
3613 3615 """
3614 3616
3615 3617 # .status values
3616 3618 STATUS_NEW = u'new'
3617 3619 STATUS_OPEN = u'open'
3618 3620 STATUS_CLOSED = u'closed'
3619 3621
3620 3622 # available states
3621 3623 STATE_CREATING = u'creating'
3622 3624 STATE_UPDATING = u'updating'
3623 3625 STATE_MERGING = u'merging'
3624 3626 STATE_CREATED = u'created'
3625 3627
3626 3628 title = Column('title', Unicode(255), nullable=True)
3627 3629 description = Column(
3628 3630 'description', UnicodeText().with_variant(UnicodeText(10240), 'mysql'),
3629 3631 nullable=True)
3630 3632 description_renderer = Column('description_renderer', Unicode(64), nullable=True)
3631 3633
3632 3634 # new/open/closed status of pull request (not approve/reject/etc)
3633 3635 status = Column('status', Unicode(255), nullable=False, default=STATUS_NEW)
3634 3636 created_on = Column(
3635 3637 'created_on', DateTime(timezone=False), nullable=False,
3636 3638 default=datetime.datetime.now)
3637 3639 updated_on = Column(
3638 3640 'updated_on', DateTime(timezone=False), nullable=False,
3639 3641 default=datetime.datetime.now)
3640 3642
3641 3643 pull_request_state = Column("pull_request_state", String(255), nullable=True)
3642 3644
3643 3645 @declared_attr
3644 3646 def user_id(cls):
3645 3647 return Column(
3646 3648 "user_id", Integer(), ForeignKey('users.user_id'), nullable=False,
3647 3649 unique=None)
3648 3650
3649 3651 # 500 revisions max
3650 3652 _revisions = Column(
3651 3653 'revisions', UnicodeText().with_variant(UnicodeText(20500), 'mysql'))
3652 3654
3653 3655 @declared_attr
3654 3656 def source_repo_id(cls):
3655 3657 # TODO: dan: rename column to source_repo_id
3656 3658 return Column(
3657 3659 'org_repo_id', Integer(), ForeignKey('repositories.repo_id'),
3658 3660 nullable=False)
3659 3661
3660 3662 _source_ref = Column('org_ref', Unicode(255), nullable=False)
3661 3663
3662 3664 @hybrid_property
3663 3665 def source_ref(self):
3664 3666 return self._source_ref
3665 3667
3666 3668 @source_ref.setter
3667 3669 def source_ref(self, val):
3668 3670 parts = (val or '').split(':')
3669 3671 if len(parts) != 3:
3670 3672 raise ValueError(
3671 3673 'Invalid reference format given: {}, expected X:Y:Z'.format(val))
3672 3674 self._source_ref = safe_unicode(val)
3673 3675
3674 3676 _target_ref = Column('other_ref', Unicode(255), nullable=False)
3675 3677
3676 3678 @hybrid_property
3677 3679 def target_ref(self):
3678 3680 return self._target_ref
3679 3681
3680 3682 @target_ref.setter
3681 3683 def target_ref(self, val):
3682 3684 parts = (val or '').split(':')
3683 3685 if len(parts) != 3:
3684 3686 raise ValueError(
3685 3687 'Invalid reference format given: {}, expected X:Y:Z'.format(val))
3686 3688 self._target_ref = safe_unicode(val)
3687 3689
3688 3690 @declared_attr
3689 3691 def target_repo_id(cls):
3690 3692 # TODO: dan: rename column to target_repo_id
3691 3693 return Column(
3692 3694 'other_repo_id', Integer(), ForeignKey('repositories.repo_id'),
3693 3695 nullable=False)
3694 3696
3695 3697 _shadow_merge_ref = Column('shadow_merge_ref', Unicode(255), nullable=True)
3696 3698
3697 3699 # TODO: dan: rename column to last_merge_source_rev
3698 3700 _last_merge_source_rev = Column(
3699 3701 'last_merge_org_rev', String(40), nullable=True)
3700 3702 # TODO: dan: rename column to last_merge_target_rev
3701 3703 _last_merge_target_rev = Column(
3702 3704 'last_merge_other_rev', String(40), nullable=True)
3703 3705 _last_merge_status = Column('merge_status', Integer(), nullable=True)
3704 3706 merge_rev = Column('merge_rev', String(40), nullable=True)
3705 3707
3706 3708 reviewer_data = Column(
3707 3709 'reviewer_data_json', MutationObj.as_mutable(
3708 3710 JsonType(dialect_map=dict(mysql=UnicodeText(16384)))))
3709 3711
3710 3712 @property
3711 3713 def reviewer_data_json(self):
3712 3714 return json.dumps(self.reviewer_data)
3713 3715
3714 3716 @hybrid_property
3715 3717 def description_safe(self):
3716 3718 from rhodecode.lib import helpers as h
3717 3719 return h.escape(self.description)
3718 3720
3719 3721 @hybrid_property
3720 3722 def revisions(self):
3721 3723 return self._revisions.split(':') if self._revisions else []
3722 3724
3723 3725 @revisions.setter
3724 3726 def revisions(self, val):
3725 3727 self._revisions = ':'.join(val)
3726 3728
3727 3729 @hybrid_property
3728 3730 def last_merge_status(self):
3729 3731 return safe_int(self._last_merge_status)
3730 3732
3731 3733 @last_merge_status.setter
3732 3734 def last_merge_status(self, val):
3733 3735 self._last_merge_status = val
3734 3736
3735 3737 @declared_attr
3736 3738 def author(cls):
3737 3739 return relationship('User', lazy='joined')
3738 3740
3739 3741 @declared_attr
3740 3742 def source_repo(cls):
3741 3743 return relationship(
3742 3744 'Repository',
3743 3745 primaryjoin='%s.source_repo_id==Repository.repo_id' % cls.__name__)
3744 3746
3745 3747 @property
3746 3748 def source_ref_parts(self):
3747 3749 return self.unicode_to_reference(self.source_ref)
3748 3750
3749 3751 @declared_attr
3750 3752 def target_repo(cls):
3751 3753 return relationship(
3752 3754 'Repository',
3753 3755 primaryjoin='%s.target_repo_id==Repository.repo_id' % cls.__name__)
3754 3756
3755 3757 @property
3756 3758 def target_ref_parts(self):
3757 3759 return self.unicode_to_reference(self.target_ref)
3758 3760
3759 3761 @property
3760 3762 def shadow_merge_ref(self):
3761 3763 return self.unicode_to_reference(self._shadow_merge_ref)
3762 3764
3763 3765 @shadow_merge_ref.setter
3764 3766 def shadow_merge_ref(self, ref):
3765 3767 self._shadow_merge_ref = self.reference_to_unicode(ref)
3766 3768
3767 3769 @staticmethod
3768 3770 def unicode_to_reference(raw):
3769 3771 """
3770 3772 Convert a unicode (or string) to a reference object.
3771 3773 If the value evaluates to False it returns None.
3772 3774 """
3773 3775 if raw:
3774 3776 refs = raw.split(':')
3775 3777 return Reference(*refs)
3776 3778 else:
3777 3779 return None
3778 3780
3779 3781 @staticmethod
3780 3782 def reference_to_unicode(ref):
3781 3783 """
3782 3784 Convert a reference object to unicode.
3783 3785 If reference is None it returns None.
3784 3786 """
3785 3787 if ref:
3786 3788 return u':'.join(ref)
3787 3789 else:
3788 3790 return None
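# Usage sketch (illustrative only): references round-trip through the
# colon-separated string form used by source_ref/target_ref; the three parts
# are assumed here to be type, name and commit id, and the value is made up.
raw = u'branch:default:aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa'
ref = PullRequest.unicode_to_reference(raw)
assert PullRequest.reference_to_unicode(ref) == raw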
3789 3791
3790 3792 def get_api_data(self, with_merge_state=True):
3791 3793 from rhodecode.model.pull_request import PullRequestModel
3792 3794
3793 3795 pull_request = self
3794 3796 if with_merge_state:
3795 3797 merge_status = PullRequestModel().merge_status(pull_request)
3796 3798 merge_state = {
3797 3799 'status': merge_status[0],
3798 3800 'message': safe_unicode(merge_status[1]),
3799 3801 }
3800 3802 else:
3801 3803 merge_state = {'status': 'not_available',
3802 3804 'message': 'not_available'}
3803 3805
3804 3806 merge_data = {
3805 3807 'clone_url': PullRequestModel().get_shadow_clone_url(pull_request),
3806 3808 'reference': (
3807 3809 pull_request.shadow_merge_ref._asdict()
3808 3810 if pull_request.shadow_merge_ref else None),
3809 3811 }
3810 3812
3811 3813 data = {
3812 3814 'pull_request_id': pull_request.pull_request_id,
3813 3815 'url': PullRequestModel().get_url(pull_request),
3814 3816 'title': pull_request.title,
3815 3817 'description': pull_request.description,
3816 3818 'status': pull_request.status,
3817 3819 'state': pull_request.pull_request_state,
3818 3820 'created_on': pull_request.created_on,
3819 3821 'updated_on': pull_request.updated_on,
3820 3822 'commit_ids': pull_request.revisions,
3821 3823 'review_status': pull_request.calculated_review_status(),
3822 3824 'mergeable': merge_state,
3823 3825 'source': {
3824 3826 'clone_url': pull_request.source_repo.clone_url(),
3825 3827 'repository': pull_request.source_repo.repo_name,
3826 3828 'reference': {
3827 3829 'name': pull_request.source_ref_parts.name,
3828 3830 'type': pull_request.source_ref_parts.type,
3829 3831 'commit_id': pull_request.source_ref_parts.commit_id,
3830 3832 },
3831 3833 },
3832 3834 'target': {
3833 3835 'clone_url': pull_request.target_repo.clone_url(),
3834 3836 'repository': pull_request.target_repo.repo_name,
3835 3837 'reference': {
3836 3838 'name': pull_request.target_ref_parts.name,
3837 3839 'type': pull_request.target_ref_parts.type,
3838 3840 'commit_id': pull_request.target_ref_parts.commit_id,
3839 3841 },
3840 3842 },
3841 3843 'merge': merge_data,
3842 3844 'author': pull_request.author.get_api_data(include_secrets=False,
3843 3845 details='basic'),
3844 3846 'reviewers': [
3845 3847 {
3846 3848 'user': reviewer.get_api_data(include_secrets=False,
3847 3849 details='basic'),
3848 3850 'reasons': reasons,
3849 3851 'review_status': st[0][1].status if st else 'not_reviewed',
3850 3852 }
3851 3853 for obj, reviewer, reasons, mandatory, st in
3852 3854 pull_request.reviewers_statuses()
3853 3855 ]
3854 3856 }
3855 3857
3856 3858 return data
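# Usage sketch (illustrative only): reading merge readiness from the API
# payload; with_merge_state=False skips the potentially expensive merge
# simulation. `pull_request` is assumed to be a loaded instance.
data = pull_request.get_api_data(with_merge_state=False)
print(data['status'], data['mergeable']['status'])   # e.g. 'new' 'not_available'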
3857 3859
3858 3860 def set_state(self, pull_request_state, final_state=None):
3859 3861 """
3860 3862 # goes from initial state to updating to initial state.
3861 3863 # initial state can be changed by specifying back_state=
3862 3864 with pull_request_obj.set_state(PullRequest.STATE_UPDATING):
3863 3865 pull_request.merge()
3864 3866
3865 3867 :param pull_request_state:
3866 3868 :param final_state:
3867 3869
3868 3870 """
3869 3871
3870 3872 return _SetState(self, pull_request_state, back_state=final_state)
3871 3873
3872 3874
3873 3875 class PullRequest(Base, _PullRequestBase):
3874 3876 __tablename__ = 'pull_requests'
3875 3877 __table_args__ = (
3876 3878 base_table_args,
3877 3879 )
3878 3880
3879 3881 pull_request_id = Column(
3880 3882 'pull_request_id', Integer(), nullable=False, primary_key=True)
3881 3883
3882 3884 def __repr__(self):
3883 3885 if self.pull_request_id:
3884 3886 return '<DB:PullRequest #%s>' % self.pull_request_id
3885 3887 else:
3886 3888 return '<DB:PullRequest at %#x>' % id(self)
3887 3889
3888 3890 reviewers = relationship('PullRequestReviewers',
3889 3891 cascade="all, delete, delete-orphan")
3890 3892 statuses = relationship('ChangesetStatus',
3891 3893 cascade="all, delete, delete-orphan")
3892 3894 comments = relationship('ChangesetComment',
3893 3895 cascade="all, delete, delete-orphan")
3894 3896 versions = relationship('PullRequestVersion',
3895 3897 cascade="all, delete, delete-orphan",
3896 3898 lazy='dynamic')
3897 3899
3898 3900 @classmethod
3899 3901 def get_pr_display_object(cls, pull_request_obj, org_pull_request_obj,
3900 3902 internal_methods=None):
3901 3903
3902 3904 class PullRequestDisplay(object):
3903 3905 """
3904 3906 Special object wrapper for showing PullRequest data via Versions.
3905 3907 It mimics the PR object as closely as possible. This is a read-only
3906 3908 object just for display.
3907 3909 """
3908 3910
3909 3911 def __init__(self, attrs, internal=None):
3910 3912 self.attrs = attrs
3911 3913 # internal attributes have priority over the ones given via attrs
3912 3914 self.internal = internal or ['versions']
3913 3915
3914 3916 def __getattr__(self, item):
3915 3917 if item in self.internal:
3916 3918 return getattr(self, item)
3917 3919 try:
3918 3920 return self.attrs[item]
3919 3921 except KeyError:
3920 3922 raise AttributeError(
3921 3923 '%s object has no attribute %s' % (self, item))
3922 3924
3923 3925 def __repr__(self):
3924 3926 return '<DB:PullRequestDisplay #%s>' % self.attrs.get('pull_request_id')
3925 3927
3926 3928 def versions(self):
3927 3929 return pull_request_obj.versions.order_by(
3928 3930 PullRequestVersion.pull_request_version_id).all()
3929 3931
3930 3932 def is_closed(self):
3931 3933 return pull_request_obj.is_closed()
3932 3934
3933 3935 @property
3934 3936 def pull_request_version_id(self):
3935 3937 return getattr(pull_request_obj, 'pull_request_version_id', None)
3936 3938
3937 3939 attrs = StrictAttributeDict(pull_request_obj.get_api_data())
3938 3940
3939 3941 attrs.author = StrictAttributeDict(
3940 3942 pull_request_obj.author.get_api_data())
3941 3943 if pull_request_obj.target_repo:
3942 3944 attrs.target_repo = StrictAttributeDict(
3943 3945 pull_request_obj.target_repo.get_api_data())
3944 3946 attrs.target_repo.clone_url = pull_request_obj.target_repo.clone_url
3945 3947
3946 3948 if pull_request_obj.source_repo:
3947 3949 attrs.source_repo = StrictAttributeDict(
3948 3950 pull_request_obj.source_repo.get_api_data())
3949 3951 attrs.source_repo.clone_url = pull_request_obj.source_repo.clone_url
3950 3952
3951 3953 attrs.source_ref_parts = pull_request_obj.source_ref_parts
3952 3954 attrs.target_ref_parts = pull_request_obj.target_ref_parts
3953 3955 attrs.revisions = pull_request_obj.revisions
3954 3956
3955 3957 attrs.shadow_merge_ref = org_pull_request_obj.shadow_merge_ref
3956 3958 attrs.reviewer_data = org_pull_request_obj.reviewer_data
3957 3959 attrs.reviewer_data_json = org_pull_request_obj.reviewer_data_json
3958 3960
3959 3961 return PullRequestDisplay(attrs, internal=internal_methods)
3960 3962
3961 3963 def is_closed(self):
3962 3964 return self.status == self.STATUS_CLOSED
3963 3965
3964 3966 def __json__(self):
3965 3967 return {
3966 3968 'revisions': self.revisions,
3967 3969 }
3968 3970
3969 3971 def calculated_review_status(self):
3970 3972 from rhodecode.model.changeset_status import ChangesetStatusModel
3971 3973 return ChangesetStatusModel().calculated_review_status(self)
3972 3974
3973 3975 def reviewers_statuses(self):
3974 3976 from rhodecode.model.changeset_status import ChangesetStatusModel
3975 3977 return ChangesetStatusModel().reviewers_statuses(self)
3976 3978
3977 3979 @property
3978 3980 def workspace_id(self):
3979 3981 from rhodecode.model.pull_request import PullRequestModel
3980 3982 return PullRequestModel()._workspace_id(self)
3981 3983
3982 3984 def get_shadow_repo(self):
3983 3985 workspace_id = self.workspace_id
3984 3986 vcs_obj = self.target_repo.scm_instance()
3985 3987 shadow_repository_path = vcs_obj._get_shadow_repository_path(
3986 3988 self.target_repo.repo_id, workspace_id)
3987 3989 if os.path.isdir(shadow_repository_path):
3988 3990 return vcs_obj._get_shadow_instance(shadow_repository_path)
3989 3991
3990 3992
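A minimal sketch (not part of this changeset) of how the `get_pr_display_object` wrapper above might be used when rendering an older pull request version; `pr_version` and `pull_request` are hypothetical objects loaded elsewhere.

    # hypothetical: pr_version is a PullRequestVersion row, pull_request its parent
    display_pr = PullRequest.get_pr_display_object(pr_version, pull_request)

    display_pr.pull_request_id   # plain attributes are served from get_api_data()
    display_pr.versions()        # 'versions' is in the internal list, so the wrapper
                                 # resolves it from the version object itself
    display_pr.is_closed()       # defined on the wrapper, delegates to the original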
3991 3993 class PullRequestVersion(Base, _PullRequestBase):
3992 3994 __tablename__ = 'pull_request_versions'
3993 3995 __table_args__ = (
3994 3996 base_table_args,
3995 3997 )
3996 3998
3997 3999 pull_request_version_id = Column(
3998 4000 'pull_request_version_id', Integer(), nullable=False, primary_key=True)
3999 4001 pull_request_id = Column(
4000 4002 'pull_request_id', Integer(),
4001 4003 ForeignKey('pull_requests.pull_request_id'), nullable=False)
4002 4004 pull_request = relationship('PullRequest')
4003 4005
4004 4006 def __repr__(self):
4005 4007 if self.pull_request_version_id:
4006 4008 return '<DB:PullRequestVersion #%s>' % self.pull_request_version_id
4007 4009 else:
4008 4010 return '<DB:PullRequestVersion at %#x>' % id(self)
4009 4011
4010 4012 @property
4011 4013 def reviewers(self):
4012 4014 return self.pull_request.reviewers
4013 4015
4014 4016 @property
4015 4017 def versions(self):
4016 4018 return self.pull_request.versions
4017 4019
4018 4020 def is_closed(self):
4019 4021 # calculate from original
4020 4022 return self.pull_request.status == self.STATUS_CLOSED
4021 4023
4022 4024 def calculated_review_status(self):
4023 4025 return self.pull_request.calculated_review_status()
4024 4026
4025 4027 def reviewers_statuses(self):
4026 4028 return self.pull_request.reviewers_statuses()
4027 4029
4028 4030
4029 4031 class PullRequestReviewers(Base, BaseModel):
4030 4032 __tablename__ = 'pull_request_reviewers'
4031 4033 __table_args__ = (
4032 4034 base_table_args,
4033 4035 )
4034 4036
4035 4037 @hybrid_property
4036 4038 def reasons(self):
4037 4039 if not self._reasons:
4038 4040 return []
4039 4041 return self._reasons
4040 4042
4041 4043 @reasons.setter
4042 4044 def reasons(self, val):
4043 4045 val = val or []
4044 4046 if any(not isinstance(x, compat.string_types) for x in val):
4045 4047 raise Exception('invalid reasons type, must be list of strings')
4046 4048 self._reasons = val
4047 4049
4048 4050 pull_requests_reviewers_id = Column(
4049 4051 'pull_requests_reviewers_id', Integer(), nullable=False,
4050 4052 primary_key=True)
4051 4053 pull_request_id = Column(
4052 4054 "pull_request_id", Integer(),
4053 4055 ForeignKey('pull_requests.pull_request_id'), nullable=False)
4054 4056 user_id = Column(
4055 4057 "user_id", Integer(), ForeignKey('users.user_id'), nullable=True)
4056 4058 _reasons = Column(
4057 4059 'reason', MutationList.as_mutable(
4058 4060 JsonType('list', dialect_map=dict(mysql=UnicodeText(16384)))))
4059 4061
4060 4062 mandatory = Column("mandatory", Boolean(), nullable=False, default=False)
4061 4063 user = relationship('User')
4062 4064 pull_request = relationship('PullRequest')
4063 4065
4064 4066 rule_data = Column(
4065 4067 'rule_data_json',
4066 4068 JsonType(dialect_map=dict(mysql=UnicodeText(16384))))
4067 4069
4068 4070 def rule_user_group_data(self):
4069 4071 """
4070 4072 Returns the voting user group rule data for this reviewer
4071 4073 """
4072 4074
4073 4075 if self.rule_data and 'vote_rule' in self.rule_data:
4074 4076 user_group_data = {}
4075 4077 if 'rule_user_group_entry_id' in self.rule_data:
4076 4078 # means a group with voting rules!
4077 4079 user_group_data['id'] = self.rule_data['rule_user_group_entry_id']
4078 4080 user_group_data['name'] = self.rule_data['rule_name']
4079 4081 user_group_data['vote_rule'] = self.rule_data['vote_rule']
4080 4082
4081 4083 return user_group_data
4082 4084
4083 4085 def __unicode__(self):
4084 4086 return u"<%s('id:%s')>" % (self.__class__.__name__,
4085 4087 self.pull_requests_reviewers_id)
4086 4088
4087 4089
4088 4090 class Notification(Base, BaseModel):
4089 4091 __tablename__ = 'notifications'
4090 4092 __table_args__ = (
4091 4093 Index('notification_type_idx', 'type'),
4092 4094 base_table_args,
4093 4095 )
4094 4096
4095 4097 TYPE_CHANGESET_COMMENT = u'cs_comment'
4096 4098 TYPE_MESSAGE = u'message'
4097 4099 TYPE_MENTION = u'mention'
4098 4100 TYPE_REGISTRATION = u'registration'
4099 4101 TYPE_PULL_REQUEST = u'pull_request'
4100 4102 TYPE_PULL_REQUEST_COMMENT = u'pull_request_comment'
4101 4103
4102 4104 notification_id = Column('notification_id', Integer(), nullable=False, primary_key=True)
4103 4105 subject = Column('subject', Unicode(512), nullable=True)
4104 4106 body = Column('body', UnicodeText().with_variant(UnicodeText(50000), 'mysql'), nullable=True)
4105 4107 created_by = Column("created_by", Integer(), ForeignKey('users.user_id'), nullable=True)
4106 4108 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
4107 4109 type_ = Column('type', Unicode(255))
4108 4110
4109 4111 created_by_user = relationship('User')
4110 4112 notifications_to_users = relationship('UserNotification', lazy='joined',
4111 4113 cascade="all, delete, delete-orphan")
4112 4114
4113 4115 @property
4114 4116 def recipients(self):
4115 4117 return [x.user for x in UserNotification.query()\
4116 4118 .filter(UserNotification.notification == self)\
4117 4119 .order_by(UserNotification.user_id.asc()).all()]
4118 4120
4119 4121 @classmethod
4120 4122 def create(cls, created_by, subject, body, recipients, type_=None):
4121 4123 if type_ is None:
4122 4124 type_ = Notification.TYPE_MESSAGE
4123 4125
4124 4126 notification = cls()
4125 4127 notification.created_by_user = created_by
4126 4128 notification.subject = subject
4127 4129 notification.body = body
4128 4130 notification.type_ = type_
4129 4131 notification.created_on = datetime.datetime.now()
4130 4132
4131 4133 # For each recipient, link the created notification to their account
4132 4134 for u in recipients:
4133 4135 assoc = UserNotification()
4134 4136 assoc.user_id = u.user_id
4135 4137 assoc.notification = notification
4136 4138
4137 4139 # if created_by is among the recipients, mark their notification
4138 4140 # as read
4139 4141 if u.user_id == created_by.user_id:
4140 4142 assoc.read = True
4141 4143 Session().add(assoc)
4142 4144
4143 4145 Session().add(notification)
4144 4146
4145 4147 return notification
4146 4148
4147 4149
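For context, a minimal sketch (not part of this changeset) of sending a message-type notification with the classmethod above; `admin` and `users` are hypothetical User objects.

    # Notification.create() links a UserNotification row to every recipient
    # and marks the author's own copy as read
    notification = Notification.create(
        created_by=admin,
        subject='Maintenance window',
        body='The server will be restarted at 22:00 UTC.',
        recipients=users,
        type_=Notification.TYPE_MESSAGE)
    Session().commit()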
4148 4150 class UserNotification(Base, BaseModel):
4149 4151 __tablename__ = 'user_to_notification'
4150 4152 __table_args__ = (
4151 4153 UniqueConstraint('user_id', 'notification_id'),
4152 4154 base_table_args
4153 4155 )
4154 4156
4155 4157 user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), primary_key=True)
4156 4158 notification_id = Column("notification_id", Integer(), ForeignKey('notifications.notification_id'), primary_key=True)
4157 4159 read = Column('read', Boolean, default=False)
4158 4160 sent_on = Column('sent_on', DateTime(timezone=False), nullable=True, unique=None)
4159 4161
4160 4162 user = relationship('User', lazy="joined")
4161 4163 notification = relationship('Notification', lazy="joined",
4162 4164 order_by=lambda: Notification.created_on.desc(),)
4163 4165
4164 4166 def mark_as_read(self):
4165 4167 self.read = True
4166 4168 Session().add(self)
4167 4169
4168 4170
4169 4171 class Gist(Base, BaseModel):
4170 4172 __tablename__ = 'gists'
4171 4173 __table_args__ = (
4172 4174 Index('g_gist_access_id_idx', 'gist_access_id'),
4173 4175 Index('g_created_on_idx', 'created_on'),
4174 4176 base_table_args
4175 4177 )
4176 4178
4177 4179 GIST_PUBLIC = u'public'
4178 4180 GIST_PRIVATE = u'private'
4179 4181 DEFAULT_FILENAME = u'gistfile1.txt'
4180 4182
4181 4183 ACL_LEVEL_PUBLIC = u'acl_public'
4182 4184 ACL_LEVEL_PRIVATE = u'acl_private'
4183 4185
4184 4186 gist_id = Column('gist_id', Integer(), primary_key=True)
4185 4187 gist_access_id = Column('gist_access_id', Unicode(250))
4186 4188 gist_description = Column('gist_description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))
4187 4189 gist_owner = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=True)
4188 4190 gist_expires = Column('gist_expires', Float(53), nullable=False)
4189 4191 gist_type = Column('gist_type', Unicode(128), nullable=False)
4190 4192 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
4191 4193 modified_at = Column('modified_at', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
4192 4194 acl_level = Column('acl_level', Unicode(128), nullable=True)
4193 4195
4194 4196 owner = relationship('User')
4195 4197
4196 4198 def __repr__(self):
4197 4199 return '<Gist:[%s]%s>' % (self.gist_type, self.gist_access_id)
4198 4200
4199 4201 @hybrid_property
4200 4202 def description_safe(self):
4201 4203 from rhodecode.lib import helpers as h
4202 4204 return h.escape(self.gist_description)
4203 4205
4204 4206 @classmethod
4205 4207 def get_or_404(cls, id_):
4206 4208 from pyramid.httpexceptions import HTTPNotFound
4207 4209
4208 4210 res = cls.query().filter(cls.gist_access_id == id_).scalar()
4209 4211 if not res:
4210 4212 raise HTTPNotFound()
4211 4213 return res
4212 4214
4213 4215 @classmethod
4214 4216 def get_by_access_id(cls, gist_access_id):
4215 4217 return cls.query().filter(cls.gist_access_id == gist_access_id).scalar()
4216 4218
4217 4219 def gist_url(self):
4218 4220 from rhodecode.model.gist import GistModel
4219 4221 return GistModel().get_url(self)
4220 4222
4221 4223 @classmethod
4222 4224 def base_path(cls):
4223 4225 """
4224 4226 Returns the base path where all gists are stored
4225 4227
4226 4228 :param cls:
4227 4229 """
4228 4230 from rhodecode.model.gist import GIST_STORE_LOC
4229 4231 q = Session().query(RhodeCodeUi)\
4230 4232 .filter(RhodeCodeUi.ui_key == URL_SEP)
4231 4233 q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
4232 4234 return os.path.join(q.one().ui_value, GIST_STORE_LOC)
4233 4235
4234 4236 def get_api_data(self):
4235 4237 """
4236 4238 Common function for generating gist related data for API
4237 4239 """
4238 4240 gist = self
4239 4241 data = {
4240 4242 'gist_id': gist.gist_id,
4241 4243 'type': gist.gist_type,
4242 4244 'access_id': gist.gist_access_id,
4243 4245 'description': gist.gist_description,
4244 4246 'url': gist.gist_url(),
4245 4247 'expires': gist.gist_expires,
4246 4248 'created_on': gist.created_on,
4247 4249 'modified_at': gist.modified_at,
4248 4250 'content': None,
4249 4251 'acl_level': gist.acl_level,
4250 4252 }
4251 4253 return data
4252 4254
4253 4255 def __json__(self):
4254 4256 data = dict(
4255 4257 )
4256 4258 data.update(self.get_api_data())
4257 4259 return data
4258 4260 # SCM functions
4259 4261
4260 4262 def scm_instance(self, **kwargs):
4261 4263 full_repo_path = os.path.join(self.base_path(), self.gist_access_id)
4262 4264 return get_vcs_instance(
4263 4265 repo_path=safe_str(full_repo_path), create=False)
4264 4266
4265 4267
4266 4268 class ExternalIdentity(Base, BaseModel):
4267 4269 __tablename__ = 'external_identities'
4268 4270 __table_args__ = (
4269 4271 Index('local_user_id_idx', 'local_user_id'),
4270 4272 Index('external_id_idx', 'external_id'),
4271 4273 base_table_args
4272 4274 )
4273 4275
4274 4276 external_id = Column('external_id', Unicode(255), default=u'', primary_key=True)
4275 4277 external_username = Column('external_username', Unicode(1024), default=u'')
4276 4278 local_user_id = Column('local_user_id', Integer(), ForeignKey('users.user_id'), primary_key=True)
4277 4279 provider_name = Column('provider_name', Unicode(255), default=u'', primary_key=True)
4278 4280 access_token = Column('access_token', String(1024), default=u'')
4279 4281 alt_token = Column('alt_token', String(1024), default=u'')
4280 4282 token_secret = Column('token_secret', String(1024), default=u'')
4281 4283
4282 4284 @classmethod
4283 4285 def by_external_id_and_provider(cls, external_id, provider_name, local_user_id=None):
4284 4286 """
4285 4287 Returns ExternalIdentity instance based on search params
4286 4288
4287 4289 :param external_id:
4288 4290 :param provider_name:
4289 4291 :return: ExternalIdentity
4290 4292 """
4291 4293 query = cls.query()
4292 4294 query = query.filter(cls.external_id == external_id)
4293 4295 query = query.filter(cls.provider_name == provider_name)
4294 4296 if local_user_id:
4295 4297 query = query.filter(cls.local_user_id == local_user_id)
4296 4298 return query.first()
4297 4299
4298 4300 @classmethod
4299 4301 def user_by_external_id_and_provider(cls, external_id, provider_name):
4300 4302 """
4301 4303 Returns User instance based on search params
4302 4304
4303 4305 :param external_id:
4304 4306 :param provider_name:
4305 4307 :return: User
4306 4308 """
4307 4309 query = User.query()
4308 4310 query = query.filter(cls.external_id == external_id)
4309 4311 query = query.filter(cls.provider_name == provider_name)
4310 4312 query = query.filter(User.user_id == cls.local_user_id)
4311 4313 return query.first()
4312 4314
4313 4315 @classmethod
4314 4316 def by_local_user_id(cls, local_user_id):
4315 4317 """
4316 4318 Returns all external identities for the given user
4317 4319
4318 4320 :param local_user_id:
4319 4321 :return: ExternalIdentity
4320 4322 """
4321 4323 query = cls.query()
4322 4324 query = query.filter(cls.local_user_id == local_user_id)
4323 4325 return query
4324 4326
4325 4327 @classmethod
4326 4328 def load_provider_plugin(cls, plugin_id):
4327 4329 from rhodecode.authentication.base import loadplugin
4328 4330 _plugin_id = 'egg:rhodecode-enterprise-ee#{}'.format(plugin_id)
4329 4331 auth_plugin = loadplugin(_plugin_id)
4330 4332 return auth_plugin
4331 4333
4332 4334
4333 4335 class Integration(Base, BaseModel):
4334 4336 __tablename__ = 'integrations'
4335 4337 __table_args__ = (
4336 4338 base_table_args
4337 4339 )
4338 4340
4339 4341 integration_id = Column('integration_id', Integer(), primary_key=True)
4340 4342 integration_type = Column('integration_type', String(255))
4341 4343 enabled = Column('enabled', Boolean(), nullable=False)
4342 4344 name = Column('name', String(255), nullable=False)
4343 4345 child_repos_only = Column('child_repos_only', Boolean(), nullable=False,
4344 4346 default=False)
4345 4347
4346 4348 settings = Column(
4347 4349 'settings_json', MutationObj.as_mutable(
4348 4350 JsonType(dialect_map=dict(mysql=UnicodeText(16384)))))
4349 4351 repo_id = Column(
4350 4352 'repo_id', Integer(), ForeignKey('repositories.repo_id'),
4351 4353 nullable=True, unique=None, default=None)
4352 4354 repo = relationship('Repository', lazy='joined')
4353 4355
4354 4356 repo_group_id = Column(
4355 4357 'repo_group_id', Integer(), ForeignKey('groups.group_id'),
4356 4358 nullable=True, unique=None, default=None)
4357 4359 repo_group = relationship('RepoGroup', lazy='joined')
4358 4360
4359 4361 @property
4360 4362 def scope(self):
4361 4363 if self.repo:
4362 4364 return repr(self.repo)
4363 4365 if self.repo_group:
4364 4366 if self.child_repos_only:
4365 4367 return repr(self.repo_group) + ' (child repos only)'
4366 4368 else:
4367 4369 return repr(self.repo_group) + ' (recursive)'
4368 4370 if self.child_repos_only:
4369 4371 return 'root_repos'
4370 4372 return 'global'
4371 4373
4372 4374 def __repr__(self):
4373 4375 return '<Integration(%r, %r)>' % (self.integration_type, self.scope)
4374 4376
4375 4377
4376 4378 class RepoReviewRuleUser(Base, BaseModel):
4377 4379 __tablename__ = 'repo_review_rules_users'
4378 4380 __table_args__ = (
4379 4381 base_table_args
4380 4382 )
4381 4383
4382 4384 repo_review_rule_user_id = Column('repo_review_rule_user_id', Integer(), primary_key=True)
4383 4385 repo_review_rule_id = Column("repo_review_rule_id", Integer(), ForeignKey('repo_review_rules.repo_review_rule_id'))
4384 4386 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False)
4385 4387 mandatory = Column("mandatory", Boolean(), nullable=False, default=False)
4386 4388 user = relationship('User')
4387 4389
4388 4390 def rule_data(self):
4389 4391 return {
4390 4392 'mandatory': self.mandatory
4391 4393 }
4392 4394
4393 4395
4394 4396 class RepoReviewRuleUserGroup(Base, BaseModel):
4395 4397 __tablename__ = 'repo_review_rules_users_groups'
4396 4398 __table_args__ = (
4397 4399 base_table_args
4398 4400 )
4399 4401
4400 4402 VOTE_RULE_ALL = -1
4401 4403
4402 4404 repo_review_rule_users_group_id = Column('repo_review_rule_users_group_id', Integer(), primary_key=True)
4403 4405 repo_review_rule_id = Column("repo_review_rule_id", Integer(), ForeignKey('repo_review_rules.repo_review_rule_id'))
4404 4406 users_group_id = Column("users_group_id", Integer(),ForeignKey('users_groups.users_group_id'), nullable=False)
4405 4407 mandatory = Column("mandatory", Boolean(), nullable=False, default=False)
4406 4408 vote_rule = Column("vote_rule", Integer(), nullable=True, default=VOTE_RULE_ALL)
4407 4409 users_group = relationship('UserGroup')
4408 4410
4409 4411 def rule_data(self):
4410 4412 return {
4411 4413 'mandatory': self.mandatory,
4412 4414 'vote_rule': self.vote_rule
4413 4415 }
4414 4416
4415 4417 @property
4416 4418 def vote_rule_label(self):
4417 4419 if not self.vote_rule or self.vote_rule == self.VOTE_RULE_ALL:
4418 4420 return 'all must vote'
4419 4421 else:
4420 4422 return 'min. vote {}'.format(self.vote_rule)
4421 4423
4422 4424
4423 4425 class RepoReviewRule(Base, BaseModel):
4424 4426 __tablename__ = 'repo_review_rules'
4425 4427 __table_args__ = (
4426 4428 base_table_args
4427 4429 )
4428 4430
4429 4431 repo_review_rule_id = Column(
4430 4432 'repo_review_rule_id', Integer(), primary_key=True)
4431 4433 repo_id = Column(
4432 4434 "repo_id", Integer(), ForeignKey('repositories.repo_id'))
4433 4435 repo = relationship('Repository', backref='review_rules')
4434 4436
4435 4437 review_rule_name = Column('review_rule_name', String(255))
4436 4438 _branch_pattern = Column("branch_pattern", UnicodeText().with_variant(UnicodeText(255), 'mysql'), default=u'*') # glob
4437 4439 _target_branch_pattern = Column("target_branch_pattern", UnicodeText().with_variant(UnicodeText(255), 'mysql'), default=u'*') # glob
4438 4440 _file_pattern = Column("file_pattern", UnicodeText().with_variant(UnicodeText(255), 'mysql'), default=u'*') # glob
4439 4441
4440 4442 use_authors_for_review = Column("use_authors_for_review", Boolean(), nullable=False, default=False)
4441 4443 forbid_author_to_review = Column("forbid_author_to_review", Boolean(), nullable=False, default=False)
4442 4444 forbid_commit_author_to_review = Column("forbid_commit_author_to_review", Boolean(), nullable=False, default=False)
4443 4445 forbid_adding_reviewers = Column("forbid_adding_reviewers", Boolean(), nullable=False, default=False)
4444 4446
4445 4447 rule_users = relationship('RepoReviewRuleUser')
4446 4448 rule_user_groups = relationship('RepoReviewRuleUserGroup')
4447 4449
4448 4450 def _validate_pattern(self, value):
4449 4451 re.compile('^' + glob2re(value) + '$')
4450 4452
4451 4453 @hybrid_property
4452 4454 def source_branch_pattern(self):
4453 4455 return self._branch_pattern or '*'
4454 4456
4455 4457 @source_branch_pattern.setter
4456 4458 def source_branch_pattern(self, value):
4457 4459 self._validate_pattern(value)
4458 4460 self._branch_pattern = value or '*'
4459 4461
4460 4462 @hybrid_property
4461 4463 def target_branch_pattern(self):
4462 4464 return self._target_branch_pattern or '*'
4463 4465
4464 4466 @target_branch_pattern.setter
4465 4467 def target_branch_pattern(self, value):
4466 4468 self._validate_pattern(value)
4467 4469 self._target_branch_pattern = value or '*'
4468 4470
4469 4471 @hybrid_property
4470 4472 def file_pattern(self):
4471 4473 return self._file_pattern or '*'
4472 4474
4473 4475 @file_pattern.setter
4474 4476 def file_pattern(self, value):
4475 4477 self._validate_pattern(value)
4476 4478 self._file_pattern = value or '*'
4477 4479
4478 4480 def matches(self, source_branch, target_branch, files_changed):
4479 4481 """
4480 4482 Check if this review rule matches a branch/files in a pull request
4481 4483
4482 4484 :param source_branch: source branch name of the pull request
4483 4485 :param target_branch: target branch name of the pull request
4484 4486 :param files_changed: list of file paths changed in the pull request
4485 4487 """
4486 4488
4487 4489 source_branch = source_branch or ''
4488 4490 target_branch = target_branch or ''
4489 4491 files_changed = files_changed or []
4490 4492
4491 4493 branch_matches = True
4492 4494 if source_branch or target_branch:
4493 4495 if self.source_branch_pattern == '*':
4494 4496 source_branch_match = True
4495 4497 else:
4496 4498 if self.source_branch_pattern.startswith('re:'):
4497 4499 source_pattern = self.source_branch_pattern[3:]
4498 4500 else:
4499 4501 source_pattern = '^' + glob2re(self.source_branch_pattern) + '$'
4500 4502 source_branch_regex = re.compile(source_pattern)
4501 4503 source_branch_match = bool(source_branch_regex.search(source_branch))
4502 4504 if self.target_branch_pattern == '*':
4503 4505 target_branch_match = True
4504 4506 else:
4505 4507 if self.target_branch_pattern.startswith('re:'):
4506 4508 target_pattern = self.target_branch_pattern[3:]
4507 4509 else:
4508 4510 target_pattern = '^' + glob2re(self.target_branch_pattern) + '$'
4509 4511 target_branch_regex = re.compile(target_pattern)
4510 4512 target_branch_match = bool(target_branch_regex.search(target_branch))
4511 4513
4512 4514 branch_matches = source_branch_match and target_branch_match
4513 4515
4514 4516 files_matches = True
4515 4517 if self.file_pattern != '*':
4516 4518 files_matches = False
4517 4519 if self.file_pattern.startswith('re:'):
4518 4520 file_pattern = self.file_pattern[3:]
4519 4521 else:
4520 4522 file_pattern = glob2re(self.file_pattern)
4521 4523 file_regex = re.compile(file_pattern)
4522 4524 for filename in files_changed:
4523 4525 if file_regex.search(filename):
4524 4526 files_matches = True
4525 4527 break
4526 4528
4527 4529 return branch_matches and files_matches
4528 4530
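A rough illustration (not part of this changeset) of how `matches()` above evaluates a rule: patterns prefixed with `re:` are used as explicit regexes, anything else is treated as a glob via glob2re(). The rule instance and branch names below are hypothetical.

    rule = RepoReviewRule()
    rule.source_branch_pattern = '*'                      # any source branch
    rule.target_branch_pattern = 're:^(master|stable)$'   # explicit regex form
    rule.file_pattern = '*'                               # any changed file

    rule.matches('feature/login', 'master', ['setup.py'])   # expected: True
    rule.matches('feature/login', 'develop', ['setup.py'])  # expected: False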
4529 4531 @property
4530 4532 def review_users(self):
4531 4533 """ Returns the users which this rule applies to """
4532 4534
4533 4535 users = collections.OrderedDict()
4534 4536
4535 4537 for rule_user in self.rule_users:
4536 4538 if rule_user.user.active:
4537 4539 if rule_user.user not in users:
4538 4540 users[rule_user.user.username] = {
4539 4541 'user': rule_user.user,
4540 4542 'source': 'user',
4541 4543 'source_data': {},
4542 4544 'data': rule_user.rule_data()
4543 4545 }
4544 4546
4545 4547 for rule_user_group in self.rule_user_groups:
4546 4548 source_data = {
4547 4549 'user_group_id': rule_user_group.users_group.users_group_id,
4548 4550 'name': rule_user_group.users_group.users_group_name,
4549 4551 'members': len(rule_user_group.users_group.members)
4550 4552 }
4551 4553 for member in rule_user_group.users_group.members:
4552 4554 if member.user.active:
4553 4555 key = member.user.username
4554 4556 if key in users:
4555 4557 # skip this member as we already have them;
4556 4558 # this prevents overriding the "first" matched
4557 4559 # users with duplicates from multiple groups
4558 4560 continue
4559 4561
4560 4562 users[key] = {
4561 4563 'user': member.user,
4562 4564 'source': 'user_group',
4563 4565 'source_data': source_data,
4564 4566 'data': rule_user_group.rule_data()
4565 4567 }
4566 4568
4567 4569 return users
4568 4570
4569 4571 def user_group_vote_rule(self, user_id):
4570 4572
4571 4573 rules = []
4572 4574 if not self.rule_user_groups:
4573 4575 return rules
4574 4576
4575 4577 for user_group in self.rule_user_groups:
4576 4578 user_group_members = [x.user_id for x in user_group.users_group.members]
4577 4579 if user_id in user_group_members:
4578 4580 rules.append(user_group)
4579 4581 return rules
4580 4582
4581 4583 def __repr__(self):
4582 4584 return '<RepoReviewerRule(id=%r, repo=%r)>' % (
4583 4585 self.repo_review_rule_id, self.repo)
4584 4586
4585 4587
4586 4588 class ScheduleEntry(Base, BaseModel):
4587 4589 __tablename__ = 'schedule_entries'
4588 4590 __table_args__ = (
4589 4591 UniqueConstraint('schedule_name', name='s_schedule_name_idx'),
4590 4592 UniqueConstraint('task_uid', name='s_task_uid_idx'),
4591 4593 base_table_args,
4592 4594 )
4593 4595
4594 4596 schedule_types = ['crontab', 'timedelta', 'integer']
4595 4597 schedule_entry_id = Column('schedule_entry_id', Integer(), primary_key=True)
4596 4598
4597 4599 schedule_name = Column("schedule_name", String(255), nullable=False, unique=None, default=None)
4598 4600 schedule_description = Column("schedule_description", String(10000), nullable=True, unique=None, default=None)
4599 4601 schedule_enabled = Column("schedule_enabled", Boolean(), nullable=False, unique=None, default=True)
4600 4602
4601 4603 _schedule_type = Column("schedule_type", String(255), nullable=False, unique=None, default=None)
4602 4604 schedule_definition = Column('schedule_definition_json', MutationObj.as_mutable(JsonType(default=lambda: "", dialect_map=dict(mysql=LONGTEXT()))))
4603 4605
4604 4606 schedule_last_run = Column('schedule_last_run', DateTime(timezone=False), nullable=True, unique=None, default=None)
4605 4607 schedule_total_run_count = Column('schedule_total_run_count', Integer(), nullable=True, unique=None, default=0)
4606 4608
4607 4609 # task
4608 4610 task_uid = Column("task_uid", String(255), nullable=False, unique=None, default=None)
4609 4611 task_dot_notation = Column("task_dot_notation", String(4096), nullable=False, unique=None, default=None)
4610 4612 task_args = Column('task_args_json', MutationObj.as_mutable(JsonType(default=list, dialect_map=dict(mysql=LONGTEXT()))))
4611 4613 task_kwargs = Column('task_kwargs_json', MutationObj.as_mutable(JsonType(default=dict, dialect_map=dict(mysql=LONGTEXT()))))
4612 4614
4613 4615 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
4614 4616 updated_on = Column('updated_on', DateTime(timezone=False), nullable=True, unique=None, default=None)
4615 4617
4616 4618 @hybrid_property
4617 4619 def schedule_type(self):
4618 4620 return self._schedule_type
4619 4621
4620 4622 @schedule_type.setter
4621 4623 def schedule_type(self, val):
4622 4624 if val not in self.schedule_types:
4623 4625 raise ValueError('Value must be one of `{}` and got `{}`'.format(
4624 4626 self.schedule_types, val))
4625 4627
4626 4628 self._schedule_type = val
4627 4629
4628 4630 @classmethod
4629 4631 def get_uid(cls, obj):
4630 4632 args = obj.task_args
4631 4633 kwargs = obj.task_kwargs
4632 4634 if isinstance(args, JsonRaw):
4633 4635 try:
4634 4636 args = json.loads(args)
4635 4637 except ValueError:
4636 4638 args = tuple()
4637 4639
4638 4640 if isinstance(kwargs, JsonRaw):
4639 4641 try:
4640 4642 kwargs = json.loads(kwargs)
4641 4643 except ValueError:
4642 4644 kwargs = dict()
4643 4645
4644 4646 dot_notation = obj.task_dot_notation
4645 4647 val = '.'.join(map(safe_str, [
4646 4648 sorted(dot_notation), args, sorted(kwargs.items())]))
4647 4649 return hashlib.sha1(val).hexdigest()
4648 4650
4649 4651 @classmethod
4650 4652 def get_by_schedule_name(cls, schedule_name):
4651 4653 return cls.query().filter(cls.schedule_name == schedule_name).scalar()
4652 4654
4653 4655 @classmethod
4654 4656 def get_by_schedule_id(cls, schedule_id):
4655 4657 return cls.query().filter(cls.schedule_entry_id == schedule_id).scalar()
4656 4658
4657 4659 @property
4658 4660 def task(self):
4659 4661 return self.task_dot_notation
4660 4662
4661 4663 @property
4662 4664 def schedule(self):
4663 4665 from rhodecode.lib.celerylib.utils import raw_2_schedule
4664 4666 schedule = raw_2_schedule(self.schedule_definition, self.schedule_type)
4665 4667 return schedule
4666 4668
4667 4669 @property
4668 4670 def args(self):
4669 4671 try:
4670 4672 return list(self.task_args or [])
4671 4673 except ValueError:
4672 4674 return list()
4673 4675
4674 4676 @property
4675 4677 def kwargs(self):
4676 4678 try:
4677 4679 return dict(self.task_kwargs or {})
4678 4680 except ValueError:
4679 4681 return dict()
4680 4682
4681 4683 def _as_raw(self, val):
4682 4684 if hasattr(val, 'de_coerce'):
4683 4685 val = val.de_coerce()
4684 4686 if val:
4685 4687 val = json.dumps(val)
4686 4688
4687 4689 return val
4688 4690
4689 4691 @property
4690 4692 def schedule_definition_raw(self):
4691 4693 return self._as_raw(self.schedule_definition)
4692 4694
4693 4695 @property
4694 4696 def args_raw(self):
4695 4697 return self._as_raw(self.task_args)
4696 4698
4697 4699 @property
4698 4700 def kwargs_raw(self):
4699 4701 return self._as_raw(self.task_kwargs)
4700 4702
4701 4703 def __repr__(self):
4702 4704 return '<DB:ScheduleEntry({}:{})>'.format(
4703 4705 self.schedule_entry_id, self.schedule_name)
4704 4706
4705 4707
4706 4708 @event.listens_for(ScheduleEntry, 'before_update')
4707 4709 def update_task_uid(mapper, connection, target):
4708 4710 target.task_uid = ScheduleEntry.get_uid(target)
4709 4711
4710 4712
4711 4713 @event.listens_for(ScheduleEntry, 'before_insert')
4712 4714 def set_task_uid(mapper, connection, target):
4713 4715 target.task_uid = ScheduleEntry.get_uid(target)
4714 4716
4715 4717
4716 4718 class _BaseBranchPerms(BaseModel):
4717 4719 @classmethod
4718 4720 def compute_hash(cls, value):
4719 4721 return sha1_safe(value)
4720 4722
4721 4723 @hybrid_property
4722 4724 def branch_pattern(self):
4723 4725 return self._branch_pattern or '*'
4724 4726
4725 4727 @hybrid_property
4726 4728 def branch_hash(self):
4727 4729 return self._branch_hash
4728 4730
4729 4731 def _validate_glob(self, value):
4730 4732 re.compile('^' + glob2re(value) + '$')
4731 4733
4732 4734 @branch_pattern.setter
4733 4735 def branch_pattern(self, value):
4734 4736 self._validate_glob(value)
4735 4737 self._branch_pattern = value or '*'
4736 4738 # set the Hash when setting the branch pattern
4737 4739 self._branch_hash = self.compute_hash(self._branch_pattern)
4738 4740
4739 4741 def matches(self, branch):
4740 4742 """
4741 4743 Check if the given branch matches this entry
4742 4744
4743 4745 :param branch: branch name for the commit
4744 4746 """
4745 4747
4746 4748 branch = branch or ''
4747 4749
4748 4750 branch_matches = True
4749 4751 if branch:
4750 4752 branch_regex = re.compile('^' + glob2re(self.branch_pattern) + '$')
4751 4753 branch_matches = bool(branch_regex.search(branch))
4752 4754
4753 4755 return branch_matches
4754 4756
4755 4757
4756 4758 class UserToRepoBranchPermission(Base, _BaseBranchPerms):
4757 4759 __tablename__ = 'user_to_repo_branch_permissions'
4758 4760 __table_args__ = (
4759 4761 {'extend_existing': True, 'mysql_engine': 'InnoDB',
4760 4762 'mysql_charset': 'utf8', 'sqlite_autoincrement': True,}
4761 4763 )
4762 4764
4763 4765 branch_rule_id = Column('branch_rule_id', Integer(), primary_key=True)
4764 4766
4765 4767 repository_id = Column('repository_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
4766 4768 repo = relationship('Repository', backref='user_branch_perms')
4767 4769
4768 4770 permission_id = Column('permission_id', Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
4769 4771 permission = relationship('Permission')
4770 4772
4771 4773 rule_to_perm_id = Column('rule_to_perm_id', Integer(), ForeignKey('repo_to_perm.repo_to_perm_id'), nullable=False, unique=None, default=None)
4772 4774 user_repo_to_perm = relationship('UserRepoToPerm')
4773 4775
4774 4776 rule_order = Column('rule_order', Integer(), nullable=False)
4775 4777 _branch_pattern = Column('branch_pattern', UnicodeText().with_variant(UnicodeText(2048), 'mysql'), default=u'*') # glob
4776 4778 _branch_hash = Column('branch_hash', UnicodeText().with_variant(UnicodeText(2048), 'mysql'))
4777 4779
4778 4780 def __unicode__(self):
4779 4781 return u'<UserBranchPermission(%s => %r)>' % (
4780 4782 self.user_repo_to_perm, self.branch_pattern)
4781 4783
4782 4784
4783 4785 class UserGroupToRepoBranchPermission(Base, _BaseBranchPerms):
4784 4786 __tablename__ = 'user_group_to_repo_branch_permissions'
4785 4787 __table_args__ = (
4786 4788 {'extend_existing': True, 'mysql_engine': 'InnoDB',
4787 4789 'mysql_charset': 'utf8', 'sqlite_autoincrement': True,}
4788 4790 )
4789 4791
4790 4792 branch_rule_id = Column('branch_rule_id', Integer(), primary_key=True)
4791 4793
4792 4794 repository_id = Column('repository_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
4793 4795 repo = relationship('Repository', backref='user_group_branch_perms')
4794 4796
4795 4797 permission_id = Column('permission_id', Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
4796 4798 permission = relationship('Permission')
4797 4799
4798 4800 rule_to_perm_id = Column('rule_to_perm_id', Integer(), ForeignKey('users_group_repo_to_perm.users_group_to_perm_id'), nullable=False, unique=None, default=None)
4799 4801 user_group_repo_to_perm = relationship('UserGroupRepoToPerm')
4800 4802
4801 4803 rule_order = Column('rule_order', Integer(), nullable=False)
4802 4804 _branch_pattern = Column('branch_pattern', UnicodeText().with_variant(UnicodeText(2048), 'mysql'), default=u'*') # glob
4803 4805 _branch_hash = Column('branch_hash', UnicodeText().with_variant(UnicodeText(2048), 'mysql'))
4804 4806
4805 4807 def __unicode__(self):
4806 4808 return u'<UserGroupBranchPermission(%s => %r)>' % (
4807 4809 self.user_group_repo_to_perm, self.branch_pattern)
4808 4810
4809 4811
4810 4812 class UserBookmark(Base, BaseModel):
4811 4813 __tablename__ = 'user_bookmarks'
4812 4814 __table_args__ = (
4813 4815 UniqueConstraint('user_id', 'bookmark_repo_id'),
4814 4816 UniqueConstraint('user_id', 'bookmark_repo_group_id'),
4815 4817 UniqueConstraint('user_id', 'bookmark_position'),
4816 4818 base_table_args
4817 4819 )
4818 4820
4819 4821 user_bookmark_id = Column("user_bookmark_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
4820 4822 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
4821 4823 position = Column("bookmark_position", Integer(), nullable=False)
4822 4824 title = Column("bookmark_title", String(255), nullable=True, unique=None, default=None)
4823 4825 redirect_url = Column("bookmark_redirect_url", String(10240), nullable=True, unique=None, default=None)
4824 4826 created_on = Column("created_on", DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
4825 4827
4826 4828 bookmark_repo_id = Column("bookmark_repo_id", Integer(), ForeignKey("repositories.repo_id"), nullable=True, unique=None, default=None)
4827 4829 bookmark_repo_group_id = Column("bookmark_repo_group_id", Integer(), ForeignKey("groups.group_id"), nullable=True, unique=None, default=None)
4828 4830
4829 4831 user = relationship("User")
4830 4832
4831 4833 repository = relationship("Repository")
4832 4834 repository_group = relationship("RepoGroup")
4833 4835
4834 4836 @classmethod
4835 4837 def get_by_position_for_user(cls, position, user_id):
4836 4838 return cls.query() \
4837 4839 .filter(UserBookmark.user_id == user_id) \
4838 4840 .filter(UserBookmark.position == position).scalar()
4839 4841
4840 4842 @classmethod
4841 4843 def get_bookmarks_for_user(cls, user_id):
4842 4844 return cls.query() \
4843 4845 .filter(UserBookmark.user_id == user_id) \
4844 4846 .options(joinedload(UserBookmark.repository)) \
4845 4847 .options(joinedload(UserBookmark.repository_group)) \
4846 4848 .order_by(UserBookmark.position.asc()) \
4847 4849 .all()
4848 4850
4849 4851 def __unicode__(self):
4850 4852 return u'<UserBookmark(%d @ %r)>' % (self.position, self.redirect_url)
4851 4853
4852 4854
4853 4855 class FileStore(Base, BaseModel):
4854 4856 __tablename__ = 'file_store'
4855 4857 __table_args__ = (
4856 4858 base_table_args
4857 4859 )
4858 4860
4859 4861 file_store_id = Column('file_store_id', Integer(), primary_key=True)
4860 4862 file_uid = Column('file_uid', String(1024), nullable=False)
4861 4863 file_display_name = Column('file_display_name', UnicodeText().with_variant(UnicodeText(2048), 'mysql'), nullable=True)
4862 4864 file_description = Column('file_description', UnicodeText().with_variant(UnicodeText(10240), 'mysql'), nullable=True)
4863 4865 file_org_name = Column('file_org_name', UnicodeText().with_variant(UnicodeText(10240), 'mysql'), nullable=False)
4864 4866
4865 4867 # sha256 hash
4866 4868 file_hash = Column('file_hash', String(512), nullable=False)
4867 4869 file_size = Column('file_size', Integer(), nullable=False)
4868 4870
4869 4871 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
4870 4872 accessed_on = Column('accessed_on', DateTime(timezone=False), nullable=True)
4871 4873 accessed_count = Column('accessed_count', Integer(), default=0)
4872 4874
4873 4875 enabled = Column('enabled', Boolean(), nullable=False, default=True)
4874 4876
4875 4877 # if repo/repo_group reference is set, check for permissions
4876 4878 check_acl = Column('check_acl', Boolean(), nullable=False, default=True)
4877 4879
4878 4880 user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=False)
4879 4881 upload_user = relationship('User', lazy='joined', primaryjoin='User.user_id==FileStore.user_id')
4880 4882
4881 4883 # scope limited to a user the requester has access to
4882 4884 scope_user_id = Column(
4883 4885 'scope_user_id', Integer(), ForeignKey('users.user_id'),
4884 4886 nullable=True, unique=None, default=None)
4885 4887 user = relationship('User', lazy='joined', primaryjoin='User.user_id==FileStore.scope_user_id')
4886 4888
4887 4889 # scope limited to a user group the requester has access to
4888 4890 scope_user_group_id = Column(
4889 4891 'scope_user_group_id', Integer(), ForeignKey('users_groups.users_group_id'),
4890 4892 nullable=True, unique=None, default=None)
4891 4893 user_group = relationship('UserGroup', lazy='joined')
4892 4894
4893 4895 # scope limited to a repo the requester has access to
4894 4896 scope_repo_id = Column(
4895 4897 'scope_repo_id', Integer(), ForeignKey('repositories.repo_id'),
4896 4898 nullable=True, unique=None, default=None)
4897 4899 repo = relationship('Repository', lazy='joined')
4898 4900
4899 4901 # scope limited to a repo group the requester has access to
4900 4902 scope_repo_group_id = Column(
4901 4903 'scope_repo_group_id', Integer(), ForeignKey('groups.group_id'),
4902 4904 nullable=True, unique=None, default=None)
4903 4905 repo_group = relationship('RepoGroup', lazy='joined')
4904 4906
4905 4907 @classmethod
4906 4908 def create(cls, file_uid, filename, file_hash, file_size, file_display_name='',
4907 4909 file_description='', enabled=True, check_acl=True,
4908 4910 user_id=None, scope_repo_id=None, scope_repo_group_id=None):
4909 4911
4910 4912 store_entry = FileStore()
4911 4913 store_entry.file_uid = file_uid
4912 4914 store_entry.file_display_name = file_display_name
4913 4915 store_entry.file_org_name = filename
4914 4916 store_entry.file_size = file_size
4915 4917 store_entry.file_hash = file_hash
4916 4918 store_entry.file_description = file_description
4917 4919
4918 4920 store_entry.check_acl = check_acl
4919 4921 store_entry.enabled = enabled
4920 4922
4921 4923 store_entry.user_id = user_id
4922 4924 store_entry.scope_repo_id = scope_repo_id
4923 4925 store_entry.scope_repo_group_id = scope_repo_group_id
4924 4926 return store_entry
4925 4927
4926 4928 @classmethod
4927 4929 def bump_access_counter(cls, file_uid, commit=True):
4928 4930 FileStore().query()\
4929 4931 .filter(FileStore.file_uid == file_uid)\
4930 4932 .update({FileStore.accessed_count: (FileStore.accessed_count + 1),
4931 4933 FileStore.accessed_on: datetime.datetime.now()})
4932 4934 if commit:
4933 4935 Session().commit()
4934 4936
4935 4937 def __repr__(self):
4936 4938 return '<FileStore({})>'.format(self.file_store_id)
4937 4939
4938 4940
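A minimal sketch (not part of this changeset) of registering an uploaded file and bumping its access counter via the helpers above; the uid, hash and size values are hypothetical and would normally come from the upload handler.

    entry = FileStore.create(
        file_uid='8f14e45f-artifact',      # hypothetical storage uid
        filename='build.log',
        file_hash='0' * 64,                # sha256 of the stored content
        file_size=2048,
        file_description='CI build log',
        user_id=1)
    Session().add(entry)
    Session().commit()

    # later, on each download
    FileStore.bump_access_counter('8f14e45f-artifact')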
4939 4941 class DbMigrateVersion(Base, BaseModel):
4940 4942 __tablename__ = 'db_migrate_version'
4941 4943 __table_args__ = (
4942 4944 base_table_args,
4943 4945 )
4944 4946
4945 4947 repository_id = Column('repository_id', String(250), primary_key=True)
4946 4948 repository_path = Column('repository_path', Text)
4947 4949 version = Column('version', Integer)
4948 4950
4949 4951 @classmethod
4950 4952 def set_version(cls, version):
4951 4953 """
4952 4954 Helper for forcing a different version, usually for debugging purposes via ishell.
4953 4955 """
4954 4956 ver = DbMigrateVersion.query().first()
4955 4957 ver.version = version
4956 4958 Session().commit()
4957 4959
4958 4960
4959 4961 class DbSession(Base, BaseModel):
4960 4962 __tablename__ = 'db_session'
4961 4963 __table_args__ = (
4962 4964 base_table_args,
4963 4965 )
4964 4966
4965 4967 def __repr__(self):
4966 4968 return '<DB:DbSession({})>'.format(self.id)
4967 4969
4968 4970 id = Column('id', Integer())
4969 4971 namespace = Column('namespace', String(255), primary_key=True)
4970 4972 accessed = Column('accessed', DateTime, nullable=False)
4971 4973 created = Column('created', DateTime, nullable=False)
4972 4974 data = Column('data', PickleType, nullable=False)
@@ -1,76 +1,133 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import pytest
22 22
23 23 from rhodecode.lib.encrypt import (
24 24 AESCipher, SignatureVerificationError, InvalidDecryptedValue)
25 from rhodecode.lib.encrypt2 import (Encryptor, InvalidToken)
25 26
26 27
27 28 class TestEncryptModule(object):
28 29
29 30 @pytest.mark.parametrize(
30 31 "key, text",
31 32 [
32 33 ('a', 'short'),
33 34 ('a'*64, 'too long(trimmed to 32)'),
34 35 ('a'*32, 'just enough'),
35 36 ('ąćęćę', 'non ascii'),
36 37 ('$asa$asa', 'special $ used'),
37 38 ]
38 39 )
39 40 def test_encryption(self, key, text):
40 41 enc = AESCipher(key).encrypt(text)
41 42 assert AESCipher(key).decrypt(enc) == text
42 43
43 44 def test_encryption_with_hmac(self):
44 45 key = 'secret'
45 46 text = 'ihatemysql'
46 47 enc = AESCipher(key, hmac=True).encrypt(text)
47 48 assert AESCipher(key, hmac=True).decrypt(enc) == text
48 49
49 50 def test_encryption_with_hmac_with_bad_key(self):
50 51 key = 'secretstring'
51 52 text = 'ihatemysql'
52 53 enc = AESCipher(key, hmac=True).encrypt(text)
53 54
54 55 with pytest.raises(SignatureVerificationError) as e:
55 56 assert AESCipher('differentsecret', hmac=True).decrypt(enc) == ''
56 57
57 58 assert 'Encryption signature verification failed' in str(e)
58 59
59 60 def test_encryption_with_hmac_with_bad_data(self):
60 61 key = 'secret'
61 62 text = 'ihatemysql'
62 63 enc = AESCipher(key, hmac=True).encrypt(text)
63 64 enc = 'xyz' + enc[3:]
64 65 with pytest.raises(SignatureVerificationError) as e:
65 66 assert AESCipher(key, hmac=True).decrypt(enc) == text
66 67
67 68 assert 'Encryption signature verification failed' in str(e)
68 69
69 70 def test_encryption_with_hmac_with_bad_key_not_strict(self):
70 71 key = 'secretstring'
71 72 text = 'ihatemysql'
72 73 enc = AESCipher(key, hmac=True).encrypt(text)
73 74
74 75 assert isinstance(AESCipher(
75 76 'differentsecret', hmac=True, strict_verification=False
76 77 ).decrypt(enc), InvalidDecryptedValue)
78
79
80 class TestEncryptModule2(object):
81
82 @pytest.mark.parametrize(
83 "key, text",
84 [
85 ('a', 'short'),
86 ('a'*64, 'too long(trimmed to 32)'),
87 ('a'*32, 'just enough'),
88 ('ąćęćę', 'non ascii'),
89 ('$asa$asa', 'special $ used'),
90 ]
91 )
92 def test_encryption(self, key, text):
93 enc = Encryptor(key).encrypt(text)
94 assert Encryptor(key).decrypt(enc) == text
95
96 def test_encryption_with_bad_key(self):
97 key = 'secretstring'
98 text = 'ihatemysql'
99 enc = Encryptor(key).encrypt(text)
100
101 assert Encryptor('differentsecret').decrypt(enc) == ''
102
103 def test_encryption_with_bad_key_raises(self):
104 key = 'secretstring'
105 text = 'ihatemysql'
106 enc = Encryptor(key).encrypt(text)
107
108 with pytest.raises(InvalidToken) as e:
109 Encryptor('differentsecret').decrypt(enc, safe=False)
110
111 assert 'InvalidToken' in str(e)
112
113 def test_encryption_with_bad_format_data(self):
114 key = 'secret'
115 text = 'ihatemysql'
116 enc = Encryptor(key).encrypt(text)
117 enc = '$xyz' + enc[3:]
118
119 with pytest.raises(ValueError) as e:
120 Encryptor(key).decrypt(enc, safe=False)
121
122 assert 'Encrypted Data has invalid format' in str(e)
123
124 def test_encryption_with_bad_data(self):
125 key = 'secret'
126 text = 'ihatemysql'
127 enc = Encryptor(key).encrypt(text)
128 enc = enc[:-5]
129
130 with pytest.raises(InvalidToken) as e:
131 Encryptor(key).decrypt(enc, safe=False)
132
133 assert 'InvalidToken' in str(e)
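To round out the tests above, a minimal usage sketch of the new backend (assuming only the behaviour exercised by these tests): `decrypt()` returns an empty string on a key mismatch unless `safe=False` is passed, in which case `InvalidToken` propagates.

    from rhodecode.lib.encrypt2 import Encryptor, InvalidToken

    secret = 'rhodecode-enc-secret'        # hypothetical encryption key
    token = Encryptor(secret).encrypt('my-database-password')

    assert Encryptor(secret).decrypt(token) == 'my-database-password'
    assert Encryptor('wrong-secret').decrypt(token) == ''    # safe mode swallows errors

    try:
        Encryptor('wrong-secret').decrypt(token, safe=False)
    except InvalidToken:
        pass                                                  # strict mode raises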