caches: use dogpile for sql_cache_short region.
marcink - r2883:f2837b35 default
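This commit switches the `sql_cache_short` region (short-lived caching of SQL query results) from Beaker to dogpile.cache, configured through the new `rc_cache.sql_cache_short.*` keys added in the hunk below. A minimal Python sketch of how such a dogpile region is typically defined and used, assuming the stock `dogpile.cache.memory` backend in place of RhodeCode's own `dogpile.cache.rc.memory_lru` wrapper and a hypothetical `get_setting` function:

from dogpile.cache import make_region

# Sketch only: mirrors the new rc_cache.sql_cache_short settings (30 second
# expiration). The stock dogpile.cache.memory backend is used so the example
# runs anywhere; the real config uses RhodeCode's dogpile.cache.rc.memory_lru
# wrapper.
sql_cache_short = make_region().configure(
    "dogpile.cache.memory",
    expiration_time=30,
)

@sql_cache_short.cache_on_arguments()
def get_setting(name):
    # hypothetical stand-in for the SQL query whose result gets cached
    print("cache miss, querying the database for %s" % name)
    return {"name": name, "value": "example"}

get_setting("realm")  # first call executes the function
get_setting("realm")  # served from the region for the next 30 seconds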
@@ -1,738 +1,738 @@
1
1
2
2
3 ################################################################################
3 ################################################################################
4 ## RHODECODE COMMUNITY EDITION CONFIGURATION ##
4 ## RHODECODE COMMUNITY EDITION CONFIGURATION ##
5 # The %(here)s variable will be replaced with the parent directory of this file#
5 # The %(here)s variable will be replaced with the parent directory of this file#
6 ################################################################################
6 ################################################################################
7
7
8 [DEFAULT]
8 [DEFAULT]
9 debug = true
9 debug = true
10
10
11 ################################################################################
11 ################################################################################
12 ## EMAIL CONFIGURATION ##
12 ## EMAIL CONFIGURATION ##
13 ## Uncomment and replace with the email address which should receive ##
13 ## Uncomment and replace with the email address which should receive ##
14 ## any error reports after an application crash ##
14 ## any error reports after an application crash ##
15 ## Additionally these settings will be used by the RhodeCode mailing system ##
15 ## Additionally these settings will be used by the RhodeCode mailing system ##
16 ################################################################################
16 ################################################################################
17
17
18 ## prefix all email subjects with the given prefix, helps filtering out emails
18 ## prefix all email subjects with the given prefix, helps filtering out emails
19 #email_prefix = [RhodeCode]
19 #email_prefix = [RhodeCode]
20
20
21 ## email FROM address from which all mails will be sent
21 ## email FROM address from which all mails will be sent
22 #app_email_from = rhodecode-noreply@localhost
22 #app_email_from = rhodecode-noreply@localhost
23
23
24 ## Uncomment and replace with the address which should receive any error report
24 ## Uncomment and replace with the address which should receive any error report
25 ## note: using appenlight for error handling doesn't need this to be uncommented
25 ## note: using appenlight for error handling doesn't need this to be uncommented
26 #email_to = admin@localhost
26 #email_to = admin@localhost
27
27
28 ## in case of application errors, send error emails from this address
28 ## in case of application errors, send error emails from this address
29 #error_email_from = rhodecode_error@localhost
29 #error_email_from = rhodecode_error@localhost
30
30
31 ## additional error message to be sent in case of server crash
31 ## additional error message to be sent in case of server crash
32 #error_message =
32 #error_message =
33
33
34
34
35 #smtp_server = mail.server.com
35 #smtp_server = mail.server.com
36 #smtp_username =
36 #smtp_username =
37 #smtp_password =
37 #smtp_password =
38 #smtp_port =
38 #smtp_port =
39 #smtp_use_tls = false
39 #smtp_use_tls = false
40 #smtp_use_ssl = true
40 #smtp_use_ssl = true
41 ## Specify available auth parameters here (e.g. LOGIN PLAIN CRAM-MD5, etc.)
41 ## Specify available auth parameters here (e.g. LOGIN PLAIN CRAM-MD5, etc.)
42 #smtp_auth =
42 #smtp_auth =
43
43
44 [server:main]
44 [server:main]
45 ## COMMON ##
45 ## COMMON ##
46 host = 127.0.0.1
46 host = 127.0.0.1
47 port = 5000
47 port = 5000
48
48
49 ##################################
49 ##################################
50 ## WAITRESS WSGI SERVER ##
50 ## WAITRESS WSGI SERVER ##
51 ## Recommended for Development ##
51 ## Recommended for Development ##
52 ##################################
52 ##################################
53
53
54 use = egg:waitress#main
54 use = egg:waitress#main
55 ## number of worker threads
55 ## number of worker threads
56 threads = 5
56 threads = 5
57 ## MAX BODY SIZE 100GB
57 ## MAX BODY SIZE 100GB
58 max_request_body_size = 107374182400
58 max_request_body_size = 107374182400
59 ## Use poll instead of select, fixes file descriptor limit problems.
59 ## Use poll instead of select, fixes file descriptor limit problems.
60 ## May not work on old Windows systems.
60 ## May not work on old Windows systems.
61 asyncore_use_poll = true
61 asyncore_use_poll = true
62
62
63
63
64 ##########################
64 ##########################
65 ## GUNICORN WSGI SERVER ##
65 ## GUNICORN WSGI SERVER ##
66 ##########################
66 ##########################
67 ## run with gunicorn --log-config rhodecode.ini --paste rhodecode.ini
67 ## run with gunicorn --log-config rhodecode.ini --paste rhodecode.ini
68
68
69 #use = egg:gunicorn#main
69 #use = egg:gunicorn#main
70 ## Sets the number of process workers. You must set `instance_id = *`
70 ## Sets the number of process workers. You must set `instance_id = *`
71 ## when this option is set to more than one worker, recommended
71 ## when this option is set to more than one worker, recommended
72 ## value is (2 * NUMBER_OF_CPUS + 1), eg 2CPU = 5 workers
72 ## value is (2 * NUMBER_OF_CPUS + 1), eg 2CPU = 5 workers
73 ## The `instance_id = *` must be set in the [app:main] section below
73 ## The `instance_id = *` must be set in the [app:main] section below
74 #workers = 2
74 #workers = 2
75 ## number of threads for each worker, must be set to 1 for gevent
75 ## number of threads for each worker, must be set to 1 for gevent
76 ## generally recommended to be 1
76 ## generally recommended to be 1
77 #threads = 1
77 #threads = 1
78 ## process name
78 ## process name
79 #proc_name = rhodecode
79 #proc_name = rhodecode
80 ## type of worker class, one of sync, gevent
80 ## type of worker class, one of sync, gevent
81 ## for bigger setups it is recommended to use a worker class other than sync
81 ## for bigger setups it is recommended to use a worker class other than sync
82 #worker_class = gevent
82 #worker_class = gevent
83 ## The maximum number of simultaneous clients. Valid only for Gevent
83 ## The maximum number of simultaneous clients. Valid only for Gevent
84 #worker_connections = 10
84 #worker_connections = 10
85 ## max number of requests that worker will handle before being gracefully
85 ## max number of requests that worker will handle before being gracefully
86 ## restarted, could prevent memory leaks
86 ## restarted, could prevent memory leaks
87 #max_requests = 1000
87 #max_requests = 1000
88 #max_requests_jitter = 30
88 #max_requests_jitter = 30
89 ## amount of time a worker can spend handling a request before it
89 ## amount of time a worker can spend handling a request before it
90 ## gets killed and restarted. Set to 6hrs
90 ## gets killed and restarted. Set to 6hrs
91 #timeout = 21600
91 #timeout = 21600
92
92
93
93
94 ## prefix middleware for RhodeCode.
94 ## prefix middleware for RhodeCode.
95 ## recommended when using proxy setup.
95 ## recommended when using proxy setup.
96 ## allows to set RhodeCode under a prefix in server.
96 ## allows to set RhodeCode under a prefix in server.
97 ## eg https://server.com/custom_prefix. Enable `filter-with =` option below as well.
97 ## eg https://server.com/custom_prefix. Enable `filter-with =` option below as well.
98 ## And set your prefix like: `prefix = /custom_prefix`
98 ## And set your prefix like: `prefix = /custom_prefix`
99 ## be sure to also set beaker.session.cookie_path = /custom_prefix if you need
99 ## be sure to also set beaker.session.cookie_path = /custom_prefix if you need
100 ## to make your cookies only work on prefix url
100 ## to make your cookies only work on prefix url
101 [filter:proxy-prefix]
101 [filter:proxy-prefix]
102 use = egg:PasteDeploy#prefix
102 use = egg:PasteDeploy#prefix
103 prefix = /
103 prefix = /
104
104
105 [app:main]
105 [app:main]
106 use = egg:rhodecode-enterprise-ce
106 use = egg:rhodecode-enterprise-ce
107
107
108 ## enable proxy prefix middleware, defined above
108 ## enable proxy prefix middleware, defined above
109 #filter-with = proxy-prefix
109 #filter-with = proxy-prefix
110
110
111 # During development we want to have the debug toolbar enabled
111 # During development we want to have the debug toolbar enabled
112 pyramid.includes =
112 pyramid.includes =
113 pyramid_debugtoolbar
113 pyramid_debugtoolbar
114 rhodecode.lib.middleware.request_wrapper
114 rhodecode.lib.middleware.request_wrapper
115
115
116 pyramid.reload_templates = true
116 pyramid.reload_templates = true
117
117
118 debugtoolbar.hosts = 0.0.0.0/0
118 debugtoolbar.hosts = 0.0.0.0/0
119 debugtoolbar.exclude_prefixes =
119 debugtoolbar.exclude_prefixes =
120 /css
120 /css
121 /fonts
121 /fonts
122 /images
122 /images
123 /js
123 /js
124
124
125 ## RHODECODE PLUGINS ##
125 ## RHODECODE PLUGINS ##
126 rhodecode.includes =
126 rhodecode.includes =
127 rhodecode.api
127 rhodecode.api
128
128
129
129
130 # api prefix url
130 # api prefix url
131 rhodecode.api.url = /_admin/api
131 rhodecode.api.url = /_admin/api
132
132
133
133
134 ## END RHODECODE PLUGINS ##
134 ## END RHODECODE PLUGINS ##
135
135
136 ## encryption key used to encrypt social plugin tokens,
136 ## encryption key used to encrypt social plugin tokens,
137 ## remote_urls with credentials etc, if not set it defaults to
137 ## remote_urls with credentials etc, if not set it defaults to
138 ## `beaker.session.secret`
138 ## `beaker.session.secret`
139 #rhodecode.encrypted_values.secret =
139 #rhodecode.encrypted_values.secret =
140
140
141 ## decryption strict mode (enabled by default). It controls if decryption raises
141 ## decryption strict mode (enabled by default). It controls if decryption raises
142 ## `SignatureVerificationError` in case of wrong key, or damaged encryption data.
142 ## `SignatureVerificationError` in case of wrong key, or damaged encryption data.
143 #rhodecode.encrypted_values.strict = false
143 #rhodecode.encrypted_values.strict = false
144
144
145 ## return gzipped responses from Rhodecode (static files/application)
145 ## return gzipped responses from Rhodecode (static files/application)
146 gzip_responses = false
146 gzip_responses = false
147
147
148 ## autogenerate javascript routes file on startup
148 ## autogenerate javascript routes file on startup
149 generate_js_files = false
149 generate_js_files = false
150
150
151 ## Optional Languages
151 ## Optional Languages
152 ## en(default), be, de, es, fr, it, ja, pl, pt, ru, zh
152 ## en(default), be, de, es, fr, it, ja, pl, pt, ru, zh
153 lang = en
153 lang = en
154
154
155 ## perform a full repository scan on each server start, this should be
155 ## perform a full repository scan on each server start, this should be
156 ## set to false after first startup, to allow faster server restarts.
156 ## set to false after first startup, to allow faster server restarts.
157 startup.import_repos = false
157 startup.import_repos = false
158
158
159 ## Uncomment and set this path to use archive download cache.
159 ## Uncomment and set this path to use archive download cache.
160 ## Once enabled, generated archives will be cached at this location
160 ## Once enabled, generated archives will be cached at this location
161 ## and served from the cache during subsequent requests for the same archive of
161 ## and served from the cache during subsequent requests for the same archive of
162 ## the repository.
162 ## the repository.
163 #archive_cache_dir = /tmp/tarballcache
163 #archive_cache_dir = /tmp/tarballcache
164
164
165 ## URL at which the application is running. This is used for bootstrapping
165 ## URL at which the application is running. This is used for bootstrapping
166 ## requests in context when no web request is available. Used in ishell, or
166 ## requests in context when no web request is available. Used in ishell, or
167 ## SSH calls. Set this for events to receive proper url for SSH calls.
167 ## SSH calls. Set this for events to receive proper url for SSH calls.
168 app.base_url = http://rhodecode.local
168 app.base_url = http://rhodecode.local
169
169
170 ## change this to unique ID for security
170 ## change this to unique ID for security
171 app_instance_uuid = rc-production
171 app_instance_uuid = rc-production
172
172
173 ## cut off limit for large diffs (size in bytes). If the overall diff size of a
173 ## cut off limit for large diffs (size in bytes). If the overall diff size of a
174 ## commit or pull request exceeds this limit, the diff will be displayed
174 ## commit or pull request exceeds this limit, the diff will be displayed
175 ## partially. E.g 512000 == 512Kb
175 ## partially. E.g 512000 == 512Kb
176 cut_off_limit_diff = 512000
176 cut_off_limit_diff = 512000
177
177
178 ## cut off limit for large files inside diffs (size in bytes). Each individual
178 ## cut off limit for large files inside diffs (size in bytes). Each individual
179 ## file inside diff which exceeds this limit will be displayed partially.
179 ## file inside diff which exceeds this limit will be displayed partially.
180 ## E.g 128000 == 128Kb
180 ## E.g 128000 == 128Kb
181 cut_off_limit_file = 128000
181 cut_off_limit_file = 128000
182
182
183 ## use cached version of scm repo everywhere
183 ## use cached version of scm repo everywhere
184 vcs_full_cache = true
184 vcs_full_cache = true
185
185
186 ## force https in RhodeCode, fixes https redirects, assumes it's always https
186 ## force https in RhodeCode, fixes https redirects, assumes it's always https
187 ## Normally this is controlled by proper http flags sent from http server
187 ## Normally this is controlled by proper http flags sent from http server
188 force_https = false
188 force_https = false
189
189
190 ## use Strict-Transport-Security headers
190 ## use Strict-Transport-Security headers
191 use_htsts = false
191 use_htsts = false
192
192
193 ## git rev filter option, --all is the default filter, if you need to
193 ## git rev filter option, --all is the default filter, if you need to
194 ## hide all refs in changelog switch this to --branches --tags
194 ## hide all refs in changelog switch this to --branches --tags
195 git_rev_filter = --branches --tags
195 git_rev_filter = --branches --tags
196
196
197 # Set to true if your repos are exposed using the dumb protocol
197 # Set to true if your repos are exposed using the dumb protocol
198 git_update_server_info = false
198 git_update_server_info = false
199
199
200 ## RSS/ATOM feed options
200 ## RSS/ATOM feed options
201 rss_cut_off_limit = 256000
201 rss_cut_off_limit = 256000
202 rss_items_per_page = 10
202 rss_items_per_page = 10
203 rss_include_diff = false
203 rss_include_diff = false
204
204
205 ## gist URL alias, used to create nicer urls for gist. This should be an
205 ## gist URL alias, used to create nicer urls for gist. This should be an
206 ## url that does rewrites to _admin/gists/{gistid}.
206 ## url that does rewrites to _admin/gists/{gistid}.
207 ## example: http://gist.rhodecode.org/{gistid}. Empty means use the internal
207 ## example: http://gist.rhodecode.org/{gistid}. Empty means use the internal
208 ## RhodeCode url, ie. http[s]://rhodecode.server/_admin/gists/{gistid}
208 ## RhodeCode url, ie. http[s]://rhodecode.server/_admin/gists/{gistid}
209 gist_alias_url =
209 gist_alias_url =
210
210
211 ## List of views (using glob pattern syntax) that AUTH TOKENS could be
211 ## List of views (using glob pattern syntax) that AUTH TOKENS could be
212 ## used for access.
212 ## used for access.
213 ## Adding ?auth_token=TOKEN_HASH to the url authenticates this request as if it
213 ## Adding ?auth_token=TOKEN_HASH to the url authenticates this request as if it
214 ## came from the logged-in user who owns this authentication token.
214 ## came from the logged-in user who owns this authentication token.
215 ## Additionally, the @TOKEN syntax can be used to bind the view to a specific
215 ## Additionally, the @TOKEN syntax can be used to bind the view to a specific
216 ## authentication token. Such a view would only be accessible when used together
216 ## authentication token. Such a view would only be accessible when used together
217 ## with this authentication token
217 ## with this authentication token
218 ##
218 ##
219 ## list of all views can be found under `/_admin/permissions/auth_token_access`
219 ## list of all views can be found under `/_admin/permissions/auth_token_access`
220 ## The list should be "," separated and on a single line.
220 ## The list should be "," separated and on a single line.
221 ##
221 ##
222 ## Most common views to enable:
222 ## Most common views to enable:
223 # RepoCommitsView:repo_commit_download
223 # RepoCommitsView:repo_commit_download
224 # RepoCommitsView:repo_commit_patch
224 # RepoCommitsView:repo_commit_patch
225 # RepoCommitsView:repo_commit_raw
225 # RepoCommitsView:repo_commit_raw
226 # RepoCommitsView:repo_commit_raw@TOKEN
226 # RepoCommitsView:repo_commit_raw@TOKEN
227 # RepoFilesView:repo_files_diff
227 # RepoFilesView:repo_files_diff
228 # RepoFilesView:repo_archivefile
228 # RepoFilesView:repo_archivefile
229 # RepoFilesView:repo_file_raw
229 # RepoFilesView:repo_file_raw
230 # GistView:*
230 # GistView:*
231 api_access_controllers_whitelist =
231 api_access_controllers_whitelist =
232
232
233 ## default encoding used to convert from and to unicode
233 ## default encoding used to convert from and to unicode
234 ## can also be a comma separated list of encodings in case of mixed encodings
234 ## can also be a comma separated list of encodings in case of mixed encodings
235 default_encoding = UTF-8
235 default_encoding = UTF-8
236
236
237 ## instance-id prefix
237 ## instance-id prefix
238 ## a prefix key for this instance used for cache invalidation when running
238 ## a prefix key for this instance used for cache invalidation when running
239 ## multiple instances of rhodecode, make sure it's globally unique for
239 ## multiple instances of rhodecode, make sure it's globally unique for
240 ## all running rhodecode instances. Leave empty if you don't use it
240 ## all running rhodecode instances. Leave empty if you don't use it
241 instance_id =
241 instance_id =
242
242
243 ## Fallback authentication plugin. Set this to a plugin ID to force the usage
243 ## Fallback authentication plugin. Set this to a plugin ID to force the usage
244 ## of an authentication plugin even if it is disabled by its settings.
244 ## of an authentication plugin even if it is disabled by its settings.
245 ## This could be useful if you are unable to log in to the system due to broken
245 ## This could be useful if you are unable to log in to the system due to broken
246 ## authentication settings. Then you can enable e.g. the internal rhodecode auth
246 ## authentication settings. Then you can enable e.g. the internal rhodecode auth
247 ## module to log in again and fix the settings.
247 ## module to log in again and fix the settings.
248 ##
248 ##
249 ## Available builtin plugin IDs (hash is part of the ID):
249 ## Available builtin plugin IDs (hash is part of the ID):
250 ## egg:rhodecode-enterprise-ce#rhodecode
250 ## egg:rhodecode-enterprise-ce#rhodecode
251 ## egg:rhodecode-enterprise-ce#pam
251 ## egg:rhodecode-enterprise-ce#pam
252 ## egg:rhodecode-enterprise-ce#ldap
252 ## egg:rhodecode-enterprise-ce#ldap
253 ## egg:rhodecode-enterprise-ce#jasig_cas
253 ## egg:rhodecode-enterprise-ce#jasig_cas
254 ## egg:rhodecode-enterprise-ce#headers
254 ## egg:rhodecode-enterprise-ce#headers
255 ## egg:rhodecode-enterprise-ce#crowd
255 ## egg:rhodecode-enterprise-ce#crowd
256 #rhodecode.auth_plugin_fallback = egg:rhodecode-enterprise-ce#rhodecode
256 #rhodecode.auth_plugin_fallback = egg:rhodecode-enterprise-ce#rhodecode
257
257
258 ## alternative return HTTP header for failed authentication. Default HTTP
258 ## alternative return HTTP header for failed authentication. Default HTTP
259 ## response is 401 HTTPUnauthorized. Currently HG clients have troubles with
259 ## response is 401 HTTPUnauthorized. Currently HG clients have troubles with
260 ## handling that causing a series of failed authentication calls.
260 ## handling that causing a series of failed authentication calls.
261 ## Set this variable to 403 to return HTTPForbidden, or any other HTTP code
261 ## Set this variable to 403 to return HTTPForbidden, or any other HTTP code
262 ## This will be served instead of the default 401 on bad authentication
262 ## This will be served instead of the default 401 on bad authentication
263 auth_ret_code =
263 auth_ret_code =
264
264
265 ## use special detection method when serving auth_ret_code, instead of serving
265 ## use special detection method when serving auth_ret_code, instead of serving
266 ## ret_code directly, use 401 initially (Which triggers credentials prompt)
266 ## ret_code directly, use 401 initially (Which triggers credentials prompt)
267 ## and then serve auth_ret_code to clients
267 ## and then serve auth_ret_code to clients
268 auth_ret_code_detection = false
268 auth_ret_code_detection = false
269
269
270 ## locking return code. When repository is locked return this HTTP code. 2XX
270 ## locking return code. When repository is locked return this HTTP code. 2XX
271 ## codes don't break the transactions while 4XX codes do
271 ## codes don't break the transactions while 4XX codes do
272 lock_ret_code = 423
272 lock_ret_code = 423
273
273
274 ## allows to change the repository location in settings page
274 ## allows to change the repository location in settings page
275 allow_repo_location_change = true
275 allow_repo_location_change = true
276
276
277 ## allows to setup custom hooks in settings page
277 ## allows to setup custom hooks in settings page
278 allow_custom_hooks_settings = true
278 allow_custom_hooks_settings = true
279
279
280 ## generated license token, goto license page in RhodeCode settings to obtain
280 ## generated license token, goto license page in RhodeCode settings to obtain
281 ## new token
281 ## new token
282 license_token =
282 license_token =
283
283
284 ## supervisor connection uri, for managing supervisor and logs.
284 ## supervisor connection uri, for managing supervisor and logs.
285 supervisor.uri =
285 supervisor.uri =
286 ## supervisord group name/id we only want this RC instance to handle
286 ## supervisord group name/id we only want this RC instance to handle
287 supervisor.group_id = dev
287 supervisor.group_id = dev
288
288
289 ## Display extended labs settings
289 ## Display extended labs settings
290 labs_settings_active = true
290 labs_settings_active = true
291
291
292 ####################################
292 ####################################
293 ### CELERY CONFIG ####
293 ### CELERY CONFIG ####
294 ####################################
294 ####################################
295 ## run: /path/to/celery worker \
295 ## run: /path/to/celery worker \
296 ## -E --beat --app rhodecode.lib.celerylib.loader \
296 ## -E --beat --app rhodecode.lib.celerylib.loader \
297 ## --scheduler rhodecode.lib.celerylib.scheduler.RcScheduler \
297 ## --scheduler rhodecode.lib.celerylib.scheduler.RcScheduler \
298 ## --loglevel DEBUG --ini /path/to/rhodecode.ini
298 ## --loglevel DEBUG --ini /path/to/rhodecode.ini
299
299
300 use_celery = false
300 use_celery = false
301
301
302 ## connection url to the message broker (default rabbitmq)
302 ## connection url to the message broker (default rabbitmq)
303 celery.broker_url = amqp://rabbitmq:qweqwe@localhost:5672/rabbitmqhost
303 celery.broker_url = amqp://rabbitmq:qweqwe@localhost:5672/rabbitmqhost
304
304
305 ## maximum tasks to execute before worker restart
305 ## maximum tasks to execute before worker restart
306 celery.max_tasks_per_child = 100
306 celery.max_tasks_per_child = 100
307
307
308 ## tasks will never be sent to the queue, but executed locally instead.
308 ## tasks will never be sent to the queue, but executed locally instead.
309 celery.task_always_eager = false
309 celery.task_always_eager = false
310
310
311 #####################################
311 #####################################
312 ### DOGPILE CACHE ####
312 ### DOGPILE CACHE ####
313 #####################################
313 #####################################
314 ## Default cache dir for caches. Putting this into a ramdisk
314 ## Default cache dir for caches. Putting this into a ramdisk
315 ## can boost performance, eg. /tmpfs/data_ramdisk, however this might require lots
315 ## can boost performance, eg. /tmpfs/data_ramdisk, however this might require lots
316 ## of space
316 ## of space
317 cache_dir = /tmp/rcdev/data
317 cache_dir = /tmp/rcdev/data
318
318
319 ## cache settings for permission tree, auth TTL.
319 ## cache settings for permission tree, auth TTL.
320 rc_cache.cache_perms.backend = dogpile.cache.rc.file_namespace
320 rc_cache.cache_perms.backend = dogpile.cache.rc.file_namespace
321 rc_cache.cache_perms.expiration_time = 300
321 rc_cache.cache_perms.expiration_time = 300
322 rc_cache.cache_perms.arguments.filename = /tmp/rc_cache_1
322 rc_cache.cache_perms.arguments.filename = /tmp/rc_cache_1
323
323
324 ## redis backend with distributed locks
324 ## redis backend with distributed locks
325 #rc_cache.cache_perms.backend = dogpile.cache.rc.redis
325 #rc_cache.cache_perms.backend = dogpile.cache.rc.redis
326 #rc_cache.cache_perms.expiration_time = 300
326 #rc_cache.cache_perms.expiration_time = 300
327 #rc_cache.cache_perms.arguments.host = localhost
327 #rc_cache.cache_perms.arguments.host = localhost
328 #rc_cache.cache_perms.arguments.port = 6379
328 #rc_cache.cache_perms.arguments.port = 6379
329 #rc_cache.cache_perms.arguments.db = 0
329 #rc_cache.cache_perms.arguments.db = 0
330 #rc_cache.cache_perms.arguments.redis_expiration_time = 7200
330 #rc_cache.cache_perms.arguments.redis_expiration_time = 7200
331 #rc_cache.cache_perms.arguments.distributed_lock = true
331 #rc_cache.cache_perms.arguments.distributed_lock = true
332
332
333
333
334 rc_cache.cache_repo.backend = dogpile.cache.rc.file_namespace
334 rc_cache.cache_repo.backend = dogpile.cache.rc.file_namespace
335 rc_cache.cache_repo.expiration_time = 2592000
335 rc_cache.cache_repo.expiration_time = 2592000
336 rc_cache.cache_repo.arguments.filename = /tmp/rc_cache_2
336 rc_cache.cache_repo.arguments.filename = /tmp/rc_cache_2
337
337
338 ## redis backend with distributed locks
338 ## redis backend with distributed locks
339 #rc_cache.cache_repo.backend = dogpile.cache.rc.redis
339 #rc_cache.cache_repo.backend = dogpile.cache.rc.redis
340 #rc_cache.cache_repo.expiration_time = 2592000
340 #rc_cache.cache_repo.expiration_time = 2592000
341 ## this needs to be greater than expiration_time
341 ## this needs to be greater than expiration_time
342 #rc_cache.cache_repo.arguments.redis_expiration_time = 2678400
342 #rc_cache.cache_repo.arguments.redis_expiration_time = 2678400
343 #rc_cache.cache_repo.arguments.host = localhost
343 #rc_cache.cache_repo.arguments.host = localhost
344 #rc_cache.cache_repo.arguments.port = 6379
344 #rc_cache.cache_repo.arguments.port = 6379
345 #rc_cache.cache_repo.arguments.db = 1
345 #rc_cache.cache_repo.arguments.db = 1
346 #rc_cache.cache_repo.arguments.distributed_lock = true
346 #rc_cache.cache_repo.arguments.distributed_lock = true
347
347
348 ## cache settings for SQL queries
349 rc_cache.sql_cache_short.backend = dogpile.cache.rc.memory_lru
350 rc_cache.sql_cache_short.expiration_time = 30
351
348
352
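The two `rc_cache.sql_cache_short` lines added above replace the Beaker `sql_cache_short` region that is dropped further down in this hunk. As a hedged sketch of how such ini keys can be turned into a live region with dogpile's `configure_from_config()` (hypothetical dictionary standing in for the parsed ini; the stock memory backend replaces the `rc.memory_lru` wrapper):

from dogpile.cache import make_region

# Hypothetical settings dict standing in for the parsed .ini values above.
settings = {
    "rc_cache.sql_cache_short.backend": "dogpile.cache.memory",
    "rc_cache.sql_cache_short.expiration_time": "30",
}

# configure_from_config() reads <prefix>backend, <prefix>expiration_time and
# <prefix>arguments.* keys and returns the configured region.
sql_cache_short = make_region().configure_from_config(
    settings, "rc_cache.sql_cache_short.")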
349 ####################################
353 ####################################
350 ### BEAKER CACHE ####
354 ### BEAKER CACHE ####
351 ####################################
355 ####################################
352
356
353 ## locking and default file storage for Beaker. Putting this into a ramdisk
357 ## locking and default file storage for Beaker. Putting this into a ramdisk
354 ## can boost performance, eg. %(here)s/data_ramdisk/cache/beaker_data
358 ## can boost performance, eg. %(here)s/data_ramdisk/cache/beaker_data
355 beaker.cache.data_dir = %(here)s/data/cache/beaker_data
359 beaker.cache.data_dir = %(here)s/data/cache/beaker_data
356 beaker.cache.lock_dir = %(here)s/data/cache/beaker_lock
360 beaker.cache.lock_dir = %(here)s/data/cache/beaker_lock
357
361
358 beaker.cache.regions = long_term, sql_cache_short
362 beaker.cache.regions = long_term
359
363
360 beaker.cache.long_term.type = memorylru_base
364 beaker.cache.long_term.type = memorylru_base
361 beaker.cache.long_term.expire = 172800
365 beaker.cache.long_term.expire = 172800
362 beaker.cache.long_term.key_length = 256
366 beaker.cache.long_term.key_length = 256
363
367
364 beaker.cache.sql_cache_short.type = memorylru_base
365 beaker.cache.sql_cache_short.expire = 10
366 beaker.cache.sql_cache_short.key_length = 256
367
368
368
369 ####################################
369 ####################################
370 ### BEAKER SESSION ####
370 ### BEAKER SESSION ####
371 ####################################
371 ####################################
372
372
373 ## .session.type is type of storage options for the session, current allowed
373 ## .session.type is type of storage options for the session, current allowed
374 ## types are file, ext:memcached, ext:redis, ext:database, and memory (default).
374 ## types are file, ext:memcached, ext:redis, ext:database, and memory (default).
375 beaker.session.type = file
375 beaker.session.type = file
376 beaker.session.data_dir = %(here)s/data/sessions
376 beaker.session.data_dir = %(here)s/data/sessions
377
377
378 ## db based session, fast, and allows easy management over logged in users
378 ## db based session, fast, and allows easy management over logged in users
379 #beaker.session.type = ext:database
379 #beaker.session.type = ext:database
380 #beaker.session.table_name = db_session
380 #beaker.session.table_name = db_session
381 #beaker.session.sa.url = postgresql://postgres:secret@localhost/rhodecode
381 #beaker.session.sa.url = postgresql://postgres:secret@localhost/rhodecode
382 #beaker.session.sa.url = mysql://root:secret@127.0.0.1/rhodecode
382 #beaker.session.sa.url = mysql://root:secret@127.0.0.1/rhodecode
383 #beaker.session.sa.pool_recycle = 3600
383 #beaker.session.sa.pool_recycle = 3600
384 #beaker.session.sa.echo = false
384 #beaker.session.sa.echo = false
385
385
386 beaker.session.key = rhodecode
386 beaker.session.key = rhodecode
387 beaker.session.secret = develop-rc-uytcxaz
387 beaker.session.secret = develop-rc-uytcxaz
388 beaker.session.lock_dir = %(here)s/data/sessions/lock
388 beaker.session.lock_dir = %(here)s/data/sessions/lock
389
389
390 ## Secure encrypted cookie. Requires AES and AES python libraries
390 ## Secure encrypted cookie. Requires AES and AES python libraries
391 ## you must disable beaker.session.secret to use this
391 ## you must disable beaker.session.secret to use this
392 #beaker.session.encrypt_key = key_for_encryption
392 #beaker.session.encrypt_key = key_for_encryption
393 #beaker.session.validate_key = validation_key
393 #beaker.session.validate_key = validation_key
394
394
395 ## sets session as invalid (also logging out the user) if it has not been
395 ## sets session as invalid (also logging out the user) if it has not been
396 ## accessed for the given amount of time in seconds
396 ## accessed for the given amount of time in seconds
397 beaker.session.timeout = 2592000
397 beaker.session.timeout = 2592000
398 beaker.session.httponly = true
398 beaker.session.httponly = true
399 ## Path to use for the cookie. Set to prefix if you use prefix middleware
399 ## Path to use for the cookie. Set to prefix if you use prefix middleware
400 #beaker.session.cookie_path = /custom_prefix
400 #beaker.session.cookie_path = /custom_prefix
401
401
402 ## uncomment for https secure cookie
402 ## uncomment for https secure cookie
403 beaker.session.secure = false
403 beaker.session.secure = false
404
404
405 ## auto-save the session so that .save() does not need to be called
405 ## auto-save the session so that .save() does not need to be called
406 beaker.session.auto = false
406 beaker.session.auto = false
407
407
408 ## default cookie expiration time in seconds, set to `true` to set expire
408 ## default cookie expiration time in seconds, set to `true` to set expire
409 ## at browser close
409 ## at browser close
410 #beaker.session.cookie_expires = 3600
410 #beaker.session.cookie_expires = 3600
411
411
412 ###################################
412 ###################################
413 ## SEARCH INDEXING CONFIGURATION ##
413 ## SEARCH INDEXING CONFIGURATION ##
414 ###################################
414 ###################################
415 ## Full text search indexer is available in rhodecode-tools under
415 ## Full text search indexer is available in rhodecode-tools under
416 ## `rhodecode-tools index` command
416 ## `rhodecode-tools index` command
417
417
418 ## WHOOSH Backend, doesn't require additional services to run
418 ## WHOOSH Backend, doesn't require additional services to run
419 ## it works well with a few dozen repos
419 ## it works well with a few dozen repos
420 search.module = rhodecode.lib.index.whoosh
420 search.module = rhodecode.lib.index.whoosh
421 search.location = %(here)s/data/index
421 search.location = %(here)s/data/index
422
422
423 ########################################
423 ########################################
424 ### CHANNELSTREAM CONFIG ####
424 ### CHANNELSTREAM CONFIG ####
425 ########################################
425 ########################################
426 ## channelstream enables persistent connections and live notification
426 ## channelstream enables persistent connections and live notification
427 ## in the system. It's also used by the chat system
427 ## in the system. It's also used by the chat system
428 channelstream.enabled = false
428 channelstream.enabled = false
429
429
430 ## server address for channelstream server on the backend
430 ## server address for channelstream server on the backend
431 channelstream.server = 127.0.0.1:9800
431 channelstream.server = 127.0.0.1:9800
432
432
433 ## location of the channelstream server from outside world
433 ## location of the channelstream server from outside world
434 ## use ws:// for http or wss:// for https. This address needs to be handled
434 ## use ws:// for http or wss:// for https. This address needs to be handled
435 ## by external HTTP server such as Nginx or Apache
435 ## by external HTTP server such as Nginx or Apache
436 ## see nginx/apache configuration examples in our docs
436 ## see nginx/apache configuration examples in our docs
437 channelstream.ws_url = ws://rhodecode.yourserver.com/_channelstream
437 channelstream.ws_url = ws://rhodecode.yourserver.com/_channelstream
438 channelstream.secret = secret
438 channelstream.secret = secret
439 channelstream.history.location = %(here)s/channelstream_history
439 channelstream.history.location = %(here)s/channelstream_history
440
440
441 ## Internal application path that Javascript uses to connect into.
441 ## Internal application path that Javascript uses to connect into.
442 ## If you use proxy-prefix the prefix should be added before /_channelstream
442 ## If you use proxy-prefix the prefix should be added before /_channelstream
443 channelstream.proxy_path = /_channelstream
443 channelstream.proxy_path = /_channelstream
444
444
445
445
446 ###################################
446 ###################################
447 ## APPENLIGHT CONFIG ##
447 ## APPENLIGHT CONFIG ##
448 ###################################
448 ###################################
449
449
450 ## Appenlight is tailored to work with RhodeCode, see
450 ## Appenlight is tailored to work with RhodeCode, see
451 ## http://appenlight.com for details how to obtain an account
451 ## http://appenlight.com for details how to obtain an account
452
452
453 ## appenlight integration enabled
453 ## appenlight integration enabled
454 appenlight = false
454 appenlight = false
455
455
456 appenlight.server_url = https://api.appenlight.com
456 appenlight.server_url = https://api.appenlight.com
457 appenlight.api_key = YOUR_API_KEY
457 appenlight.api_key = YOUR_API_KEY
458 #appenlight.transport_config = https://api.appenlight.com?threaded=1&timeout=5
458 #appenlight.transport_config = https://api.appenlight.com?threaded=1&timeout=5
459
459
460 # used for JS client
460 # used for JS client
461 appenlight.api_public_key = YOUR_API_PUBLIC_KEY
461 appenlight.api_public_key = YOUR_API_PUBLIC_KEY
462
462
463 ## TWEAK AMOUNT OF INFO SENT HERE
463 ## TWEAK AMOUNT OF INFO SENT HERE
464
464
465 ## enables 404 error logging (default False)
465 ## enables 404 error logging (default False)
466 appenlight.report_404 = false
466 appenlight.report_404 = false
467
467
468 ## time in seconds after which a request is considered slow (default 1)
468 ## time in seconds after which a request is considered slow (default 1)
469 appenlight.slow_request_time = 1
469 appenlight.slow_request_time = 1
470
470
471 ## record slow requests in application
471 ## record slow requests in application
472 ## (needs to be enabled for slow datastore recording and time tracking)
472 ## (needs to be enabled for slow datastore recording and time tracking)
473 appenlight.slow_requests = true
473 appenlight.slow_requests = true
474
474
475 ## enable hooking to application loggers
475 ## enable hooking to application loggers
476 appenlight.logging = true
476 appenlight.logging = true
477
477
478 ## minimum log level for log capture
478 ## minimum log level for log capture
479 appenlight.logging.level = WARNING
479 appenlight.logging.level = WARNING
480
480
481 ## send logs only from erroneous/slow requests
481 ## send logs only from erroneous/slow requests
482 ## (saves API quota for intensive logging)
482 ## (saves API quota for intensive logging)
483 appenlight.logging_on_error = false
483 appenlight.logging_on_error = false
484
484
485 ## list of additional keywords that should be grabbed from the environ object
485 ## list of additional keywords that should be grabbed from the environ object
486 ## can be string with comma separated list of words in lowercase
486 ## can be string with comma separated list of words in lowercase
487 ## (by default client will always send following info:
487 ## (by default client will always send following info:
488 ## 'REMOTE_USER', 'REMOTE_ADDR', 'SERVER_NAME', 'CONTENT_TYPE' + all keys that
488 ## 'REMOTE_USER', 'REMOTE_ADDR', 'SERVER_NAME', 'CONTENT_TYPE' + all keys that
489 ## start with HTTP*); this list can be extended with additional keywords here
489 ## start with HTTP*); this list can be extended with additional keywords here
490 appenlight.environ_keys_whitelist =
490 appenlight.environ_keys_whitelist =
491
491
492 ## list of keywords that should be blanked from request object
492 ## list of keywords that should be blanked from request object
493 ## can be string with comma separated list of words in lowercase
493 ## can be string with comma separated list of words in lowercase
494 ## (by default client will always blank keys that contain following words
494 ## (by default client will always blank keys that contain following words
495 ## 'password', 'passwd', 'pwd', 'auth_tkt', 'secret', 'csrf')
495 ## 'password', 'passwd', 'pwd', 'auth_tkt', 'secret', 'csrf')
496 ## this list can be extended with additional keywords set here
496 ## this list can be extended with additional keywords set here
497 appenlight.request_keys_blacklist =
497 appenlight.request_keys_blacklist =
498
498
499 ## list of namespaces that should be ignored when gathering log entries
499 ## list of namespaces that should be ignored when gathering log entries
500 ## can be string with comma separated list of namespaces
500 ## can be string with comma separated list of namespaces
501 ## (by default the client ignores own entries: appenlight_client.client)
501 ## (by default the client ignores own entries: appenlight_client.client)
502 appenlight.log_namespace_blacklist =
502 appenlight.log_namespace_blacklist =
503
503
504
504
505 ################################################################################
505 ################################################################################
506 ## WARNING: *THE LINE BELOW MUST BE UNCOMMENTED ON A PRODUCTION ENVIRONMENT* ##
506 ## WARNING: *THE LINE BELOW MUST BE UNCOMMENTED ON A PRODUCTION ENVIRONMENT* ##
507 ## Debug mode will enable the interactive debugging tool, allowing ANYONE to ##
507 ## Debug mode will enable the interactive debugging tool, allowing ANYONE to ##
508 ## execute malicious code after an exception is raised. ##
508 ## execute malicious code after an exception is raised. ##
509 ################################################################################
509 ################################################################################
510 #set debug = false
510 #set debug = false
511
511
512
512
513 ##############
513 ##############
514 ## STYLING ##
514 ## STYLING ##
515 ##############
515 ##############
516 debug_style = true
516 debug_style = true
517
517
518 ###########################################
518 ###########################################
519 ### MAIN RHODECODE DATABASE CONFIG ###
519 ### MAIN RHODECODE DATABASE CONFIG ###
520 ###########################################
520 ###########################################
521 #sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode.db?timeout=30
521 #sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode.db?timeout=30
522 #sqlalchemy.db1.url = postgresql://postgres:qweqwe@localhost/rhodecode
522 #sqlalchemy.db1.url = postgresql://postgres:qweqwe@localhost/rhodecode
523 #sqlalchemy.db1.url = mysql://root:qweqwe@localhost/rhodecode
523 #sqlalchemy.db1.url = mysql://root:qweqwe@localhost/rhodecode
524 #sqlalchemy.db1.url = mysql+pymysql://root:qweqwe@localhost/rhodecode
524 #sqlalchemy.db1.url = mysql+pymysql://root:qweqwe@localhost/rhodecode
525
525
526 sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode.db?timeout=30
526 sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode.db?timeout=30
527
527
528 # see sqlalchemy docs for other advanced settings
528 # see sqlalchemy docs for other advanced settings
529
529
530 ## print the sql statements to output
530 ## print the sql statements to output
531 sqlalchemy.db1.echo = false
531 sqlalchemy.db1.echo = false
532 ## recycle the connections after this amount of seconds
532 ## recycle the connections after this amount of seconds
533 sqlalchemy.db1.pool_recycle = 3600
533 sqlalchemy.db1.pool_recycle = 3600
534 sqlalchemy.db1.convert_unicode = true
534 sqlalchemy.db1.convert_unicode = true
535
535
536 ## the number of connections to keep open inside the connection pool.
536 ## the number of connections to keep open inside the connection pool.
537 ## 0 indicates no limit
537 ## 0 indicates no limit
538 #sqlalchemy.db1.pool_size = 5
538 #sqlalchemy.db1.pool_size = 5
539
539
540 ## the number of connections to allow in connection pool "overflow", that is
540 ## the number of connections to allow in connection pool "overflow", that is
541 ## connections that can be opened above and beyond the pool_size setting,
541 ## connections that can be opened above and beyond the pool_size setting,
542 ## which defaults to five.
542 ## which defaults to five.
543 #sqlalchemy.db1.max_overflow = 10
543 #sqlalchemy.db1.max_overflow = 10
544
544
545 ## Connection check ping, used to detect broken database connections
545 ## Connection check ping, used to detect broken database connections
546 ## could be enabled to better handle 'MySQL has gone away' errors
546 ## could be enabled to better handle 'MySQL has gone away' errors
547 #sqlalchemy.db1.ping_connection = true
547 #sqlalchemy.db1.ping_connection = true
548
548
549 ##################
549 ##################
550 ### VCS CONFIG ###
550 ### VCS CONFIG ###
551 ##################
551 ##################
552 vcs.server.enable = true
552 vcs.server.enable = true
553 vcs.server = localhost:9900
553 vcs.server = localhost:9900
554
554
555 ## Web server connectivity protocol, responsible for web-based VCS operations
555 ## Web server connectivity protocol, responsible for web-based VCS operations
556 ## Available protocols are:
556 ## Available protocols are:
557 ## `http` - use http-rpc backend (default)
557 ## `http` - use http-rpc backend (default)
558 vcs.server.protocol = http
558 vcs.server.protocol = http
559
559
560 ## Push/Pull operations protocol, available options are:
560 ## Push/Pull operations protocol, available options are:
561 ## `http` - use http-rpc backend (default)
561 ## `http` - use http-rpc backend (default)
562 ##
562 ##
563 vcs.scm_app_implementation = http
563 vcs.scm_app_implementation = http
564
564
565 ## Push/Pull operations hooks protocol, available options are:
565 ## Push/Pull operations hooks protocol, available options are:
566 ## `http` - use http-rpc backend (default)
566 ## `http` - use http-rpc backend (default)
567 vcs.hooks.protocol = http
567 vcs.hooks.protocol = http
568
568
569 ## Host on which this instance is listening for hooks. If vcsserver is in another location
569 ## Host on which this instance is listening for hooks. If vcsserver is in another location
570 ## this should be adjusted.
570 ## this should be adjusted.
571 vcs.hooks.host = 127.0.0.1
571 vcs.hooks.host = 127.0.0.1
572
572
573 vcs.server.log_level = debug
573 vcs.server.log_level = debug
574 ## Start VCSServer with this instance as a subprocess, useful for development
574 ## Start VCSServer with this instance as a subprocess, useful for development
575 vcs.start_server = false
575 vcs.start_server = false
576
576
577 ## List of enabled VCS backends, available options are:
577 ## List of enabled VCS backends, available options are:
578 ## `hg` - mercurial
578 ## `hg` - mercurial
579 ## `git` - git
579 ## `git` - git
580 ## `svn` - subversion
580 ## `svn` - subversion
581 vcs.backends = hg, git, svn
581 vcs.backends = hg, git, svn
582
582
583 vcs.connection_timeout = 3600
583 vcs.connection_timeout = 3600
584 ## Compatibility version when creating SVN repositories. Defaults to newest version when commented out.
584 ## Compatibility version when creating SVN repositories. Defaults to newest version when commented out.
585 ## Available options are: pre-1.4-compatible, pre-1.5-compatible, pre-1.6-compatible, pre-1.8-compatible, pre-1.9-compatible
585 ## Available options are: pre-1.4-compatible, pre-1.5-compatible, pre-1.6-compatible, pre-1.8-compatible, pre-1.9-compatible
586 #vcs.svn.compatible_version = pre-1.8-compatible
586 #vcs.svn.compatible_version = pre-1.8-compatible
587
587
588
588
589 ############################################################
589 ############################################################
590 ### Subversion proxy support (mod_dav_svn) ###
590 ### Subversion proxy support (mod_dav_svn) ###
591 ### Maps RhodeCode repo groups into SVN paths for Apache ###
591 ### Maps RhodeCode repo groups into SVN paths for Apache ###
592 ############################################################
592 ############################################################
593 ## Enable or disable the config file generation.
593 ## Enable or disable the config file generation.
594 svn.proxy.generate_config = false
594 svn.proxy.generate_config = false
595 ## Generate config file with `SVNListParentPath` set to `On`.
595 ## Generate config file with `SVNListParentPath` set to `On`.
596 svn.proxy.list_parent_path = true
596 svn.proxy.list_parent_path = true
597 ## Set location and file name of generated config file.
597 ## Set location and file name of generated config file.
598 svn.proxy.config_file_path = %(here)s/mod_dav_svn.conf
598 svn.proxy.config_file_path = %(here)s/mod_dav_svn.conf
599 ## alternative mod_dav config template. This needs to be a mako template
599 ## alternative mod_dav config template. This needs to be a mako template
600 #svn.proxy.config_template = ~/.rccontrol/enterprise-1/custom_svn_conf.mako
600 #svn.proxy.config_template = ~/.rccontrol/enterprise-1/custom_svn_conf.mako
601 ## Used as a prefix to the `Location` block in the generated config file.
601 ## Used as a prefix to the `Location` block in the generated config file.
602 ## In most cases it should be set to `/`.
602 ## In most cases it should be set to `/`.
603 svn.proxy.location_root = /
603 svn.proxy.location_root = /
604 ## Command to reload the mod dav svn configuration on change.
604 ## Command to reload the mod dav svn configuration on change.
605 ## Example: `/etc/init.d/apache2 reload`
605 ## Example: `/etc/init.d/apache2 reload`
606 #svn.proxy.reload_cmd = /etc/init.d/apache2 reload
606 #svn.proxy.reload_cmd = /etc/init.d/apache2 reload
607 ## If the timeout expires before the reload command finishes, the command will
607 ## If the timeout expires before the reload command finishes, the command will
608 ## be killed. Setting it to zero means no timeout. Defaults to 10 seconds.
608 ## be killed. Setting it to zero means no timeout. Defaults to 10 seconds.
609 #svn.proxy.reload_timeout = 10
609 #svn.proxy.reload_timeout = 10
610
610
611 ############################################################
611 ############################################################
612 ### SSH Support Settings ###
612 ### SSH Support Settings ###
613 ############################################################
613 ############################################################
614
614
615 ## Defines if a custom authorized_keys file should be created and written on
615 ## Defines if a custom authorized_keys file should be created and written on
616 ## any change of user ssh keys. Setting this to false also disables the possibility
616 ## any change of user ssh keys. Setting this to false also disables the possibility
617 ## of adding SSH keys by users from the web interface. Super admins can still
617 ## of adding SSH keys by users from the web interface. Super admins can still
618 ## manage SSH Keys.
618 ## manage SSH Keys.
619 ssh.generate_authorized_keyfile = false
619 ssh.generate_authorized_keyfile = false
620
620
621 ## Options for ssh, default is `no-pty,no-port-forwarding,no-X11-forwarding,no-agent-forwarding`
621 ## Options for ssh, default is `no-pty,no-port-forwarding,no-X11-forwarding,no-agent-forwarding`
622 # ssh.authorized_keys_ssh_opts =
622 # ssh.authorized_keys_ssh_opts =
623
623
624 ## Path to the authorized_keys file where the generated entries are placed.
624 ## Path to the authorized_keys file where the generated entries are placed.
625 ## It is possible to have multiple key files specified in `sshd_config` e.g.
625 ## It is possible to have multiple key files specified in `sshd_config` e.g.
626 ## AuthorizedKeysFile %h/.ssh/authorized_keys %h/.ssh/authorized_keys_rhodecode
626 ## AuthorizedKeysFile %h/.ssh/authorized_keys %h/.ssh/authorized_keys_rhodecode
627 ssh.authorized_keys_file_path = ~/.ssh/authorized_keys_rhodecode
627 ssh.authorized_keys_file_path = ~/.ssh/authorized_keys_rhodecode
628
628
629 ## Command to execute the SSH wrapper. The binary is available in the
629 ## Command to execute the SSH wrapper. The binary is available in the
630 ## rhodecode installation directory.
630 ## rhodecode installation directory.
631 ## e.g ~/.rccontrol/community-1/profile/bin/rc-ssh-wrapper
631 ## e.g ~/.rccontrol/community-1/profile/bin/rc-ssh-wrapper
632 ssh.wrapper_cmd = ~/.rccontrol/community-1/rc-ssh-wrapper
632 ssh.wrapper_cmd = ~/.rccontrol/community-1/rc-ssh-wrapper
633
633
634 ## Allow shell when executing the ssh-wrapper command
634 ## Allow shell when executing the ssh-wrapper command
635 ssh.wrapper_cmd_allow_shell = false
635 ssh.wrapper_cmd_allow_shell = false
636
636
637 ## Enables logging, and detailed output sent back to the client during SSH
637 ## Enables logging, and detailed output sent back to the client during SSH
638 ## operations. Useful for debugging, shouldn't be used in production.
638 ## operations. Useful for debugging, shouldn't be used in production.
639 ssh.enable_debug_logging = true
639 ssh.enable_debug_logging = true
640
640
641 ## Paths to binary executables, by default they are just the names, but we can
641 ## Paths to binary executables, by default they are just the names, but we can
642 ## override them if we want to use custom ones
642 ## override them if we want to use custom ones
643 ssh.executable.hg = ~/.rccontrol/vcsserver-1/profile/bin/hg
643 ssh.executable.hg = ~/.rccontrol/vcsserver-1/profile/bin/hg
644 ssh.executable.git = ~/.rccontrol/vcsserver-1/profile/bin/git
644 ssh.executable.git = ~/.rccontrol/vcsserver-1/profile/bin/git
645 ssh.executable.svn = ~/.rccontrol/vcsserver-1/profile/bin/svnserve
645 ssh.executable.svn = ~/.rccontrol/vcsserver-1/profile/bin/svnserve
646
646
647
647
648 ## Dummy marker to add new entries after.
648 ## Dummy marker to add new entries after.
649 ## Add any custom entries below. Please don't remove.
649 ## Add any custom entries below. Please don't remove.
650 custom.conf = 1
650 custom.conf = 1
651
651
652
652
653 ################################
653 ################################
654 ### LOGGING CONFIGURATION ####
654 ### LOGGING CONFIGURATION ####
655 ################################
655 ################################
656 [loggers]
656 [loggers]
657 keys = root, sqlalchemy, beaker, celery, rhodecode, ssh_wrapper
657 keys = root, sqlalchemy, beaker, celery, rhodecode, ssh_wrapper
658
658
659 [handlers]
659 [handlers]
660 keys = console, console_sql
660 keys = console, console_sql
661
661
662 [formatters]
662 [formatters]
663 keys = generic, color_formatter, color_formatter_sql
663 keys = generic, color_formatter, color_formatter_sql
664
664
665 #############
665 #############
666 ## LOGGERS ##
666 ## LOGGERS ##
667 #############
667 #############
668 [logger_root]
668 [logger_root]
669 level = NOTSET
669 level = NOTSET
670 handlers = console
670 handlers = console
671
671
672 [logger_sqlalchemy]
672 [logger_sqlalchemy]
673 level = INFO
673 level = INFO
674 handlers = console_sql
674 handlers = console_sql
675 qualname = sqlalchemy.engine
675 qualname = sqlalchemy.engine
676 propagate = 0
676 propagate = 0
677
677
678 [logger_beaker]
678 [logger_beaker]
679 level = DEBUG
679 level = DEBUG
680 handlers =
680 handlers =
681 qualname = beaker.container
681 qualname = beaker.container
682 propagate = 1
682 propagate = 1
683
683
684 [logger_rhodecode]
684 [logger_rhodecode]
685 level = DEBUG
685 level = DEBUG
686 handlers =
686 handlers =
687 qualname = rhodecode
687 qualname = rhodecode
688 propagate = 1
688 propagate = 1
689
689
690 [logger_ssh_wrapper]
690 [logger_ssh_wrapper]
691 level = DEBUG
691 level = DEBUG
692 handlers =
692 handlers =
693 qualname = ssh_wrapper
693 qualname = ssh_wrapper
694 propagate = 1
694 propagate = 1
695
695
696 [logger_celery]
696 [logger_celery]
697 level = DEBUG
697 level = DEBUG
698 handlers =
698 handlers =
699 qualname = celery
699 qualname = celery
700
700
701
701
702 ##############
702 ##############
703 ## HANDLERS ##
703 ## HANDLERS ##
704 ##############
704 ##############
705
705
706 [handler_console]
706 [handler_console]
707 class = StreamHandler
707 class = StreamHandler
708 args = (sys.stderr, )
708 args = (sys.stderr, )
709 level = DEBUG
709 level = DEBUG
710 formatter = color_formatter
710 formatter = color_formatter
711
711
712 [handler_console_sql]
712 [handler_console_sql]
713 # "level = DEBUG" logs SQL queries and results.
713 # "level = DEBUG" logs SQL queries and results.
714 # "level = INFO" logs SQL queries.
714 # "level = INFO" logs SQL queries.
715 # "level = WARN" logs neither. (Recommended for production systems.)
715 # "level = WARN" logs neither. (Recommended for production systems.)
716 class = StreamHandler
716 class = StreamHandler
717 args = (sys.stderr, )
717 args = (sys.stderr, )
718 level = WARN
718 level = WARN
719 formatter = color_formatter_sql
719 formatter = color_formatter_sql
720
720
721 ################
721 ################
722 ## FORMATTERS ##
722 ## FORMATTERS ##
723 ################
723 ################
724
724
725 [formatter_generic]
725 [formatter_generic]
726 class = rhodecode.lib.logging_formatter.ExceptionAwareFormatter
726 class = rhodecode.lib.logging_formatter.ExceptionAwareFormatter
727 format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s
727 format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s
728 datefmt = %Y-%m-%d %H:%M:%S
728 datefmt = %Y-%m-%d %H:%M:%S
729
729
730 [formatter_color_formatter]
730 [formatter_color_formatter]
731 class = rhodecode.lib.logging_formatter.ColorFormatter
731 class = rhodecode.lib.logging_formatter.ColorFormatter
732 format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s
732 format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s
733 datefmt = %Y-%m-%d %H:%M:%S
733 datefmt = %Y-%m-%d %H:%M:%S
734
734
735 [formatter_color_formatter_sql]
735 [formatter_color_formatter_sql]
736 class = rhodecode.lib.logging_formatter.ColorFormatterSql
736 class = rhodecode.lib.logging_formatter.ColorFormatterSql
737 format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
737 format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
738 datefmt = %Y-%m-%d %H:%M:%S
738 datefmt = %Y-%m-%d %H:%M:%S
@@ -1,707 +1,707 b''
1
1
2
2
3 ################################################################################
3 ################################################################################
4 ## RHODECODE COMMUNITY EDITION CONFIGURATION ##
4 ## RHODECODE COMMUNITY EDITION CONFIGURATION ##
5 # The %(here)s variable will be replaced with the parent directory of this file#
5 # The %(here)s variable will be replaced with the parent directory of this file#
6 ################################################################################
6 ################################################################################
7
7
8 [DEFAULT]
8 [DEFAULT]
9 debug = true
9 debug = true
10
10
11 ################################################################################
11 ################################################################################
12 ## EMAIL CONFIGURATION ##
12 ## EMAIL CONFIGURATION ##
13 ## Uncomment and replace with the email address which should receive ##
13 ## Uncomment and replace with the email address which should receive ##
14 ## any error reports after an application crash ##
14 ## any error reports after an application crash ##
15 ## Additionally these settings will be used by the RhodeCode mailing system ##
15 ## Additionally these settings will be used by the RhodeCode mailing system ##
16 ################################################################################
16 ################################################################################
17
17
18 ## prefix all emails subjects with given prefix, helps filtering out emails
18 ## prefix all emails subjects with given prefix, helps filtering out emails
19 #email_prefix = [RhodeCode]
19 #email_prefix = [RhodeCode]
20
20
21 ## email FROM address all mails will be sent
21 ## email FROM address all mails will be sent
22 #app_email_from = rhodecode-noreply@localhost
22 #app_email_from = rhodecode-noreply@localhost
23
23
24 ## Uncomment and replace with the address which should receive any error report
24 ## Uncomment and replace with the address which should receive any error report
25 ## note: using appenlight for error handling doesn't need this to be uncommented
25 ## note: using appenlight for error handling doesn't need this to be uncommented
26 #email_to = admin@localhost
26 #email_to = admin@localhost
27
27
28 ## in case of Application errors, sent an error email form
28 ## in case of Application errors, sent an error email form
29 #error_email_from = rhodecode_error@localhost
29 #error_email_from = rhodecode_error@localhost
30
30
31 ## additional error message to be send in case of server crash
31 ## additional error message to be send in case of server crash
32 #error_message =
32 #error_message =
33
33
34
34
35 #smtp_server = mail.server.com
35 #smtp_server = mail.server.com
36 #smtp_username =
36 #smtp_username =
37 #smtp_password =
37 #smtp_password =
38 #smtp_port =
38 #smtp_port =
39 #smtp_use_tls = false
39 #smtp_use_tls = false
40 #smtp_use_ssl = true
40 #smtp_use_ssl = true
41 ## Specify available auth parameters here (e.g. LOGIN PLAIN CRAM-MD5, etc.)
41 ## Specify available auth parameters here (e.g. LOGIN PLAIN CRAM-MD5, etc.)
42 #smtp_auth =
42 #smtp_auth =
43
43
44 [server:main]
44 [server:main]
45 ## COMMON ##
45 ## COMMON ##
46 host = 127.0.0.1
46 host = 127.0.0.1
47 port = 5000
47 port = 5000
48
48
49 ##################################
49 ##################################
50 ## WAITRESS WSGI SERVER ##
50 ## WAITRESS WSGI SERVER ##
51 ## Recommended for Development ##
51 ## Recommended for Development ##
52 ##################################
52 ##################################
53
53
54 #use = egg:waitress#main
54 #use = egg:waitress#main
55 ## number of worker threads
55 ## number of worker threads
56 #threads = 5
56 #threads = 5
57 ## MAX BODY SIZE 100GB
57 ## MAX BODY SIZE 100GB
58 #max_request_body_size = 107374182400
58 #max_request_body_size = 107374182400
59 ## Use poll instead of select, fixes file descriptors limits problems.
59 ## Use poll instead of select, fixes file descriptors limits problems.
60 ## May not work on old windows systems.
60 ## May not work on old windows systems.
61 #asyncore_use_poll = true
61 #asyncore_use_poll = true
62
62
63
63
64 ##########################
64 ##########################
65 ## GUNICORN WSGI SERVER ##
65 ## GUNICORN WSGI SERVER ##
66 ##########################
66 ##########################
67 ## run with gunicorn --log-config rhodecode.ini --paste rhodecode.ini
67 ## run with gunicorn --log-config rhodecode.ini --paste rhodecode.ini
68
68
69 use = egg:gunicorn#main
69 use = egg:gunicorn#main
70 ## Sets the number of process workers. You must set `instance_id = *`
70 ## Sets the number of process workers. You must set `instance_id = *`
71 ## when this option is set to more than one worker, recommended
71 ## when this option is set to more than one worker, recommended
72 ## value is (2 * NUMBER_OF_CPUS + 1), e.g. 2 CPUs = 5 workers
72 ## value is (2 * NUMBER_OF_CPUS + 1), e.g. 2 CPUs = 5 workers
73 ## The `instance_id = *` must be set in the [app:main] section below
73 ## The `instance_id = *` must be set in the [app:main] section below
74 workers = 2
74 workers = 2
75 ## number of threads for each worker, must be set to 1 for gevent
75 ## number of threads for each worker, must be set to 1 for gevent
76 ## generally recommended to keep at 1
76 ## generally recommended to keep at 1
77 #threads = 1
77 #threads = 1
78 ## process name
78 ## process name
79 proc_name = rhodecode
79 proc_name = rhodecode
80 ## type of worker class, one of sync, gevent
80 ## type of worker class, one of sync, gevent
81 ## for bigger setups, using a worker class other than sync is recommended
81 ## for bigger setups, using a worker class other than sync is recommended
82 worker_class = gevent
82 worker_class = gevent
83 ## The maximum number of simultaneous clients. Valid only for Gevent
83 ## The maximum number of simultaneous clients. Valid only for Gevent
84 #worker_connections = 10
84 #worker_connections = 10
85 ## max number of requests that a worker will handle before being gracefully
85 ## max number of requests that a worker will handle before being gracefully
86 ## restarted, which can help prevent memory leaks
86 ## restarted, which can help prevent memory leaks
87 max_requests = 1000
87 max_requests = 1000
88 max_requests_jitter = 30
88 max_requests_jitter = 30
89 ## amount of time a worker can spend handling a request before it
89 ## amount of time a worker can spend handling a request before it
90 ## gets killed and restarted. Set to 6 hours
90 ## gets killed and restarted. Set to 6 hours
91 timeout = 21600
91 timeout = 21600
92
92
93
93
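The (2 * NUMBER_OF_CPUS + 1) rule mentioned above is the usual gunicorn sizing heuristic. As a quick illustration (not part of RhodeCode itself), the suggested worker count can be derived on the target machine with the standard library:

import multiprocessing

# heuristic from the workers comment above: 2 * CPUs + 1
workers = 2 * multiprocessing.cpu_count() + 1
print('suggested gunicorn workers = %d' % workers)  # e.g. 5 on a 2-CPU box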
94 ## prefix middleware for RhodeCode.
94 ## prefix middleware for RhodeCode.
95 ## recommended when using proxy setup.
95 ## recommended when using proxy setup.
96 ## allows setting RhodeCode under a prefix on the server.
96 ## allows setting RhodeCode under a prefix on the server.
97 ## e.g. https://server.com/custom_prefix. Enable the `filter-with =` option below as well.
97 ## e.g. https://server.com/custom_prefix. Enable the `filter-with =` option below as well.
98 ## And set your prefix like: `prefix = /custom_prefix`
98 ## And set your prefix like: `prefix = /custom_prefix`
99 ## be sure to also set beaker.session.cookie_path = /custom_prefix if you need
99 ## be sure to also set beaker.session.cookie_path = /custom_prefix if you need
100 ## to make your cookies only work on the prefix url
100 ## to make your cookies only work on the prefix url
101 [filter:proxy-prefix]
101 [filter:proxy-prefix]
102 use = egg:PasteDeploy#prefix
102 use = egg:PasteDeploy#prefix
103 prefix = /
103 prefix = /
104
104
105 [app:main]
105 [app:main]
106 use = egg:rhodecode-enterprise-ce
106 use = egg:rhodecode-enterprise-ce
107
107
108 ## enable proxy prefix middleware, defined above
108 ## enable proxy prefix middleware, defined above
109 #filter-with = proxy-prefix
109 #filter-with = proxy-prefix
110
110
111 ## encryption key used to encrypt social plugin tokens,
111 ## encryption key used to encrypt social plugin tokens,
112 ## remote_urls with credentials etc, if not set it defaults to
112 ## remote_urls with credentials etc, if not set it defaults to
113 ## `beaker.session.secret`
113 ## `beaker.session.secret`
114 #rhodecode.encrypted_values.secret =
114 #rhodecode.encrypted_values.secret =
115
115
116 ## decryption strict mode (enabled by default). It controls if decryption raises
116 ## decryption strict mode (enabled by default). It controls if decryption raises
117 ## `SignatureVerificationError` in case of wrong key, or damaged encryption data.
117 ## `SignatureVerificationError` in case of wrong key, or damaged encryption data.
118 #rhodecode.encrypted_values.strict = false
118 #rhodecode.encrypted_values.strict = false
119
119
120 ## return gzipped responses from Rhodecode (static files/application)
120 ## return gzipped responses from Rhodecode (static files/application)
121 gzip_responses = false
121 gzip_responses = false
122
122
123 ## autogenerate javascript routes file on startup
123 ## autogenerate javascript routes file on startup
124 generate_js_files = false
124 generate_js_files = false
125
125
126 ## Optional Languages
126 ## Optional Languages
127 ## en(default), be, de, es, fr, it, ja, pl, pt, ru, zh
127 ## en(default), be, de, es, fr, it, ja, pl, pt, ru, zh
128 lang = en
128 lang = en
129
129
130 ## perform a full repository scan on each server start, this should be
130 ## perform a full repository scan on each server start, this should be
131 ## set to false after first startup, to allow faster server restarts.
131 ## set to false after first startup, to allow faster server restarts.
132 startup.import_repos = false
132 startup.import_repos = false
133
133
134 ## Uncomment and set this path to use archive download cache.
134 ## Uncomment and set this path to use archive download cache.
135 ## Once enabled, generated archives will be cached at this location
135 ## Once enabled, generated archives will be cached at this location
136 ## and served from the cache during subsequent requests for the same archive of
136 ## and served from the cache during subsequent requests for the same archive of
137 ## the repository.
137 ## the repository.
138 #archive_cache_dir = /tmp/tarballcache
138 #archive_cache_dir = /tmp/tarballcache
139
139
140 ## URL at which the application is running. This is used for bootstrapping
140 ## URL at which the application is running. This is used for bootstrapping
141 ## requests in context when no web request is available. Used in ishell, or
141 ## requests in context when no web request is available. Used in ishell, or
142 ## SSH calls. Set this for events to receive proper url for SSH calls.
142 ## SSH calls. Set this for events to receive proper url for SSH calls.
143 app.base_url = http://rhodecode.local
143 app.base_url = http://rhodecode.local
144
144
145 ## change this to unique ID for security
145 ## change this to unique ID for security
146 app_instance_uuid = rc-production
146 app_instance_uuid = rc-production
147
147
148 ## cut off limit for large diffs (size in bytes). If overall diff size on
148 ## cut off limit for large diffs (size in bytes). If overall diff size on
149 ## a commit or pull request exceeds this limit, the diff will be displayed
149 ## a commit or pull request exceeds this limit, the diff will be displayed
150 ## partially. E.g. 512000 == 512KB
150 ## partially. E.g. 512000 == 512KB
151 cut_off_limit_diff = 512000
151 cut_off_limit_diff = 512000
152
152
153 ## cut off limit for large files inside diffs (size in bytes). Each individual
153 ## cut off limit for large files inside diffs (size in bytes). Each individual
154 ## file inside diff which exceeds this limit will be displayed partially.
154 ## file inside diff which exceeds this limit will be displayed partially.
155 ## E.g. 128000 == 128KB
155 ## E.g. 128000 == 128KB
156 cut_off_limit_file = 128000
156 cut_off_limit_file = 128000
157
157
158 ## use cache version of scm repo everywhere
158 ## use cache version of scm repo everywhere
159 vcs_full_cache = true
159 vcs_full_cache = true
160
160
161 ## force https in RhodeCode, fixes https redirects, assumes it's always https
161 ## force https in RhodeCode, fixes https redirects, assumes it's always https
162 ## Normally this is controlled by proper http flags sent from http server
162 ## Normally this is controlled by proper http flags sent from http server
163 force_https = false
163 force_https = false
164
164
165 ## use Strict-Transport-Security headers
165 ## use Strict-Transport-Security headers
166 use_htsts = false
166 use_htsts = false
167
167
168 ## git rev filter option, --all is the default filter, if you need to
168 ## git rev filter option, --all is the default filter, if you need to
169 ## hide all refs in changelog switch this to --branches --tags
169 ## hide all refs in changelog switch this to --branches --tags
170 git_rev_filter = --branches --tags
170 git_rev_filter = --branches --tags
171
171
172 # Set to true if your repos are exposed using the dumb protocol
172 # Set to true if your repos are exposed using the dumb protocol
173 git_update_server_info = false
173 git_update_server_info = false
174
174
175 ## RSS/ATOM feed options
175 ## RSS/ATOM feed options
176 rss_cut_off_limit = 256000
176 rss_cut_off_limit = 256000
177 rss_items_per_page = 10
177 rss_items_per_page = 10
178 rss_include_diff = false
178 rss_include_diff = false
179
179
180 ## gist URL alias, used to create nicer urls for gists. This should be a
180 ## gist URL alias, used to create nicer urls for gists. This should be a
181 ## url that rewrites to _admin/gists/{gistid}.
181 ## url that rewrites to _admin/gists/{gistid}.
182 ## example: http://gist.rhodecode.org/{gistid}. Empty means use the internal
182 ## example: http://gist.rhodecode.org/{gistid}. Empty means use the internal
183 ## RhodeCode url, i.e. http[s]://rhodecode.server/_admin/gists/{gistid}
183 ## RhodeCode url, i.e. http[s]://rhodecode.server/_admin/gists/{gistid}
184 gist_alias_url =
184 gist_alias_url =
185
185
186 ## List of views (using glob pattern syntax) that AUTH TOKENS could be
186 ## List of views (using glob pattern syntax) that AUTH TOKENS could be
187 ## used for access.
187 ## used for access.
188 ## Adding ?auth_token=TOKEN_HASH to the url authenticates this request as if it
188 ## Adding ?auth_token=TOKEN_HASH to the url authenticates this request as if it
189 ## came from the logged in user who owns this authentication token.
189 ## came from the logged in user who owns this authentication token.
190 ## Additionally the @TOKEN syntax can be used to bind the view to a specific
190 ## Additionally the @TOKEN syntax can be used to bind the view to a specific
191 ## authentication token. Such a view will only be accessible when used together
191 ## authentication token. Such a view will only be accessible when used together
192 ## with this authentication token
192 ## with this authentication token
193 ##
193 ##
194 ## list of all views can be found under `/_admin/permissions/auth_token_access`
194 ## list of all views can be found under `/_admin/permissions/auth_token_access`
195 ## The list should be "," separated and on a single line.
195 ## The list should be "," separated and on a single line.
196 ##
196 ##
197 ## Most common views to enable:
197 ## Most common views to enable:
198 # RepoCommitsView:repo_commit_download
198 # RepoCommitsView:repo_commit_download
199 # RepoCommitsView:repo_commit_patch
199 # RepoCommitsView:repo_commit_patch
200 # RepoCommitsView:repo_commit_raw
200 # RepoCommitsView:repo_commit_raw
201 # RepoCommitsView:repo_commit_raw@TOKEN
201 # RepoCommitsView:repo_commit_raw@TOKEN
202 # RepoFilesView:repo_files_diff
202 # RepoFilesView:repo_files_diff
203 # RepoFilesView:repo_archivefile
203 # RepoFilesView:repo_archivefile
204 # RepoFilesView:repo_file_raw
204 # RepoFilesView:repo_file_raw
205 # GistView:*
205 # GistView:*
206 api_access_controllers_whitelist =
206 api_access_controllers_whitelist =
207
207
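As a hedged illustration of the whitelist mechanism described above (the hostname, repository name and token below are placeholders, and the `requests` library is assumed to be available), a whitelisted view such as RepoFilesView:repo_file_raw can then be fetched non-interactively by appending the token:

import requests

# authenticates the request as the owner of TOKEN_HASH, per the comment above
resp = requests.get(
    'https://rhodecode.example.com/myrepo/raw/tip/README.rst',
    params={'auth_token': 'TOKEN_HASH'})
print(resp.status_code)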
208 ## default encoding used to convert from and to unicode
208 ## default encoding used to convert from and to unicode
209 ## can also be a comma separated list of encodings in case of mixed encodings
209 ## can also be a comma separated list of encodings in case of mixed encodings
210 default_encoding = UTF-8
210 default_encoding = UTF-8
211
211
212 ## instance-id prefix
212 ## instance-id prefix
213 ## a prefix key for this instance used for cache invalidation when running
213 ## a prefix key for this instance used for cache invalidation when running
214 ## multiple instances of rhodecode, make sure it's globally unique for
214 ## multiple instances of rhodecode, make sure it's globally unique for
215 ## all running rhodecode instances. Leave empty if you don't use it
215 ## all running rhodecode instances. Leave empty if you don't use it
216 instance_id =
216 instance_id =
217
217
218 ## Fallback authentication plugin. Set this to a plugin ID to force the usage
218 ## Fallback authentication plugin. Set this to a plugin ID to force the usage
219 ## of an authentication plugin even if it is disabled by its settings.
219 ## of an authentication plugin even if it is disabled by its settings.
220 ## This could be useful if you are unable to log in to the system due to broken
220 ## This could be useful if you are unable to log in to the system due to broken
221 ## authentication settings. Then you can enable e.g. the internal rhodecode auth
221 ## authentication settings. Then you can enable e.g. the internal rhodecode auth
222 ## module to log in again and fix the settings.
222 ## module to log in again and fix the settings.
223 ##
223 ##
224 ## Available builtin plugin IDs (hash is part of the ID):
224 ## Available builtin plugin IDs (hash is part of the ID):
225 ## egg:rhodecode-enterprise-ce#rhodecode
225 ## egg:rhodecode-enterprise-ce#rhodecode
226 ## egg:rhodecode-enterprise-ce#pam
226 ## egg:rhodecode-enterprise-ce#pam
227 ## egg:rhodecode-enterprise-ce#ldap
227 ## egg:rhodecode-enterprise-ce#ldap
228 ## egg:rhodecode-enterprise-ce#jasig_cas
228 ## egg:rhodecode-enterprise-ce#jasig_cas
229 ## egg:rhodecode-enterprise-ce#headers
229 ## egg:rhodecode-enterprise-ce#headers
230 ## egg:rhodecode-enterprise-ce#crowd
230 ## egg:rhodecode-enterprise-ce#crowd
231 #rhodecode.auth_plugin_fallback = egg:rhodecode-enterprise-ce#rhodecode
231 #rhodecode.auth_plugin_fallback = egg:rhodecode-enterprise-ce#rhodecode
232
232
233 ## alternative HTTP response code to return on failed authentication. Default HTTP
233 ## alternative HTTP response code to return on failed authentication. Default HTTP
234 ## response is 401 HTTPUnauthorized. Currently HG clients have trouble
234 ## response is 401 HTTPUnauthorized. Currently HG clients have trouble
235 ## handling that, causing a series of failed authentication calls.
235 ## handling that, causing a series of failed authentication calls.
236 ## Set this variable to 403 to return HTTPForbidden, or any other HTTP code
236 ## Set this variable to 403 to return HTTPForbidden, or any other HTTP code
237 ## This will be served instead of the default 401 on bad authentication
237 ## This will be served instead of the default 401 on bad authentication
238 auth_ret_code =
238 auth_ret_code =
239
239
240 ## use special detection method when serving auth_ret_code, instead of serving
240 ## use special detection method when serving auth_ret_code, instead of serving
241 ## ret_code directly, use 401 initially (which triggers a credentials prompt)
241 ## ret_code directly, use 401 initially (which triggers a credentials prompt)
242 ## and then serve auth_ret_code to clients
242 ## and then serve auth_ret_code to clients
243 auth_ret_code_detection = false
243 auth_ret_code_detection = false
244
244
245 ## locking return code. When repository is locked return this HTTP code. 2XX
245 ## locking return code. When repository is locked return this HTTP code. 2XX
246 ## codes don't break the transactions while 4XX codes do
246 ## codes don't break the transactions while 4XX codes do
247 lock_ret_code = 423
247 lock_ret_code = 423
248
248
249 ## allows changing the repository location in the settings page
249 ## allows changing the repository location in the settings page
250 allow_repo_location_change = true
250 allow_repo_location_change = true
251
251
252 ## allows setting up custom hooks in the settings page
252 ## allows setting up custom hooks in the settings page
253 allow_custom_hooks_settings = true
253 allow_custom_hooks_settings = true
254
254
255 ## generated license token, go to the license page in RhodeCode settings to obtain
255 ## generated license token, go to the license page in RhodeCode settings to obtain
256 ## a new token
256 ## a new token
257 license_token =
257 license_token =
258
258
259 ## supervisor connection uri, for managing supervisor and logs.
259 ## supervisor connection uri, for managing supervisor and logs.
260 supervisor.uri =
260 supervisor.uri =
261 ## supervisord group name/id we only want this RC instance to handle
261 ## supervisord group name/id we only want this RC instance to handle
262 supervisor.group_id = prod
262 supervisor.group_id = prod
263
263
264 ## Display extended labs settings
264 ## Display extended labs settings
265 labs_settings_active = true
265 labs_settings_active = true
266
266
267 ####################################
267 ####################################
268 ### CELERY CONFIG ####
268 ### CELERY CONFIG ####
269 ####################################
269 ####################################
270 ## run: /path/to/celery worker \
270 ## run: /path/to/celery worker \
271 ## -E --beat --app rhodecode.lib.celerylib.loader \
271 ## -E --beat --app rhodecode.lib.celerylib.loader \
272 ## --scheduler rhodecode.lib.celerylib.scheduler.RcScheduler \
272 ## --scheduler rhodecode.lib.celerylib.scheduler.RcScheduler \
273 ## --loglevel DEBUG --ini /path/to/rhodecode.ini
273 ## --loglevel DEBUG --ini /path/to/rhodecode.ini
274
274
275 use_celery = false
275 use_celery = false
276
276
277 ## connection url to the message broker (default rabbitmq)
277 ## connection url to the message broker (default rabbitmq)
278 celery.broker_url = amqp://rabbitmq:qweqwe@localhost:5672/rabbitmqhost
278 celery.broker_url = amqp://rabbitmq:qweqwe@localhost:5672/rabbitmqhost
279
279
280 ## maximum tasks to execute before worker restart
280 ## maximum tasks to execute before worker restart
281 celery.max_tasks_per_child = 100
281 celery.max_tasks_per_child = 100
282
282
283 ## tasks will never be sent to the queue, but executed locally instead.
283 ## tasks will never be sent to the queue, but executed locally instead.
284 celery.task_always_eager = false
284 celery.task_always_eager = false
285
285
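For clarity, a minimal sketch (not RhodeCode code; task name and argument are illustrative) of what `celery.task_always_eager` changes: when it is true, `.delay()` executes the task in-process instead of publishing it to the broker configured in `celery.broker_url`.

from celery import Celery

app = Celery('example',
             broker='amqp://rabbitmq:qweqwe@localhost:5672/rabbitmqhost')
app.conf.task_always_eager = False  # mirrors celery.task_always_eager above

@app.task
def send_email(recipient):
    return 'sent to %s' % recipient

# with task_always_eager = false this is queued to the broker and picked up by
# a worker; with true it runs synchronously in the calling process
send_email.delay('admin@localhost')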
286 #####################################
286 #####################################
287 ### DOGPILE CACHE ####
287 ### DOGPILE CACHE ####
288 #####################################
288 #####################################
289 ## Default cache dir for caches. Putting this into a ramdisk
289 ## Default cache dir for caches. Putting this into a ramdisk
290 ## can boost performance, e.g. /tmpfs/data_ramdisk, however this might require lots
290 ## can boost performance, e.g. /tmpfs/data_ramdisk, however this might require lots
291 ## of space
291 ## of space
292 cache_dir = /tmp/rcdev/data
292 cache_dir = /tmp/rcdev/data
293
293
294 ## cache settings for permission tree, auth TTL.
294 ## cache settings for permission tree, auth TTL.
295 rc_cache.cache_perms.backend = dogpile.cache.rc.file_namespace
295 rc_cache.cache_perms.backend = dogpile.cache.rc.file_namespace
296 rc_cache.cache_perms.expiration_time = 300
296 rc_cache.cache_perms.expiration_time = 300
297 rc_cache.cache_perms.arguments.filename = /tmp/rc_cache_1
297 rc_cache.cache_perms.arguments.filename = /tmp/rc_cache_1
298
298
299 ## redis backend with distributed locks
299 ## redis backend with distributed locks
300 #rc_cache.cache_perms.backend = dogpile.cache.rc.redis
300 #rc_cache.cache_perms.backend = dogpile.cache.rc.redis
301 #rc_cache.cache_perms.expiration_time = 300
301 #rc_cache.cache_perms.expiration_time = 300
302 #rc_cache.cache_perms.arguments.host = localhost
302 #rc_cache.cache_perms.arguments.host = localhost
303 #rc_cache.cache_perms.arguments.port = 6379
303 #rc_cache.cache_perms.arguments.port = 6379
304 #rc_cache.cache_perms.arguments.db = 0
304 #rc_cache.cache_perms.arguments.db = 0
305 #rc_cache.cache_perms.arguments.redis_expiration_time = 7200
305 #rc_cache.cache_perms.arguments.redis_expiration_time = 7200
306 #rc_cache.cache_perms.arguments.distributed_lock = true
306 #rc_cache.cache_perms.arguments.distributed_lock = true
307
307
308
308
309 rc_cache.cache_repo.backend = dogpile.cache.rc.file_namespace
309 rc_cache.cache_repo.backend = dogpile.cache.rc.file_namespace
310 rc_cache.cache_repo.expiration_time = 2592000
310 rc_cache.cache_repo.expiration_time = 2592000
311 rc_cache.cache_repo.arguments.filename = /tmp/rc_cache_2
311 rc_cache.cache_repo.arguments.filename = /tmp/rc_cache_2
312
312
313 ## redis backend with distributed locks
313 ## redis backend with distributed locks
314 #rc_cache.cache_repo.backend = dogpile.cache.rc.redis
314 #rc_cache.cache_repo.backend = dogpile.cache.rc.redis
315 #rc_cache.cache_repo.expiration_time = 2592000
315 #rc_cache.cache_repo.expiration_time = 2592000
316 ## this needs to be greater than expiration_time
316 ## this needs to be greater than expiration_time
317 #rc_cache.cache_repo.arguments.redis_expiration_time = 2678400
317 #rc_cache.cache_repo.arguments.redis_expiration_time = 2678400
318 #rc_cache.cache_repo.arguments.host = localhost
318 #rc_cache.cache_repo.arguments.host = localhost
319 #rc_cache.cache_repo.arguments.port = 6379
319 #rc_cache.cache_repo.arguments.port = 6379
320 #rc_cache.cache_repo.arguments.db = 1
320 #rc_cache.cache_repo.arguments.db = 1
321 #rc_cache.cache_repo.arguments.distributed_lock = true
321 #rc_cache.cache_repo.arguments.distributed_lock = true
322
322
323 ## cache settings for SQL queries
324 rc_cache.sql_cache_short.backend = dogpile.cache.rc.memory_lru
325 rc_cache.sql_cache_short.expiration_time = 30
326
323
327
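The `dogpile.cache.rc.*` backends above are RhodeCode's own wrappers, but the settings map onto standard dogpile.cache regions. A minimal sketch using the stock backends (function and key names here are purely illustrative) shows what an `expiration_time = 30` region like `sql_cache_short` amounts to:

from dogpile.cache import make_region

sql_cache_short = make_region().configure(
    'dogpile.cache.memory_pickle',   # stand-in for dogpile.cache.rc.memory_lru
    expiration_time=30)              # rc_cache.sql_cache_short.expiration_time

@sql_cache_short.cache_on_arguments()
def get_user_settings(user_id):
    # pretend this is an expensive SQL query; its result is cached for 30s
    return {'user_id': user_id}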
324 ####################################
328 ####################################
325 ### BEAKER CACHE ####
329 ### BEAKER CACHE ####
326 ####################################
330 ####################################
327
331
328 ## locking and default file storage for Beaker. Putting this into a ramdisk
332 ## locking and default file storage for Beaker. Putting this into a ramdisk
329 ## can boost performance, e.g. %(here)s/data_ramdisk/cache/beaker_data
333 ## can boost performance, e.g. %(here)s/data_ramdisk/cache/beaker_data
330 beaker.cache.data_dir = %(here)s/data/cache/beaker_data
334 beaker.cache.data_dir = %(here)s/data/cache/beaker_data
331 beaker.cache.lock_dir = %(here)s/data/cache/beaker_lock
335 beaker.cache.lock_dir = %(here)s/data/cache/beaker_lock
332
336
333 beaker.cache.regions = long_term, sql_cache_short
337 beaker.cache.regions = long_term
334
338
335 beaker.cache.long_term.type = memory
339 beaker.cache.long_term.type = memory
336 beaker.cache.long_term.expire = 172800
340 beaker.cache.long_term.expire = 172800
337 beaker.cache.long_term.key_length = 256
341 beaker.cache.long_term.key_length = 256
338
342
339 beaker.cache.sql_cache_short.type = memory
340 beaker.cache.sql_cache_short.expire = 10
341 beaker.cache.sql_cache_short.key_length = 256
342
343
343
344 ####################################
344 ####################################
345 ### BEAKER SESSION ####
345 ### BEAKER SESSION ####
346 ####################################
346 ####################################
347
347
348 ## .session.type is the type of storage used for the session; currently allowed
348 ## .session.type is the type of storage used for the session; currently allowed
349 ## types are file, ext:memcached, ext:redis, ext:database, and memory (default).
349 ## types are file, ext:memcached, ext:redis, ext:database, and memory (default).
350 beaker.session.type = file
350 beaker.session.type = file
351 beaker.session.data_dir = %(here)s/data/sessions
351 beaker.session.data_dir = %(here)s/data/sessions
352
352
353 ## db based session, fast, and allows easy management of logged in users
353 ## db based session, fast, and allows easy management of logged in users
354 #beaker.session.type = ext:database
354 #beaker.session.type = ext:database
355 #beaker.session.table_name = db_session
355 #beaker.session.table_name = db_session
356 #beaker.session.sa.url = postgresql://postgres:secret@localhost/rhodecode
356 #beaker.session.sa.url = postgresql://postgres:secret@localhost/rhodecode
357 #beaker.session.sa.url = mysql://root:secret@127.0.0.1/rhodecode
357 #beaker.session.sa.url = mysql://root:secret@127.0.0.1/rhodecode
358 #beaker.session.sa.pool_recycle = 3600
358 #beaker.session.sa.pool_recycle = 3600
359 #beaker.session.sa.echo = false
359 #beaker.session.sa.echo = false
360
360
361 beaker.session.key = rhodecode
361 beaker.session.key = rhodecode
362 beaker.session.secret = production-rc-uytcxaz
362 beaker.session.secret = production-rc-uytcxaz
363 beaker.session.lock_dir = %(here)s/data/sessions/lock
363 beaker.session.lock_dir = %(here)s/data/sessions/lock
364
364
365 ## Secure encrypted cookie. Requires AES and AES python libraries
365 ## Secure encrypted cookie. Requires AES and AES python libraries
366 ## you must disable beaker.session.secret to use this
366 ## you must disable beaker.session.secret to use this
367 #beaker.session.encrypt_key = key_for_encryption
367 #beaker.session.encrypt_key = key_for_encryption
368 #beaker.session.validate_key = validation_key
368 #beaker.session.validate_key = validation_key
369
369
370 ## sets session as invalid (also logging out the user) if it has not been
370 ## sets session as invalid (also logging out the user) if it has not been
371 ## accessed for a given amount of time in seconds
371 ## accessed for a given amount of time in seconds
372 beaker.session.timeout = 2592000
372 beaker.session.timeout = 2592000
373 beaker.session.httponly = true
373 beaker.session.httponly = true
374 ## Path to use for the cookie. Set to prefix if you use prefix middleware
374 ## Path to use for the cookie. Set to prefix if you use prefix middleware
375 #beaker.session.cookie_path = /custom_prefix
375 #beaker.session.cookie_path = /custom_prefix
376
376
377 ## uncomment for https secure cookie
377 ## uncomment for https secure cookie
378 beaker.session.secure = false
378 beaker.session.secure = false
379
379
380 ## auto save the session so you don't need to call .save()
380 ## auto save the session so you don't need to call .save()
381 beaker.session.auto = false
381 beaker.session.auto = false
382
382
383 ## default cookie expiration time in seconds, set to `true` to expire
383 ## default cookie expiration time in seconds, set to `true` to expire
384 ## at browser close
384 ## at browser close
385 #beaker.session.cookie_expires = 3600
385 #beaker.session.cookie_expires = 3600
386
386
387 ###################################
387 ###################################
388 ## SEARCH INDEXING CONFIGURATION ##
388 ## SEARCH INDEXING CONFIGURATION ##
389 ###################################
389 ###################################
390 ## Full text search indexer is available in rhodecode-tools under
390 ## Full text search indexer is available in rhodecode-tools under
391 ## `rhodecode-tools index` command
391 ## `rhodecode-tools index` command
392
392
393 ## WHOOSH Backend, doesn't require additional services to run
393 ## WHOOSH Backend, doesn't require additional services to run
394 ## it works well with a few dozen repos
394 ## it works well with a few dozen repos
395 search.module = rhodecode.lib.index.whoosh
395 search.module = rhodecode.lib.index.whoosh
396 search.location = %(here)s/data/index
396 search.location = %(here)s/data/index
397
397
398 ########################################
398 ########################################
399 ### CHANNELSTREAM CONFIG ####
399 ### CHANNELSTREAM CONFIG ####
400 ########################################
400 ########################################
401 ## channelstream enables persistent connections and live notification
401 ## channelstream enables persistent connections and live notification
402 ## in the system. It's also used by the chat system
402 ## in the system. It's also used by the chat system
403 channelstream.enabled = false
403 channelstream.enabled = false
404
404
405 ## server address for channelstream server on the backend
405 ## server address for channelstream server on the backend
406 channelstream.server = 127.0.0.1:9800
406 channelstream.server = 127.0.0.1:9800
407
407
408 ## location of the channelstream server from outside world
408 ## location of the channelstream server from outside world
409 ## use ws:// for http or wss:// for https. This address needs to be handled
409 ## use ws:// for http or wss:// for https. This address needs to be handled
410 ## by external HTTP server such as Nginx or Apache
410 ## by external HTTP server such as Nginx or Apache
411 ## see nginx/apache configuration examples in our docs
411 ## see nginx/apache configuration examples in our docs
412 channelstream.ws_url = ws://rhodecode.yourserver.com/_channelstream
412 channelstream.ws_url = ws://rhodecode.yourserver.com/_channelstream
413 channelstream.secret = secret
413 channelstream.secret = secret
414 channelstream.history.location = %(here)s/channelstream_history
414 channelstream.history.location = %(here)s/channelstream_history
415
415
416 ## Internal application path that Javascript uses to connect into.
416 ## Internal application path that Javascript uses to connect into.
417 ## If you use proxy-prefix the prefix should be added before /_channelstream
417 ## If you use proxy-prefix the prefix should be added before /_channelstream
418 channelstream.proxy_path = /_channelstream
418 channelstream.proxy_path = /_channelstream
419
419
420
420
421 ###################################
421 ###################################
422 ## APPENLIGHT CONFIG ##
422 ## APPENLIGHT CONFIG ##
423 ###################################
423 ###################################
424
424
425 ## Appenlight is tailored to work with RhodeCode, see
425 ## Appenlight is tailored to work with RhodeCode, see
426 ## http://appenlight.com for details how to obtain an account
426 ## http://appenlight.com for details how to obtain an account
427
427
428 ## appenlight integration enabled
428 ## appenlight integration enabled
429 appenlight = false
429 appenlight = false
430
430
431 appenlight.server_url = https://api.appenlight.com
431 appenlight.server_url = https://api.appenlight.com
432 appenlight.api_key = YOUR_API_KEY
432 appenlight.api_key = YOUR_API_KEY
433 #appenlight.transport_config = https://api.appenlight.com?threaded=1&timeout=5
433 #appenlight.transport_config = https://api.appenlight.com?threaded=1&timeout=5
434
434
435 # used for JS client
435 # used for JS client
436 appenlight.api_public_key = YOUR_API_PUBLIC_KEY
436 appenlight.api_public_key = YOUR_API_PUBLIC_KEY
437
437
438 ## TWEAK AMOUNT OF INFO SENT HERE
438 ## TWEAK AMOUNT OF INFO SENT HERE
439
439
440 ## enables 404 error logging (default False)
440 ## enables 404 error logging (default False)
441 appenlight.report_404 = false
441 appenlight.report_404 = false
442
442
443 ## time in seconds after which a request is considered slow (default 1)
443 ## time in seconds after which a request is considered slow (default 1)
444 appenlight.slow_request_time = 1
444 appenlight.slow_request_time = 1
445
445
446 ## record slow requests in application
446 ## record slow requests in application
447 ## (needs to be enabled for slow datastore recording and time tracking)
447 ## (needs to be enabled for slow datastore recording and time tracking)
448 appenlight.slow_requests = true
448 appenlight.slow_requests = true
449
449
450 ## enable hooking to application loggers
450 ## enable hooking to application loggers
451 appenlight.logging = true
451 appenlight.logging = true
452
452
453 ## minimum log level for log capture
453 ## minimum log level for log capture
454 appenlight.logging.level = WARNING
454 appenlight.logging.level = WARNING
455
455
456 ## send logs only from erroneous/slow requests
456 ## send logs only from erroneous/slow requests
457 ## (saves API quota for intensive logging)
457 ## (saves API quota for intensive logging)
458 appenlight.logging_on_error = false
458 appenlight.logging_on_error = false
459
459
460 ## list of additional keywords that should be grabbed from environ object
460 ## list of additional keywords that should be grabbed from environ object
461 ## can be string with comma separated list of words in lowercase
461 ## can be string with comma separated list of words in lowercase
462 ## (by default client will always send following info:
462 ## (by default client will always send following info:
463 ## 'REMOTE_USER', 'REMOTE_ADDR', 'SERVER_NAME', 'CONTENT_TYPE' + all keys that
463 ## 'REMOTE_USER', 'REMOTE_ADDR', 'SERVER_NAME', 'CONTENT_TYPE' + all keys that
464 ## start with HTTP*); this list can be extended with additional keywords here
464 ## start with HTTP*); this list can be extended with additional keywords here
465 appenlight.environ_keys_whitelist =
465 appenlight.environ_keys_whitelist =
466
466
467 ## list of keywords that should be blanked from request object
467 ## list of keywords that should be blanked from request object
468 ## can be string with comma separated list of words in lowercase
468 ## can be string with comma separated list of words in lowercase
469 ## (by default client will always blank keys that contain following words
469 ## (by default client will always blank keys that contain following words
470 ## 'password', 'passwd', 'pwd', 'auth_tkt', 'secret', 'csrf'
470 ## 'password', 'passwd', 'pwd', 'auth_tkt', 'secret', 'csrf'
471 ## this list can be extended with additional keywords set here
471 ## this list can be extended with additional keywords set here
472 appenlight.request_keys_blacklist =
472 appenlight.request_keys_blacklist =
473
473
474 ## list of namespaces that should be ignored when gathering log entries
474 ## list of namespaces that should be ignored when gathering log entries
475 ## can be string with comma separated list of namespaces
475 ## can be string with comma separated list of namespaces
476 ## (by default the client ignores own entries: appenlight_client.client)
476 ## (by default the client ignores own entries: appenlight_client.client)
477 appenlight.log_namespace_blacklist =
477 appenlight.log_namespace_blacklist =
478
478
479
479
480 ################################################################################
480 ################################################################################
481 ## WARNING: *THE LINE BELOW MUST BE UNCOMMENTED ON A PRODUCTION ENVIRONMENT* ##
481 ## WARNING: *THE LINE BELOW MUST BE UNCOMMENTED ON A PRODUCTION ENVIRONMENT* ##
482 ## Debug mode will enable the interactive debugging tool, allowing ANYONE to ##
482 ## Debug mode will enable the interactive debugging tool, allowing ANYONE to ##
483 ## execute malicious code after an exception is raised. ##
483 ## execute malicious code after an exception is raised. ##
484 ################################################################################
484 ################################################################################
485 set debug = false
485 set debug = false
486
486
487
487
488 ###########################################
488 ###########################################
489 ### MAIN RHODECODE DATABASE CONFIG ###
489 ### MAIN RHODECODE DATABASE CONFIG ###
490 ###########################################
490 ###########################################
491 #sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode.db?timeout=30
491 #sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode.db?timeout=30
492 #sqlalchemy.db1.url = postgresql://postgres:qweqwe@localhost/rhodecode
492 #sqlalchemy.db1.url = postgresql://postgres:qweqwe@localhost/rhodecode
493 #sqlalchemy.db1.url = mysql://root:qweqwe@localhost/rhodecode
493 #sqlalchemy.db1.url = mysql://root:qweqwe@localhost/rhodecode
494 #sqlalchemy.db1.url = mysql+pymysql://root:qweqwe@localhost/rhodecode
494 #sqlalchemy.db1.url = mysql+pymysql://root:qweqwe@localhost/rhodecode
495
495
496 sqlalchemy.db1.url = postgresql://postgres:qweqwe@localhost/rhodecode
496 sqlalchemy.db1.url = postgresql://postgres:qweqwe@localhost/rhodecode
497
497
498 # see sqlalchemy docs for other advanced settings
498 # see sqlalchemy docs for other advanced settings
499
499
500 ## print the sql statements to output
500 ## print the sql statements to output
501 sqlalchemy.db1.echo = false
501 sqlalchemy.db1.echo = false
502 ## recycle the connections after this amount of seconds
502 ## recycle the connections after this amount of seconds
503 sqlalchemy.db1.pool_recycle = 3600
503 sqlalchemy.db1.pool_recycle = 3600
504 sqlalchemy.db1.convert_unicode = true
504 sqlalchemy.db1.convert_unicode = true
505
505
506 ## the number of connections to keep open inside the connection pool.
506 ## the number of connections to keep open inside the connection pool.
507 ## 0 indicates no limit
507 ## 0 indicates no limit
508 #sqlalchemy.db1.pool_size = 5
508 #sqlalchemy.db1.pool_size = 5
509
509
510 ## the number of connections to allow in connection pool "overflow", that is
510 ## the number of connections to allow in connection pool "overflow", that is
511 ## connections that can be opened above and beyond the pool_size setting,
511 ## connections that can be opened above and beyond the pool_size setting,
512 ## which defaults to five.
512 ## which defaults to five.
513 #sqlalchemy.db1.max_overflow = 10
513 #sqlalchemy.db1.max_overflow = 10
514
514
515 ## Connection check ping, used to detect broken database connections
515 ## Connection check ping, used to detect broken database connections
516 ## can be enabled to better handle cases of MySQL "gone away" errors
516 ## can be enabled to better handle cases of MySQL "gone away" errors
517 #sqlalchemy.db1.ping_connection = true
517 #sqlalchemy.db1.ping_connection = true
518
518
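Roughly, the sqlalchemy.db1.* options above correspond to `create_engine()` keyword arguments. The sketch below is illustrative only (RhodeCode builds its engine internally) and assumes SQLAlchemy 1.2+ for `pool_pre_ping`:

from sqlalchemy import create_engine

engine = create_engine(
    'postgresql://postgres:qweqwe@localhost/rhodecode',
    echo=False,          # sqlalchemy.db1.echo
    pool_recycle=3600,   # sqlalchemy.db1.pool_recycle
    pool_size=5,         # sqlalchemy.db1.pool_size
    max_overflow=10,     # sqlalchemy.db1.max_overflow
    pool_pre_ping=True)  # roughly what sqlalchemy.db1.ping_connection enables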
519 ##################
519 ##################
520 ### VCS CONFIG ###
520 ### VCS CONFIG ###
521 ##################
521 ##################
522 vcs.server.enable = true
522 vcs.server.enable = true
523 vcs.server = localhost:9900
523 vcs.server = localhost:9900
524
524
525 ## Web server connectivity protocol, responsible for web based VCS operations
525 ## Web server connectivity protocol, responsible for web based VCS operations
526 ## Available protocols are:
526 ## Available protocols are:
527 ## `http` - use http-rpc backend (default)
527 ## `http` - use http-rpc backend (default)
528 vcs.server.protocol = http
528 vcs.server.protocol = http
529
529
530 ## Push/Pull operations protocol, available options are:
530 ## Push/Pull operations protocol, available options are:
531 ## `http` - use http-rpc backend (default)
531 ## `http` - use http-rpc backend (default)
532 ##
532 ##
533 vcs.scm_app_implementation = http
533 vcs.scm_app_implementation = http
534
534
535 ## Push/Pull operations hooks protocol, available options are:
535 ## Push/Pull operations hooks protocol, available options are:
536 ## `http` - use http-rpc backend (default)
536 ## `http` - use http-rpc backend (default)
537 vcs.hooks.protocol = http
537 vcs.hooks.protocol = http
538 ## Host on which this instance is listening for hooks. If vcsserver is in another location
538 ## Host on which this instance is listening for hooks. If vcsserver is in another location
539 ## this should be adjusted.
539 ## this should be adjusted.
540 vcs.hooks.host = 127.0.0.1
540 vcs.hooks.host = 127.0.0.1
541
541
542 vcs.server.log_level = info
542 vcs.server.log_level = info
543 ## Start VCSServer with this instance as a subprocess, useful for development
543 ## Start VCSServer with this instance as a subprocess, useful for development
544 vcs.start_server = false
544 vcs.start_server = false
545
545
546 ## List of enabled VCS backends, available options are:
546 ## List of enabled VCS backends, available options are:
547 ## `hg` - mercurial
547 ## `hg` - mercurial
548 ## `git` - git
548 ## `git` - git
549 ## `svn` - subversion
549 ## `svn` - subversion
550 vcs.backends = hg, git, svn
550 vcs.backends = hg, git, svn
551
551
552 vcs.connection_timeout = 3600
552 vcs.connection_timeout = 3600
553 ## Compatibility version when creating SVN repositories. Defaults to newest version when commented out.
553 ## Compatibility version when creating SVN repositories. Defaults to newest version when commented out.
554 ## Available options are: pre-1.4-compatible, pre-1.5-compatible, pre-1.6-compatible, pre-1.8-compatible, pre-1.9-compatible
554 ## Available options are: pre-1.4-compatible, pre-1.5-compatible, pre-1.6-compatible, pre-1.8-compatible, pre-1.9-compatible
555 #vcs.svn.compatible_version = pre-1.8-compatible
555 #vcs.svn.compatible_version = pre-1.8-compatible
556
556
557
557
558 ############################################################
558 ############################################################
559 ### Subversion proxy support (mod_dav_svn) ###
559 ### Subversion proxy support (mod_dav_svn) ###
560 ### Maps RhodeCode repo groups into SVN paths for Apache ###
560 ### Maps RhodeCode repo groups into SVN paths for Apache ###
561 ############################################################
561 ############################################################
562 ## Enable or disable the config file generation.
562 ## Enable or disable the config file generation.
563 svn.proxy.generate_config = false
563 svn.proxy.generate_config = false
564 ## Generate config file with `SVNListParentPath` set to `On`.
564 ## Generate config file with `SVNListParentPath` set to `On`.
565 svn.proxy.list_parent_path = true
565 svn.proxy.list_parent_path = true
566 ## Set location and file name of generated config file.
566 ## Set location and file name of generated config file.
567 svn.proxy.config_file_path = %(here)s/mod_dav_svn.conf
567 svn.proxy.config_file_path = %(here)s/mod_dav_svn.conf
568 ## alternative mod_dav config template. This needs to be a mako template
568 ## alternative mod_dav config template. This needs to be a mako template
569 #svn.proxy.config_template = ~/.rccontrol/enterprise-1/custom_svn_conf.mako
569 #svn.proxy.config_template = ~/.rccontrol/enterprise-1/custom_svn_conf.mako
570 ## Used as a prefix to the `Location` block in the generated config file.
570 ## Used as a prefix to the `Location` block in the generated config file.
571 ## In most cases it should be set to `/`.
571 ## In most cases it should be set to `/`.
572 svn.proxy.location_root = /
572 svn.proxy.location_root = /
573 ## Command to reload the mod dav svn configuration on change.
573 ## Command to reload the mod dav svn configuration on change.
574 ## Example: `/etc/init.d/apache2 reload`
574 ## Example: `/etc/init.d/apache2 reload`
575 #svn.proxy.reload_cmd = /etc/init.d/apache2 reload
575 #svn.proxy.reload_cmd = /etc/init.d/apache2 reload
576 ## If the timeout expires before the reload command finishes, the command will
576 ## If the timeout expires before the reload command finishes, the command will
577 ## be killed. Setting it to zero means no timeout. Defaults to 10 seconds.
577 ## be killed. Setting it to zero means no timeout. Defaults to 10 seconds.
578 #svn.proxy.reload_timeout = 10
578 #svn.proxy.reload_timeout = 10
579
579
580 ############################################################
580 ############################################################
581 ### SSH Support Settings ###
581 ### SSH Support Settings ###
582 ############################################################
582 ############################################################
583
583
584 ## Defines if a custom authorized_keys file should be created and written on
584 ## Defines if a custom authorized_keys file should be created and written on
585 ## any change of user ssh keys. Setting this to false also disables the possibility
585 ## any change of user ssh keys. Setting this to false also disables the possibility
586 ## of adding SSH keys by users from the web interface. Super admins can still
586 ## of adding SSH keys by users from the web interface. Super admins can still
587 ## manage SSH Keys.
587 ## manage SSH Keys.
588 ssh.generate_authorized_keyfile = false
588 ssh.generate_authorized_keyfile = false
589
589
590 ## Options for ssh, default is `no-pty,no-port-forwarding,no-X11-forwarding,no-agent-forwarding`
590 ## Options for ssh, default is `no-pty,no-port-forwarding,no-X11-forwarding,no-agent-forwarding`
591 # ssh.authorized_keys_ssh_opts =
591 # ssh.authorized_keys_ssh_opts =
592
592
593 ## Path to the authorized_keys file where the generated entries are placed.
593 ## Path to the authorized_keys file where the generated entries are placed.
594 ## It is possible to have multiple key files specified in `sshd_config` e.g.
594 ## It is possible to have multiple key files specified in `sshd_config` e.g.
595 ## AuthorizedKeysFile %h/.ssh/authorized_keys %h/.ssh/authorized_keys_rhodecode
595 ## AuthorizedKeysFile %h/.ssh/authorized_keys %h/.ssh/authorized_keys_rhodecode
596 ssh.authorized_keys_file_path = ~/.ssh/authorized_keys_rhodecode
596 ssh.authorized_keys_file_path = ~/.ssh/authorized_keys_rhodecode
597
597
598 ## Command to execute the SSH wrapper. The binary is available in the
598 ## Command to execute the SSH wrapper. The binary is available in the
599 ## rhodecode installation directory.
599 ## rhodecode installation directory.
600 ## e.g ~/.rccontrol/community-1/profile/bin/rc-ssh-wrapper
600 ## e.g ~/.rccontrol/community-1/profile/bin/rc-ssh-wrapper
601 ssh.wrapper_cmd = ~/.rccontrol/community-1/rc-ssh-wrapper
601 ssh.wrapper_cmd = ~/.rccontrol/community-1/rc-ssh-wrapper
602
602
603 ## Allow shell when executing the ssh-wrapper command
603 ## Allow shell when executing the ssh-wrapper command
604 ssh.wrapper_cmd_allow_shell = false
604 ssh.wrapper_cmd_allow_shell = false
605
605
606 ## Enables logging, and detailed output sent back to the client during SSH
606 ## Enables logging, and detailed output sent back to the client during SSH
607 ## operations. Useful for debugging, shouldn't be used in production.
607 ## operations. Useful for debugging, shouldn't be used in production.
608 ssh.enable_debug_logging = false
608 ssh.enable_debug_logging = false
609
609
610 ## Paths to binary executables, by default they are just the names, but we can
610 ## Paths to binary executables, by default they are just the names, but we can
611 ## override them if we want to use custom ones
611 ## override them if we want to use custom ones
612 ssh.executable.hg = ~/.rccontrol/vcsserver-1/profile/bin/hg
612 ssh.executable.hg = ~/.rccontrol/vcsserver-1/profile/bin/hg
613 ssh.executable.git = ~/.rccontrol/vcsserver-1/profile/bin/git
613 ssh.executable.git = ~/.rccontrol/vcsserver-1/profile/bin/git
614 ssh.executable.svn = ~/.rccontrol/vcsserver-1/profile/bin/svnserve
614 ssh.executable.svn = ~/.rccontrol/vcsserver-1/profile/bin/svnserve
615
615
616
616
617 ## Dummy marker to add new entries after.
617 ## Dummy marker to add new entries after.
618 ## Add any custom entries below. Please don't remove.
618 ## Add any custom entries below. Please don't remove.
619 custom.conf = 1
619 custom.conf = 1
620
620
621
621
622 ################################
622 ################################
623 ### LOGGING CONFIGURATION ####
623 ### LOGGING CONFIGURATION ####
624 ################################
624 ################################
625 [loggers]
625 [loggers]
626 keys = root, sqlalchemy, beaker, celery, rhodecode, ssh_wrapper
626 keys = root, sqlalchemy, beaker, celery, rhodecode, ssh_wrapper
627
627
628 [handlers]
628 [handlers]
629 keys = console, console_sql
629 keys = console, console_sql
630
630
631 [formatters]
631 [formatters]
632 keys = generic, color_formatter, color_formatter_sql
632 keys = generic, color_formatter, color_formatter_sql
633
633
634 #############
634 #############
635 ## LOGGERS ##
635 ## LOGGERS ##
636 #############
636 #############
637 [logger_root]
637 [logger_root]
638 level = NOTSET
638 level = NOTSET
639 handlers = console
639 handlers = console
640
640
641 [logger_sqlalchemy]
641 [logger_sqlalchemy]
642 level = INFO
642 level = INFO
643 handlers = console_sql
643 handlers = console_sql
644 qualname = sqlalchemy.engine
644 qualname = sqlalchemy.engine
645 propagate = 0
645 propagate = 0
646
646
647 [logger_beaker]
647 [logger_beaker]
648 level = DEBUG
648 level = DEBUG
649 handlers =
649 handlers =
650 qualname = beaker.container
650 qualname = beaker.container
651 propagate = 1
651 propagate = 1
652
652
653 [logger_rhodecode]
653 [logger_rhodecode]
654 level = DEBUG
654 level = DEBUG
655 handlers =
655 handlers =
656 qualname = rhodecode
656 qualname = rhodecode
657 propagate = 1
657 propagate = 1
658
658
659 [logger_ssh_wrapper]
659 [logger_ssh_wrapper]
660 level = DEBUG
660 level = DEBUG
661 handlers =
661 handlers =
662 qualname = ssh_wrapper
662 qualname = ssh_wrapper
663 propagate = 1
663 propagate = 1
664
664
665 [logger_celery]
665 [logger_celery]
666 level = DEBUG
666 level = DEBUG
667 handlers =
667 handlers =
668 qualname = celery
668 qualname = celery
669
669
670
670
671 ##############
671 ##############
672 ## HANDLERS ##
672 ## HANDLERS ##
673 ##############
673 ##############
674
674
675 [handler_console]
675 [handler_console]
676 class = StreamHandler
676 class = StreamHandler
677 args = (sys.stderr, )
677 args = (sys.stderr, )
678 level = INFO
678 level = INFO
679 formatter = generic
679 formatter = generic
680
680
681 [handler_console_sql]
681 [handler_console_sql]
682 # "level = DEBUG" logs SQL queries and results.
682 # "level = DEBUG" logs SQL queries and results.
683 # "level = INFO" logs SQL queries.
683 # "level = INFO" logs SQL queries.
684 # "level = WARN" logs neither. (Recommended for production systems.)
684 # "level = WARN" logs neither. (Recommended for production systems.)
685 class = StreamHandler
685 class = StreamHandler
686 args = (sys.stderr, )
686 args = (sys.stderr, )
687 level = WARN
687 level = WARN
688 formatter = generic
688 formatter = generic
689
689
690 ################
690 ################
691 ## FORMATTERS ##
691 ## FORMATTERS ##
692 ################
692 ################
693
693
694 [formatter_generic]
694 [formatter_generic]
695 class = rhodecode.lib.logging_formatter.ExceptionAwareFormatter
695 class = rhodecode.lib.logging_formatter.ExceptionAwareFormatter
696 format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s
696 format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s
697 datefmt = %Y-%m-%d %H:%M:%S
697 datefmt = %Y-%m-%d %H:%M:%S
698
698
699 [formatter_color_formatter]
699 [formatter_color_formatter]
700 class = rhodecode.lib.logging_formatter.ColorFormatter
700 class = rhodecode.lib.logging_formatter.ColorFormatter
701 format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s
701 format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s
702 datefmt = %Y-%m-%d %H:%M:%S
702 datefmt = %Y-%m-%d %H:%M:%S
703
703
704 [formatter_color_formatter_sql]
704 [formatter_color_formatter_sql]
705 class = rhodecode.lib.logging_formatter.ColorFormatterSql
705 class = rhodecode.lib.logging_formatter.ColorFormatterSql
706 format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
706 format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
707 datefmt = %Y-%m-%d %H:%M:%S
707 datefmt = %Y-%m-%d %H:%M:%S
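The [loggers]/[handlers]/[formatters] sections above follow the standard library's fileConfig format; gunicorn's `--log-config rhodecode.ini` consumes them directly. A minimal sketch of loading them by hand (the path below is a placeholder):

import logging
import logging.config

logging.config.fileConfig(
    '/path/to/rhodecode.ini',           # placeholder path to this ini file
    disable_existing_loggers=False)

log = logging.getLogger('rhodecode')
log.debug('routed through the console handler and generic formatter above')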
@@ -1,300 +1,299 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2018 RhodeCode GmbH
3 # Copyright (C) 2010-2018 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import mock
21 import mock
22 import pytest
22 import pytest
23 from rhodecode.model.db import User, UserIpMap
23 from rhodecode.model.db import User, UserIpMap
24 from rhodecode.model.meta import Session
24 from rhodecode.model.meta import Session
25 from rhodecode.model.permission import PermissionModel
25 from rhodecode.model.permission import PermissionModel
26 from rhodecode.model.ssh_key import SshKeyModel
26 from rhodecode.model.ssh_key import SshKeyModel
27 from rhodecode.tests import (
27 from rhodecode.tests import (
28 TestController, clear_all_caches, assert_session_flash)
28 TestController, clear_cache_regions, assert_session_flash)
29
29
30
30
31 def route_path(name, params=None, **kwargs):
31 def route_path(name, params=None, **kwargs):
32 import urllib
32 import urllib
33 from rhodecode.apps._base import ADMIN_PREFIX
33 from rhodecode.apps._base import ADMIN_PREFIX
34
34
35 base_url = {
35 base_url = {
36 'edit_user_ips':
36 'edit_user_ips':
37 ADMIN_PREFIX + '/users/{user_id}/edit/ips',
37 ADMIN_PREFIX + '/users/{user_id}/edit/ips',
38 'edit_user_ips_add':
38 'edit_user_ips_add':
39 ADMIN_PREFIX + '/users/{user_id}/edit/ips/new',
39 ADMIN_PREFIX + '/users/{user_id}/edit/ips/new',
40 'edit_user_ips_delete':
40 'edit_user_ips_delete':
41 ADMIN_PREFIX + '/users/{user_id}/edit/ips/delete',
41 ADMIN_PREFIX + '/users/{user_id}/edit/ips/delete',
42
42
43 'admin_permissions_application':
43 'admin_permissions_application':
44 ADMIN_PREFIX + '/permissions/application',
44 ADMIN_PREFIX + '/permissions/application',
45 'admin_permissions_application_update':
45 'admin_permissions_application_update':
46 ADMIN_PREFIX + '/permissions/application/update',
46 ADMIN_PREFIX + '/permissions/application/update',
47
47
48 'admin_permissions_global':
48 'admin_permissions_global':
49 ADMIN_PREFIX + '/permissions/global',
49 ADMIN_PREFIX + '/permissions/global',
50 'admin_permissions_global_update':
50 'admin_permissions_global_update':
51 ADMIN_PREFIX + '/permissions/global/update',
51 ADMIN_PREFIX + '/permissions/global/update',
52
52
53 'admin_permissions_object':
53 'admin_permissions_object':
54 ADMIN_PREFIX + '/permissions/object',
54 ADMIN_PREFIX + '/permissions/object',
55 'admin_permissions_object_update':
55 'admin_permissions_object_update':
56 ADMIN_PREFIX + '/permissions/object/update',
56 ADMIN_PREFIX + '/permissions/object/update',
57
57
58 'admin_permissions_ips':
58 'admin_permissions_ips':
59 ADMIN_PREFIX + '/permissions/ips',
59 ADMIN_PREFIX + '/permissions/ips',
60 'admin_permissions_overview':
60 'admin_permissions_overview':
61 ADMIN_PREFIX + '/permissions/overview',
61 ADMIN_PREFIX + '/permissions/overview',
62
62
63 'admin_permissions_ssh_keys':
63 'admin_permissions_ssh_keys':
64 ADMIN_PREFIX + '/permissions/ssh_keys',
64 ADMIN_PREFIX + '/permissions/ssh_keys',
65 'admin_permissions_ssh_keys_data':
65 'admin_permissions_ssh_keys_data':
66 ADMIN_PREFIX + '/permissions/ssh_keys/data',
66 ADMIN_PREFIX + '/permissions/ssh_keys/data',
67 'admin_permissions_ssh_keys_update':
67 'admin_permissions_ssh_keys_update':
68 ADMIN_PREFIX + '/permissions/ssh_keys/update'
68 ADMIN_PREFIX + '/permissions/ssh_keys/update'
69
69
70 }[name].format(**kwargs)
70 }[name].format(**kwargs)
71
71
72 if params:
72 if params:
73 base_url = '{}?{}'.format(base_url, urllib.urlencode(params))
73 base_url = '{}?{}'.format(base_url, urllib.urlencode(params))
74 return base_url
74 return base_url
75
75
76
76
77 class TestAdminPermissionsController(TestController):
77 class TestAdminPermissionsController(TestController):
78
78
79 @pytest.fixture(scope='class', autouse=True)
79 @pytest.fixture(scope='class', autouse=True)
80 def prepare(self, request):
80 def prepare(self, request):
81 # cleanup and reset to default permissions after
81 # cleanup and reset to default permissions after
82 @request.addfinalizer
82 @request.addfinalizer
83 def cleanup():
83 def cleanup():
84 PermissionModel().create_default_user_permissions(
84 PermissionModel().create_default_user_permissions(
85 User.get_default_user(), force=True)
85 User.get_default_user(), force=True)
86
86
87 def test_index_application(self):
87 def test_index_application(self):
88 self.log_user()
88 self.log_user()
89 self.app.get(route_path('admin_permissions_application'))
89 self.app.get(route_path('admin_permissions_application'))
90
90
91 @pytest.mark.parametrize(
91 @pytest.mark.parametrize(
92 'anonymous, default_register, default_register_message, default_password_reset,'
92 'anonymous, default_register, default_register_message, default_password_reset,'
93 'default_extern_activate, expect_error, expect_form_error', [
93 'default_extern_activate, expect_error, expect_form_error', [
94 (True, 'hg.register.none', '', 'hg.password_reset.enabled', 'hg.extern_activate.manual',
94 (True, 'hg.register.none', '', 'hg.password_reset.enabled', 'hg.extern_activate.manual',
95 False, False),
95 False, False),
96 (True, 'hg.register.manual_activate', '', 'hg.password_reset.enabled', 'hg.extern_activate.auto',
96 (True, 'hg.register.manual_activate', '', 'hg.password_reset.enabled', 'hg.extern_activate.auto',
97 False, False),
97 False, False),
98 (True, 'hg.register.auto_activate', '', 'hg.password_reset.enabled', 'hg.extern_activate.manual',
98 (True, 'hg.register.auto_activate', '', 'hg.password_reset.enabled', 'hg.extern_activate.manual',
99 False, False),
99 False, False),
100 (True, 'hg.register.auto_activate', '', 'hg.password_reset.enabled', 'hg.extern_activate.manual',
100 (True, 'hg.register.auto_activate', '', 'hg.password_reset.enabled', 'hg.extern_activate.manual',
101 False, False),
101 False, False),
102 (True, 'hg.register.XXX', '', 'hg.password_reset.enabled', 'hg.extern_activate.manual',
102 (True, 'hg.register.XXX', '', 'hg.password_reset.enabled', 'hg.extern_activate.manual',
103 False, True),
103 False, True),
104 (True, '', '', 'hg.password_reset.enabled', '', True, False),
104 (True, '', '', 'hg.password_reset.enabled', '', True, False),
105 ])
105 ])
106 def test_update_application_permissions(
106 def test_update_application_permissions(
107 self, anonymous, default_register, default_register_message, default_password_reset,
107 self, anonymous, default_register, default_register_message, default_password_reset,
108 default_extern_activate, expect_error, expect_form_error):
108 default_extern_activate, expect_error, expect_form_error):
109
109
110 self.log_user()
110 self.log_user()
111
111
112 # TODO: anonymous access set here to False, breaks some other tests
112 # TODO: anonymous access set here to False, breaks some other tests
113 params = {
113 params = {
114 'csrf_token': self.csrf_token,
114 'csrf_token': self.csrf_token,
115 'anonymous': anonymous,
115 'anonymous': anonymous,
116 'default_register': default_register,
116 'default_register': default_register,
117 'default_register_message': default_register_message,
117 'default_register_message': default_register_message,
118 'default_password_reset': default_password_reset,
118 'default_password_reset': default_password_reset,
119 'default_extern_activate': default_extern_activate,
119 'default_extern_activate': default_extern_activate,
120 }
120 }
121 response = self.app.post(route_path('admin_permissions_application_update'),
121 response = self.app.post(route_path('admin_permissions_application_update'),
122 params=params)
122 params=params)
123 if expect_form_error:
123 if expect_form_error:
124 assert response.status_int == 200
124 assert response.status_int == 200
125 response.mustcontain('Value must be one of')
125 response.mustcontain('Value must be one of')
126 else:
126 else:
127 if expect_error:
127 if expect_error:
128 msg = 'Error occurred during update of permissions'
128 msg = 'Error occurred during update of permissions'
129 else:
129 else:
130 msg = 'Application permissions updated successfully'
130 msg = 'Application permissions updated successfully'
131 assert_session_flash(response, msg)
131 assert_session_flash(response, msg)
132
132
133 def test_index_object(self):
133 def test_index_object(self):
134 self.log_user()
134 self.log_user()
135 self.app.get(route_path('admin_permissions_object'))
135 self.app.get(route_path('admin_permissions_object'))
136
136
137 @pytest.mark.parametrize(
137 @pytest.mark.parametrize(
138 'repo, repo_group, user_group, expect_error, expect_form_error', [
138 'repo, repo_group, user_group, expect_error, expect_form_error', [
139 ('repository.none', 'group.none', 'usergroup.none', False, False),
139 ('repository.none', 'group.none', 'usergroup.none', False, False),
140 ('repository.read', 'group.read', 'usergroup.read', False, False),
140 ('repository.read', 'group.read', 'usergroup.read', False, False),
141 ('repository.write', 'group.write', 'usergroup.write',
141 ('repository.write', 'group.write', 'usergroup.write',
142 False, False),
142 False, False),
143 ('repository.admin', 'group.admin', 'usergroup.admin',
143 ('repository.admin', 'group.admin', 'usergroup.admin',
144 False, False),
144 False, False),
145 ('repository.XXX', 'group.admin', 'usergroup.admin', False, True),
145 ('repository.XXX', 'group.admin', 'usergroup.admin', False, True),
146 ('', '', '', True, False),
146 ('', '', '', True, False),
147 ])
147 ])
148 def test_update_object_permissions(self, repo, repo_group, user_group,
148 def test_update_object_permissions(self, repo, repo_group, user_group,
149 expect_error, expect_form_error):
149 expect_error, expect_form_error):
150 self.log_user()
150 self.log_user()
151
151
152 params = {
152 params = {
153 'csrf_token': self.csrf_token,
153 'csrf_token': self.csrf_token,
154 'default_repo_perm': repo,
154 'default_repo_perm': repo,
155 'overwrite_default_repo': False,
155 'overwrite_default_repo': False,
156 'default_group_perm': repo_group,
156 'default_group_perm': repo_group,
157 'overwrite_default_group': False,
157 'overwrite_default_group': False,
158 'default_user_group_perm': user_group,
158 'default_user_group_perm': user_group,
159 'overwrite_default_user_group': False,
159 'overwrite_default_user_group': False,
160 }
160 }
161 response = self.app.post(route_path('admin_permissions_object_update'),
161 response = self.app.post(route_path('admin_permissions_object_update'),
162 params=params)
162 params=params)
163 if expect_form_error:
163 if expect_form_error:
164 assert response.status_int == 200
164 assert response.status_int == 200
165 response.mustcontain('Value must be one of')
165 response.mustcontain('Value must be one of')
166 else:
166 else:
167 if expect_error:
167 if expect_error:
168 msg = 'Error occurred during update of permissions'
168 msg = 'Error occurred during update of permissions'
169 else:
169 else:
170 msg = 'Object permissions updated successfully'
170 msg = 'Object permissions updated successfully'
171 assert_session_flash(response, msg)
171 assert_session_flash(response, msg)
172
172
173 def test_index_global(self):
173 def test_index_global(self):
174 self.log_user()
174 self.log_user()
175 self.app.get(route_path('admin_permissions_global'))
175 self.app.get(route_path('admin_permissions_global'))
176
176
177 @pytest.mark.parametrize(
177 @pytest.mark.parametrize(
178 'repo_create, repo_create_write, user_group_create, repo_group_create,'
178 'repo_create, repo_create_write, user_group_create, repo_group_create,'
179 'fork_create, inherit_default_permissions, expect_error,'
179 'fork_create, inherit_default_permissions, expect_error,'
180 'expect_form_error', [
180 'expect_form_error', [
181 ('hg.create.none', 'hg.create.write_on_repogroup.false',
181 ('hg.create.none', 'hg.create.write_on_repogroup.false',
182 'hg.usergroup.create.false', 'hg.repogroup.create.false',
182 'hg.usergroup.create.false', 'hg.repogroup.create.false',
183 'hg.fork.none', 'hg.inherit_default_perms.false', False, False),
183 'hg.fork.none', 'hg.inherit_default_perms.false', False, False),
184 ('hg.create.repository', 'hg.create.write_on_repogroup.true',
184 ('hg.create.repository', 'hg.create.write_on_repogroup.true',
185 'hg.usergroup.create.true', 'hg.repogroup.create.true',
185 'hg.usergroup.create.true', 'hg.repogroup.create.true',
186 'hg.fork.repository', 'hg.inherit_default_perms.false',
186 'hg.fork.repository', 'hg.inherit_default_perms.false',
187 False, False),
187 False, False),
188 ('hg.create.XXX', 'hg.create.write_on_repogroup.true',
188 ('hg.create.XXX', 'hg.create.write_on_repogroup.true',
189 'hg.usergroup.create.true', 'hg.repogroup.create.true',
189 'hg.usergroup.create.true', 'hg.repogroup.create.true',
190 'hg.fork.repository', 'hg.inherit_default_perms.false',
190 'hg.fork.repository', 'hg.inherit_default_perms.false',
191 False, True),
191 False, True),
192 ('', '', '', '', '', '', True, False),
192 ('', '', '', '', '', '', True, False),
193 ])
193 ])
194 def test_update_global_permissions(
194 def test_update_global_permissions(
195 self, repo_create, repo_create_write, user_group_create,
195 self, repo_create, repo_create_write, user_group_create,
196 repo_group_create, fork_create, inherit_default_permissions,
196 repo_group_create, fork_create, inherit_default_permissions,
197 expect_error, expect_form_error):
197 expect_error, expect_form_error):
198 self.log_user()
198 self.log_user()
199
199
200 params = {
200 params = {
201 'csrf_token': self.csrf_token,
201 'csrf_token': self.csrf_token,
202 'default_repo_create': repo_create,
202 'default_repo_create': repo_create,
203 'default_repo_create_on_write': repo_create_write,
203 'default_repo_create_on_write': repo_create_write,
204 'default_user_group_create': user_group_create,
204 'default_user_group_create': user_group_create,
205 'default_repo_group_create': repo_group_create,
205 'default_repo_group_create': repo_group_create,
206 'default_fork_create': fork_create,
206 'default_fork_create': fork_create,
207 'default_inherit_default_permissions': inherit_default_permissions
207 'default_inherit_default_permissions': inherit_default_permissions
208 }
208 }
209 response = self.app.post(route_path('admin_permissions_global_update'),
209 response = self.app.post(route_path('admin_permissions_global_update'),
210 params=params)
210 params=params)
211 if expect_form_error:
211 if expect_form_error:
212 assert response.status_int == 200
212 assert response.status_int == 200
213 response.mustcontain('Value must be one of')
213 response.mustcontain('Value must be one of')
214 else:
214 else:
215 if expect_error:
215 if expect_error:
216 msg = 'Error occurred during update of permissions'
216 msg = 'Error occurred during update of permissions'
217 else:
217 else:
218 msg = 'Global permissions updated successfully'
218 msg = 'Global permissions updated successfully'
219 assert_session_flash(response, msg)
219 assert_session_flash(response, msg)
220
220
221 def test_index_ips(self):
221 def test_index_ips(self):
222 self.log_user()
222 self.log_user()
223 response = self.app.get(route_path('admin_permissions_ips'))
223 response = self.app.get(route_path('admin_permissions_ips'))
224 # TODO: Test response...
225 response.mustcontain('All IP addresses are allowed')
224 response.mustcontain('All IP addresses are allowed')
226
225
227 def test_add_delete_ips(self):
226 def test_add_delete_ips(self):
227 clear_cache_regions(['sql_cache_short'])
228 self.log_user()
228 self.log_user()
229 clear_all_caches()
230
229
231 # ADD
230 # ADD
232 default_user_id = User.get_default_user().user_id
231 default_user_id = User.get_default_user().user_id
233 self.app.post(
232 self.app.post(
234 route_path('edit_user_ips_add', user_id=default_user_id),
233 route_path('edit_user_ips_add', user_id=default_user_id),
235 params={'new_ip': '127.0.0.0/24', 'csrf_token': self.csrf_token})
234 params={'new_ip': '0.0.0.0/24', 'csrf_token': self.csrf_token})
236
235
237 response = self.app.get(route_path('admin_permissions_ips'))
236 response = self.app.get(route_path('admin_permissions_ips'))
238 response.mustcontain('127.0.0.0/24')
237 response.mustcontain('0.0.0.0/24')
239 response.mustcontain('127.0.0.0 - 127.0.0.255')
238 response.mustcontain('0.0.0.0 - 0.0.0.255')
240
239
241 # DELETE
240 # DELETE
242 default_user_id = User.get_default_user().user_id
241 default_user_id = User.get_default_user().user_id
243 del_ip_id = UserIpMap.query().filter(UserIpMap.user_id ==
242 del_ip_id = UserIpMap.query().filter(UserIpMap.user_id ==
244 default_user_id).first().ip_id
243 default_user_id).first().ip_id
245
244
246 response = self.app.post(
245 response = self.app.post(
247 route_path('edit_user_ips_delete', user_id=default_user_id),
246 route_path('edit_user_ips_delete', user_id=default_user_id),
248 params={'del_ip_id': del_ip_id, 'csrf_token': self.csrf_token})
247 params={'del_ip_id': del_ip_id, 'csrf_token': self.csrf_token})
249
248
250 assert_session_flash(response, 'Removed ip address from user whitelist')
249 assert_session_flash(response, 'Removed ip address from user whitelist')
251
250
252 clear_all_caches()
251 clear_cache_regions(['sql_cache_short'])
253 response = self.app.get(route_path('admin_permissions_ips'))
252 response = self.app.get(route_path('admin_permissions_ips'))
254 response.mustcontain('All IP addresses are allowed')
253 response.mustcontain('All IP addresses are allowed')
255 response.mustcontain(no=['127.0.0.0/24'])
254 response.mustcontain(no=['0.0.0.0/24'])
256 response.mustcontain(no=['127.0.0.0 - 127.0.0.255'])
255 response.mustcontain(no=['0.0.0.0 - 0.0.0.255'])
257
256
258 def test_index_overview(self):
257 def test_index_overview(self):
259 self.log_user()
258 self.log_user()
260 self.app.get(route_path('admin_permissions_overview'))
259 self.app.get(route_path('admin_permissions_overview'))
261
260
262 def test_ssh_keys(self):
261 def test_ssh_keys(self):
263 self.log_user()
262 self.log_user()
264 self.app.get(route_path('admin_permissions_ssh_keys'), status=200)
263 self.app.get(route_path('admin_permissions_ssh_keys'), status=200)
265
264
266 def test_ssh_keys_data(self, user_util, xhr_header):
265 def test_ssh_keys_data(self, user_util, xhr_header):
267 self.log_user()
266 self.log_user()
268 response = self.app.get(route_path('admin_permissions_ssh_keys_data'),
267 response = self.app.get(route_path('admin_permissions_ssh_keys_data'),
269 extra_environ=xhr_header)
268 extra_environ=xhr_header)
270 assert response.json == {u'data': [], u'draw': None,
269 assert response.json == {u'data': [], u'draw': None,
271 u'recordsFiltered': 0, u'recordsTotal': 0}
270 u'recordsFiltered': 0, u'recordsTotal': 0}
272
271
273 dummy_user = user_util.create_user()
272 dummy_user = user_util.create_user()
274 SshKeyModel().create(dummy_user, 'ab:cd:ef', 'KEYKEY', 'test_key')
273 SshKeyModel().create(dummy_user, 'ab:cd:ef', 'KEYKEY', 'test_key')
275 Session().commit()
274 Session().commit()
276 response = self.app.get(route_path('admin_permissions_ssh_keys_data'),
275 response = self.app.get(route_path('admin_permissions_ssh_keys_data'),
277 extra_environ=xhr_header)
276 extra_environ=xhr_header)
278 assert response.json['data'][0]['fingerprint'] == 'ab:cd:ef'
277 assert response.json['data'][0]['fingerprint'] == 'ab:cd:ef'
279
278
280 def test_ssh_keys_update(self):
279 def test_ssh_keys_update(self):
281 self.log_user()
280 self.log_user()
282 response = self.app.post(
281 response = self.app.post(
283 route_path('admin_permissions_ssh_keys_update'),
282 route_path('admin_permissions_ssh_keys_update'),
284 dict(csrf_token=self.csrf_token), status=302)
283 dict(csrf_token=self.csrf_token), status=302)
285
284
286 assert_session_flash(
285 assert_session_flash(
287 response, 'Updated SSH keys file')
286 response, 'Updated SSH keys file')
288
287
289 def test_ssh_keys_update_disabled(self):
288 def test_ssh_keys_update_disabled(self):
290 self.log_user()
289 self.log_user()
291
290
292 from rhodecode.apps.admin.views.permissions import AdminPermissionsView
291 from rhodecode.apps.admin.views.permissions import AdminPermissionsView
293 with mock.patch.object(AdminPermissionsView, 'ssh_enabled',
292 with mock.patch.object(AdminPermissionsView, 'ssh_enabled',
294 return_value=False):
293 return_value=False):
295 response = self.app.post(
294 response = self.app.post(
296 route_path('admin_permissions_ssh_keys_update'),
295 route_path('admin_permissions_ssh_keys_update'),
297 dict(csrf_token=self.csrf_token), status=302)
296 dict(csrf_token=self.csrf_token), status=302)
298
297
299 assert_session_flash(
298 assert_session_flash(
300 response, 'SSH key support is disabled in .ini file')
\ No newline at end of file
299 response, 'SSH key support is disabled in .ini file')
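The change above from clear_all_caches to clear_cache_regions(['sql_cache_short']) follows this changeset's move of the sql_cache_short region to dogpile. The actual helper lives in rhodecode.tests and is not part of this diff; a minimal sketch of the idea on top of dogpile.cache (the region registry and memory backend here are assumptions for illustration only):

    from dogpile.cache import make_region

    # illustrative registry of configured dogpile regions
    _regions = {
        'sql_cache_short': make_region().configure(
            'dogpile.cache.memory', expiration_time=30),
    }

    def clear_cache_regions(region_names=None):
        """Invalidate the named dogpile regions, or every known region if None."""
        for name, region in _regions.items():
            if region_names is None or name in region_names:
                region.invalidate(hard=True)  # hard invalidation drops cached values
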
@@ -1,121 +1,121 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2018 RhodeCode GmbH
3 # Copyright (C) 2010-2018 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import pytest
21 import pytest
22
22
23 from rhodecode.lib import helpers as h
23 from rhodecode.lib import helpers as h
24 from rhodecode.tests import (
24 from rhodecode.tests import (
25 TestController, clear_all_caches,
25 TestController, clear_cache_regions,
26 TEST_USER_ADMIN_LOGIN, TEST_USER_ADMIN_PASS)
26 TEST_USER_ADMIN_LOGIN, TEST_USER_ADMIN_PASS)
27 from rhodecode.tests.fixture import Fixture
27 from rhodecode.tests.fixture import Fixture
28 from rhodecode.tests.utils import AssertResponse
28 from rhodecode.tests.utils import AssertResponse
29
29
30 fixture = Fixture()
30 fixture = Fixture()
31
31
32
32
33 def route_path(name, params=None, **kwargs):
33 def route_path(name, params=None, **kwargs):
34 import urllib
34 import urllib
35 from rhodecode.apps._base import ADMIN_PREFIX
35 from rhodecode.apps._base import ADMIN_PREFIX
36
36
37 base_url = {
37 base_url = {
38 'login': ADMIN_PREFIX + '/login',
38 'login': ADMIN_PREFIX + '/login',
39 'logout': ADMIN_PREFIX + '/logout',
39 'logout': ADMIN_PREFIX + '/logout',
40 'register': ADMIN_PREFIX + '/register',
40 'register': ADMIN_PREFIX + '/register',
41 'reset_password':
41 'reset_password':
42 ADMIN_PREFIX + '/password_reset',
42 ADMIN_PREFIX + '/password_reset',
43 'reset_password_confirmation':
43 'reset_password_confirmation':
44 ADMIN_PREFIX + '/password_reset_confirmation',
44 ADMIN_PREFIX + '/password_reset_confirmation',
45
45
46 'admin_permissions_application':
46 'admin_permissions_application':
47 ADMIN_PREFIX + '/permissions/application',
47 ADMIN_PREFIX + '/permissions/application',
48 'admin_permissions_application_update':
48 'admin_permissions_application_update':
49 ADMIN_PREFIX + '/permissions/application/update',
49 ADMIN_PREFIX + '/permissions/application/update',
50 }[name].format(**kwargs)
50 }[name].format(**kwargs)
51
51
52 if params:
52 if params:
53 base_url = '{}?{}'.format(base_url, urllib.urlencode(params))
53 base_url = '{}?{}'.format(base_url, urllib.urlencode(params))
54 return base_url
54 return base_url
55
55
56
56
57 class TestPasswordReset(TestController):
57 class TestPasswordReset(TestController):
58
58
59 @pytest.mark.parametrize(
59 @pytest.mark.parametrize(
60 'pwd_reset_setting, show_link, show_reset', [
60 'pwd_reset_setting, show_link, show_reset', [
61 ('hg.password_reset.enabled', True, True),
61 ('hg.password_reset.enabled', True, True),
62 ('hg.password_reset.hidden', False, True),
62 ('hg.password_reset.hidden', False, True),
63 ('hg.password_reset.disabled', False, False),
63 ('hg.password_reset.disabled', False, False),
64 ])
64 ])
65 def test_password_reset_settings(
65 def test_password_reset_settings(
66 self, pwd_reset_setting, show_link, show_reset):
66 self, pwd_reset_setting, show_link, show_reset):
67 clear_all_caches()
67 clear_cache_regions()
68 self.log_user(TEST_USER_ADMIN_LOGIN, TEST_USER_ADMIN_PASS)
68 self.log_user(TEST_USER_ADMIN_LOGIN, TEST_USER_ADMIN_PASS)
69 params = {
69 params = {
70 'csrf_token': self.csrf_token,
70 'csrf_token': self.csrf_token,
71 'anonymous': 'True',
71 'anonymous': 'True',
72 'default_register': 'hg.register.auto_activate',
72 'default_register': 'hg.register.auto_activate',
73 'default_register_message': '',
73 'default_register_message': '',
74 'default_password_reset': pwd_reset_setting,
74 'default_password_reset': pwd_reset_setting,
75 'default_extern_activate': 'hg.extern_activate.auto',
75 'default_extern_activate': 'hg.extern_activate.auto',
76 }
76 }
77 resp = self.app.post(route_path('admin_permissions_application_update'), params=params)
77 resp = self.app.post(route_path('admin_permissions_application_update'), params=params)
78 self.logout_user()
78 self.logout_user()
79
79
80 login_page = self.app.get(route_path('login'))
80 login_page = self.app.get(route_path('login'))
81 asr_login = AssertResponse(login_page)
81 asr_login = AssertResponse(login_page)
82 index_page = self.app.get(h.route_path('home'))
82 index_page = self.app.get(h.route_path('home'))
83 asr_index = AssertResponse(index_page)
83 asr_index = AssertResponse(index_page)
84
84
85 if show_link:
85 if show_link:
86 asr_login.one_element_exists('a.pwd_reset')
86 asr_login.one_element_exists('a.pwd_reset')
87 asr_index.one_element_exists('a.pwd_reset')
87 asr_index.one_element_exists('a.pwd_reset')
88 else:
88 else:
89 asr_login.no_element_exists('a.pwd_reset')
89 asr_login.no_element_exists('a.pwd_reset')
90 asr_index.no_element_exists('a.pwd_reset')
90 asr_index.no_element_exists('a.pwd_reset')
91
91
92 response = self.app.get(route_path('reset_password'))
92 response = self.app.get(route_path('reset_password'))
93
93
94 assert_response = AssertResponse(response)
94 assert_response = AssertResponse(response)
95 if show_reset:
95 if show_reset:
96 response.mustcontain('Send password reset email')
96 response.mustcontain('Send password reset email')
97 assert_response.one_element_exists('#email')
97 assert_response.one_element_exists('#email')
98 assert_response.one_element_exists('#send')
98 assert_response.one_element_exists('#send')
99 else:
99 else:
100 response.mustcontain('Password reset is disabled.')
100 response.mustcontain('Password reset is disabled.')
101 assert_response.no_element_exists('#email')
101 assert_response.no_element_exists('#email')
102 assert_response.no_element_exists('#send')
102 assert_response.no_element_exists('#send')
103
103
104 def test_password_form_disabled(self):
104 def test_password_form_disabled(self):
105 self.log_user(TEST_USER_ADMIN_LOGIN, TEST_USER_ADMIN_PASS)
105 self.log_user(TEST_USER_ADMIN_LOGIN, TEST_USER_ADMIN_PASS)
106 params = {
106 params = {
107 'csrf_token': self.csrf_token,
107 'csrf_token': self.csrf_token,
108 'anonymous': 'True',
108 'anonymous': 'True',
109 'default_register': 'hg.register.auto_activate',
109 'default_register': 'hg.register.auto_activate',
110 'default_register_message': '',
110 'default_register_message': '',
111 'default_password_reset': 'hg.password_reset.disabled',
111 'default_password_reset': 'hg.password_reset.disabled',
112 'default_extern_activate': 'hg.extern_activate.auto',
112 'default_extern_activate': 'hg.extern_activate.auto',
113 }
113 }
114 self.app.post(route_path('admin_permissions_application_update'), params=params)
114 self.app.post(route_path('admin_permissions_application_update'), params=params)
115 self.logout_user()
115 self.logout_user()
116
116
117 response = self.app.post(
117 response = self.app.post(
118 route_path('reset_password'), {'email': 'lisa@rhodecode.com',}
118 route_path('reset_password'), {'email': 'lisa@rhodecode.com',}
119 )
119 )
120 response = response.follow()
120 response = response.follow()
121 response.mustcontain('Password reset is disabled.')
121 response.mustcontain('Password reset is disabled.')
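Here the helper is called with no arguments, which presumably invalidates every configured region rather than just the SQL one; compare the two call styles used by these tests:

    from rhodecode.tests import clear_cache_regions  # helper imported by both test modules

    clear_cache_regions()                      # no names given: presumably all regions
    clear_cache_regions(['sql_cache_short'])   # only the short-lived SQL cache region
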
@@ -1,180 +1,179 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2012-2018 RhodeCode GmbH
3 # Copyright (C) 2012-2018 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import colander
21 import colander
22 import formencode.htmlfill
22 import formencode.htmlfill
23 import logging
23 import logging
24
24
25 from pyramid.httpexceptions import HTTPFound
25 from pyramid.httpexceptions import HTTPFound
26 from pyramid.renderers import render
26 from pyramid.renderers import render
27 from pyramid.response import Response
27 from pyramid.response import Response
28
28
29 from rhodecode.apps._base import BaseAppView
29 from rhodecode.apps._base import BaseAppView
30 from rhodecode.authentication.base import get_authn_registry
30 from rhodecode.authentication.base import get_authn_registry
31 from rhodecode.lib import helpers as h
31 from rhodecode.lib import helpers as h
32 from rhodecode.lib.auth import (
32 from rhodecode.lib.auth import (
33 LoginRequired, HasPermissionAllDecorator, CSRFRequired)
33 LoginRequired, HasPermissionAllDecorator, CSRFRequired)
34 from rhodecode.lib.caches import clear_cache_manager
35 from rhodecode.model.forms import AuthSettingsForm
34 from rhodecode.model.forms import AuthSettingsForm
36 from rhodecode.model.meta import Session
35 from rhodecode.model.meta import Session
37 from rhodecode.model.settings import SettingsModel
36 from rhodecode.model.settings import SettingsModel
38
37
39 log = logging.getLogger(__name__)
38 log = logging.getLogger(__name__)
40
39
41
40
42 class AuthnPluginViewBase(BaseAppView):
41 class AuthnPluginViewBase(BaseAppView):
43
42
44 def load_default_context(self):
43 def load_default_context(self):
45 c = self._get_local_tmpl_context()
44 c = self._get_local_tmpl_context()
46 self.plugin = self.context.plugin
45 self.plugin = self.context.plugin
47 return c
46 return c
48
47
49 @LoginRequired()
48 @LoginRequired()
50 @HasPermissionAllDecorator('hg.admin')
49 @HasPermissionAllDecorator('hg.admin')
51 def settings_get(self, defaults=None, errors=None):
50 def settings_get(self, defaults=None, errors=None):
52 """
51 """
53 View that displays the plugin settings as a form.
52 View that displays the plugin settings as a form.
54 """
53 """
55 c = self.load_default_context()
54 c = self.load_default_context()
56 defaults = defaults or {}
55 defaults = defaults or {}
57 errors = errors or {}
56 errors = errors or {}
58 schema = self.plugin.get_settings_schema()
57 schema = self.plugin.get_settings_schema()
59
58
60 # Compute default values for the form. Priority is:
59 # Compute default values for the form. Priority is:
61 # 1. Passed to this method 2. DB value 3. Schema default
60 # 1. Passed to this method 2. DB value 3. Schema default
62 for node in schema:
61 for node in schema:
63 if node.name not in defaults:
62 if node.name not in defaults:
64 defaults[node.name] = self.plugin.get_setting_by_name(
63 defaults[node.name] = self.plugin.get_setting_by_name(
65 node.name, node.default)
64 node.name, node.default)
66
65
67 template_context = {
66 template_context = {
68 'defaults': defaults,
67 'defaults': defaults,
69 'errors': errors,
68 'errors': errors,
70 'plugin': self.context.plugin,
69 'plugin': self.context.plugin,
71 'resource': self.context,
70 'resource': self.context,
72 }
71 }
73
72
74 return self._get_template_context(c, **template_context)
73 return self._get_template_context(c, **template_context)
75
74
76 @LoginRequired()
75 @LoginRequired()
77 @HasPermissionAllDecorator('hg.admin')
76 @HasPermissionAllDecorator('hg.admin')
78 @CSRFRequired()
77 @CSRFRequired()
79 def settings_post(self):
78 def settings_post(self):
80 """
79 """
81 View that validates and stores the plugin settings.
80 View that validates and stores the plugin settings.
82 """
81 """
83 _ = self.request.translate
82 _ = self.request.translate
84 self.load_default_context()
83 self.load_default_context()
85 schema = self.plugin.get_settings_schema()
84 schema = self.plugin.get_settings_schema()
86 data = self.request.params
85 data = self.request.params
87
86
88 try:
87 try:
89 valid_data = schema.deserialize(data)
88 valid_data = schema.deserialize(data)
90 except colander.Invalid as e:
89 except colander.Invalid as e:
91 # Display error message and display form again.
90 # Display error message and display form again.
92 h.flash(
91 h.flash(
93 _('Errors exist when saving plugin settings. '
92 _('Errors exist when saving plugin settings. '
94 'Please check the form inputs.'),
93 'Please check the form inputs.'),
95 category='error')
94 category='error')
96 defaults = {key: data[key] for key in data if key in schema}
95 defaults = {key: data[key] for key in data if key in schema}
97 return self.settings_get(errors=e.asdict(), defaults=defaults)
96 return self.settings_get(errors=e.asdict(), defaults=defaults)
98
97
99 # Store validated data.
98 # Store validated data.
100 for name, value in valid_data.items():
99 for name, value in valid_data.items():
101 self.plugin.create_or_update_setting(name, value)
100 self.plugin.create_or_update_setting(name, value)
102 Session().commit()
101 Session().commit()
103
102
104 # Display success message and redirect.
103 # Display success message and redirect.
105 h.flash(_('Auth settings updated successfully.'), category='success')
104 h.flash(_('Auth settings updated successfully.'), category='success')
106 redirect_to = self.request.resource_path(
105 redirect_to = self.request.resource_path(
107 self.context, route_name='auth_home')
106 self.context, route_name='auth_home')
108 return HTTPFound(redirect_to)
107 return HTTPFound(redirect_to)
109
108
110
109
111 class AuthSettingsView(BaseAppView):
110 class AuthSettingsView(BaseAppView):
112 def load_default_context(self):
111 def load_default_context(self):
113 c = self._get_local_tmpl_context()
112 c = self._get_local_tmpl_context()
114 return c
113 return c
115
114
116 @LoginRequired()
115 @LoginRequired()
117 @HasPermissionAllDecorator('hg.admin')
116 @HasPermissionAllDecorator('hg.admin')
118 def index(self, defaults=None, errors=None, prefix_error=False):
117 def index(self, defaults=None, errors=None, prefix_error=False):
119 c = self.load_default_context()
118 c = self.load_default_context()
120
119
121 defaults = defaults or {}
120 defaults = defaults or {}
122 authn_registry = get_authn_registry(self.request.registry)
121 authn_registry = get_authn_registry(self.request.registry)
123 enabled_plugins = SettingsModel().get_auth_plugins()
122 enabled_plugins = SettingsModel().get_auth_plugins()
124
123
125 # Create template context and render it.
124 # Create template context and render it.
126 template_context = {
125 template_context = {
127 'resource': self.context,
126 'resource': self.context,
128 'available_plugins': authn_registry.get_plugins(),
127 'available_plugins': authn_registry.get_plugins(),
129 'enabled_plugins': enabled_plugins,
128 'enabled_plugins': enabled_plugins,
130 }
129 }
131 html = render('rhodecode:templates/admin/auth/auth_settings.mako',
130 html = render('rhodecode:templates/admin/auth/auth_settings.mako',
132 self._get_template_context(c, **template_context),
131 self._get_template_context(c, **template_context),
133 self.request)
132 self.request)
134
133
135 # Create form default values and fill the form.
134 # Create form default values and fill the form.
136 form_defaults = {
135 form_defaults = {
137 'auth_plugins': ',\n'.join(enabled_plugins)
136 'auth_plugins': ',\n'.join(enabled_plugins)
138 }
137 }
139 form_defaults.update(defaults)
138 form_defaults.update(defaults)
140 html = formencode.htmlfill.render(
139 html = formencode.htmlfill.render(
141 html,
140 html,
142 defaults=form_defaults,
141 defaults=form_defaults,
143 errors=errors,
142 errors=errors,
144 prefix_error=prefix_error,
143 prefix_error=prefix_error,
145 encoding="UTF-8",
144 encoding="UTF-8",
146 force_defaults=False)
145 force_defaults=False)
147
146
148 return Response(html)
147 return Response(html)
149
148
150 @LoginRequired()
149 @LoginRequired()
151 @HasPermissionAllDecorator('hg.admin')
150 @HasPermissionAllDecorator('hg.admin')
152 @CSRFRequired()
151 @CSRFRequired()
153 def auth_settings(self):
152 def auth_settings(self):
154 _ = self.request.translate
153 _ = self.request.translate
155 try:
154 try:
156 form = AuthSettingsForm(self.request.translate)()
155 form = AuthSettingsForm(self.request.translate)()
157 form_result = form.to_python(self.request.POST)
156 form_result = form.to_python(self.request.POST)
158 plugins = ','.join(form_result['auth_plugins'])
157 plugins = ','.join(form_result['auth_plugins'])
159 setting = SettingsModel().create_or_update_setting(
158 setting = SettingsModel().create_or_update_setting(
160 'auth_plugins', plugins)
159 'auth_plugins', plugins)
161 Session().add(setting)
160 Session().add(setting)
162 Session().commit()
161 Session().commit()
163
162
164 h.flash(_('Auth settings updated successfully.'), category='success')
163 h.flash(_('Auth settings updated successfully.'), category='success')
165 except formencode.Invalid as errors:
164 except formencode.Invalid as errors:
166 e = errors.error_dict or {}
165 e = errors.error_dict or {}
167 h.flash(_('Errors exist when saving plugin setting. '
166 h.flash(_('Errors exist when saving plugin setting. '
168 'Please check the form inputs.'), category='error')
167 'Please check the form inputs.'), category='error')
169 return self.index(
168 return self.index(
170 defaults=errors.value,
169 defaults=errors.value,
171 errors=e,
170 errors=e,
172 prefix_error=False)
171 prefix_error=False)
173 except Exception:
172 except Exception:
174 log.exception('Exception in auth_settings')
173 log.exception('Exception in auth_settings')
175 h.flash(_('Error occurred during update of auth settings.'),
174 h.flash(_('Error occurred during update of auth settings.'),
176 category='error')
175 category='error')
177
176
178 redirect_to = self.request.resource_path(
177 redirect_to = self.request.resource_path(
179 self.context, route_name='auth_home')
178 self.context, route_name='auth_home')
180 return HTTPFound(redirect_to)
179 return HTTPFound(redirect_to)
@@ -1,483 +1,510 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2018 RhodeCode GmbH
3 # Copyright (C) 2010-2018 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import os
21 import os
22 import logging
22 import logging
23 import traceback
23 import traceback
24 import collections
24 import collections
25 import tempfile
25 import tempfile
26
26
27 from paste.gzipper import make_gzip_middleware
27 from paste.gzipper import make_gzip_middleware
28 from pyramid.wsgi import wsgiapp
28 from pyramid.wsgi import wsgiapp
29 from pyramid.authorization import ACLAuthorizationPolicy
29 from pyramid.authorization import ACLAuthorizationPolicy
30 from pyramid.config import Configurator
30 from pyramid.config import Configurator
31 from pyramid.settings import asbool, aslist
31 from pyramid.settings import asbool, aslist
32 from pyramid.httpexceptions import (
32 from pyramid.httpexceptions import (
33 HTTPException, HTTPError, HTTPInternalServerError, HTTPFound, HTTPNotFound)
33 HTTPException, HTTPError, HTTPInternalServerError, HTTPFound, HTTPNotFound)
34 from pyramid.events import ApplicationCreated
34 from pyramid.events import ApplicationCreated
35 from pyramid.renderers import render_to_response
35 from pyramid.renderers import render_to_response
36
36
37 from rhodecode.model import meta
37 from rhodecode.model import meta
38 from rhodecode.config import patches
38 from rhodecode.config import patches
39 from rhodecode.config import utils as config_utils
39 from rhodecode.config import utils as config_utils
40 from rhodecode.config.environment import load_pyramid_environment
40 from rhodecode.config.environment import load_pyramid_environment
41
41
42 from rhodecode.lib.middleware.vcs import VCSMiddleware
42 from rhodecode.lib.middleware.vcs import VCSMiddleware
43 from rhodecode.lib.request import Request
43 from rhodecode.lib.request import Request
44 from rhodecode.lib.vcs import VCSCommunicationError
44 from rhodecode.lib.vcs import VCSCommunicationError
45 from rhodecode.lib.exceptions import VCSServerUnavailable
45 from rhodecode.lib.exceptions import VCSServerUnavailable
46 from rhodecode.lib.middleware.appenlight import wrap_in_appenlight_if_enabled
46 from rhodecode.lib.middleware.appenlight import wrap_in_appenlight_if_enabled
47 from rhodecode.lib.middleware.https_fixup import HttpsFixup
47 from rhodecode.lib.middleware.https_fixup import HttpsFixup
48 from rhodecode.lib.celerylib.loader import configure_celery
48 from rhodecode.lib.celerylib.loader import configure_celery
49 from rhodecode.lib.plugins.utils import register_rhodecode_plugin
49 from rhodecode.lib.plugins.utils import register_rhodecode_plugin
50 from rhodecode.lib.utils2 import aslist as rhodecode_aslist, AttributeDict
50 from rhodecode.lib.utils2 import aslist as rhodecode_aslist, AttributeDict
51 from rhodecode.subscribers import (
51 from rhodecode.subscribers import (
52 scan_repositories_if_enabled, write_js_routes_if_enabled,
52 scan_repositories_if_enabled, write_js_routes_if_enabled,
53 write_metadata_if_needed, inject_app_settings)
53 write_metadata_if_needed, inject_app_settings)
54
54
55
55
56 log = logging.getLogger(__name__)
56 log = logging.getLogger(__name__)
57
57
58
58
59 def is_http_error(response):
59 def is_http_error(response):
60 # error which should have traceback
60 # error which should have traceback
61 return response.status_code > 499
61 return response.status_code > 499
62
62
63
63
64 def make_pyramid_app(global_config, **settings):
64 def make_pyramid_app(global_config, **settings):
65 """
65 """
66 Constructs the WSGI application based on Pyramid.
66 Constructs the WSGI application based on Pyramid.
67
67
68 Specials:
68 Specials:
69
69
70 * The application can also be integrated like a plugin via the call to
70 * The application can also be integrated like a plugin via the call to
71 `includeme`. This is accompanied with the other utility functions which
71 `includeme`. This is accompanied with the other utility functions which
72 are called. Changing this should be done with great care to not break
72 are called. Changing this should be done with great care to not break
73 cases when these fragments are assembled from another place.
73 cases when these fragments are assembled from another place.
74
74
75 """
75 """
76
76
77 # Allows to use format style "{ENV_NAME}" placeholders in the configuration. It
77 # Allows to use format style "{ENV_NAME}" placeholders in the configuration. It
78 # will be replaced by the value of the environment variable "NAME" in this case.
78 # will be replaced by the value of the environment variable "NAME" in this case.
79 environ = {
79 environ = {
80 'ENV_{}'.format(key): value for key, value in os.environ.items()}
80 'ENV_{}'.format(key): value for key, value in os.environ.items()}
81
81
82 global_config = _substitute_values(global_config, environ)
82 global_config = _substitute_values(global_config, environ)
83 settings = _substitute_values(settings, environ)
83 settings = _substitute_values(settings, environ)
84
84
85 sanitize_settings_and_apply_defaults(settings)
85 sanitize_settings_and_apply_defaults(settings)
86
86
87 config = Configurator(settings=settings)
87 config = Configurator(settings=settings)
88
88
89 # Apply compatibility patches
89 # Apply compatibility patches
90 patches.inspect_getargspec()
90 patches.inspect_getargspec()
91
91
92 load_pyramid_environment(global_config, settings)
92 load_pyramid_environment(global_config, settings)
93
93
94 # Static file view comes first
94 # Static file view comes first
95 includeme_first(config)
95 includeme_first(config)
96
96
97 includeme(config)
97 includeme(config)
98
98
99 pyramid_app = config.make_wsgi_app()
99 pyramid_app = config.make_wsgi_app()
100 pyramid_app = wrap_app_in_wsgi_middlewares(pyramid_app, config)
100 pyramid_app = wrap_app_in_wsgi_middlewares(pyramid_app, config)
101 pyramid_app.config = config
101 pyramid_app.config = config
102
102
103 config.configure_celery(global_config['__file__'])
103 config.configure_celery(global_config['__file__'])
104 # creating the app uses a connection - return it after we are done
104 # creating the app uses a connection - return it after we are done
105 meta.Session.remove()
105 meta.Session.remove()
106
106
107 log.info('Pyramid app %s created and configured.', pyramid_app)
107 log.info('Pyramid app %s created and configured.', pyramid_app)
108 return pyramid_app
108 return pyramid_app
109
109
110
110
111 def not_found_view(request):
111 def not_found_view(request):
112 """
112 """
113 This creates the view which should be registered as not-found-view to
113 This creates the view which should be registered as not-found-view to
114 pyramid.
114 pyramid.
115 """
115 """
116
116
117 if not getattr(request, 'vcs_call', None):
117 if not getattr(request, 'vcs_call', None):
118 # handle like regular case with our error_handler
118 # handle like regular case with our error_handler
119 return error_handler(HTTPNotFound(), request)
119 return error_handler(HTTPNotFound(), request)
120
120
121 # handle not found view as a vcs call
121 # handle not found view as a vcs call
122 settings = request.registry.settings
122 settings = request.registry.settings
123 ae_client = getattr(request, 'ae_client', None)
123 ae_client = getattr(request, 'ae_client', None)
124 vcs_app = VCSMiddleware(
124 vcs_app = VCSMiddleware(
125 HTTPNotFound(), request.registry, settings,
125 HTTPNotFound(), request.registry, settings,
126 appenlight_client=ae_client)
126 appenlight_client=ae_client)
127
127
128 return wsgiapp(vcs_app)(None, request)
128 return wsgiapp(vcs_app)(None, request)
129
129
130
130
131 def error_handler(exception, request):
131 def error_handler(exception, request):
132 import rhodecode
132 import rhodecode
133 from rhodecode.lib import helpers
133 from rhodecode.lib import helpers
134
134
135 rhodecode_title = rhodecode.CONFIG.get('rhodecode_title') or 'RhodeCode'
135 rhodecode_title = rhodecode.CONFIG.get('rhodecode_title') or 'RhodeCode'
136
136
137 base_response = HTTPInternalServerError()
137 base_response = HTTPInternalServerError()
138 # prefer original exception for the response since it may have headers set
138 # prefer original exception for the response since it may have headers set
139 if isinstance(exception, HTTPException):
139 if isinstance(exception, HTTPException):
140 base_response = exception
140 base_response = exception
141 elif isinstance(exception, VCSCommunicationError):
141 elif isinstance(exception, VCSCommunicationError):
142 base_response = VCSServerUnavailable()
142 base_response = VCSServerUnavailable()
143
143
144 if is_http_error(base_response):
144 if is_http_error(base_response):
145 log.exception(
145 log.exception(
146 'error occurred handling this request for path: %s', request.path)
146 'error occurred handling this request for path: %s', request.path)
147
147
148 error_explanation = base_response.explanation or str(base_response)
148 error_explanation = base_response.explanation or str(base_response)
149 if base_response.status_code == 404:
149 if base_response.status_code == 404:
150 error_explanation += " Or you don't have permission to access it."
150 error_explanation += " Or you don't have permission to access it."
151 c = AttributeDict()
151 c = AttributeDict()
152 c.error_message = base_response.status
152 c.error_message = base_response.status
153 c.error_explanation = error_explanation
153 c.error_explanation = error_explanation
154 c.visual = AttributeDict()
154 c.visual = AttributeDict()
155
155
156 c.visual.rhodecode_support_url = (
156 c.visual.rhodecode_support_url = (
157 request.registry.settings.get('rhodecode_support_url') or
157 request.registry.settings.get('rhodecode_support_url') or
158 request.route_url('rhodecode_support')
158 request.route_url('rhodecode_support')
159 )
159 )
160 c.redirect_time = 0
160 c.redirect_time = 0
161 c.rhodecode_name = rhodecode_title
161 c.rhodecode_name = rhodecode_title
162 if not c.rhodecode_name:
162 if not c.rhodecode_name:
163 c.rhodecode_name = 'Rhodecode'
163 c.rhodecode_name = 'Rhodecode'
164
164
165 c.causes = []
165 c.causes = []
166 if is_http_error(base_response):
166 if is_http_error(base_response):
167 c.causes.append('Server is overloaded.')
167 c.causes.append('Server is overloaded.')
168 c.causes.append('Server database connection is lost.')
168 c.causes.append('Server database connection is lost.')
169 c.causes.append('Server expected unhandled error.')
169 c.causes.append('Server expected unhandled error.')
170
170
171 if hasattr(base_response, 'causes'):
171 if hasattr(base_response, 'causes'):
172 c.causes = base_response.causes
172 c.causes = base_response.causes
173
173
174 c.messages = helpers.flash.pop_messages(request=request)
174 c.messages = helpers.flash.pop_messages(request=request)
175 c.traceback = traceback.format_exc()
175 c.traceback = traceback.format_exc()
176 response = render_to_response(
176 response = render_to_response(
177 '/errors/error_document.mako', {'c': c, 'h': helpers}, request=request,
177 '/errors/error_document.mako', {'c': c, 'h': helpers}, request=request,
178 response=base_response)
178 response=base_response)
179
179
180 return response
180 return response
181
181
182
182
183 def includeme_first(config):
183 def includeme_first(config):
184 # redirect automatic browser favicon.ico requests to correct place
184 # redirect automatic browser favicon.ico requests to correct place
185 def favicon_redirect(context, request):
185 def favicon_redirect(context, request):
186 return HTTPFound(
186 return HTTPFound(
187 request.static_path('rhodecode:public/images/favicon.ico'))
187 request.static_path('rhodecode:public/images/favicon.ico'))
188
188
189 config.add_view(favicon_redirect, route_name='favicon')
189 config.add_view(favicon_redirect, route_name='favicon')
190 config.add_route('favicon', '/favicon.ico')
190 config.add_route('favicon', '/favicon.ico')
191
191
192 def robots_redirect(context, request):
192 def robots_redirect(context, request):
193 return HTTPFound(
193 return HTTPFound(
194 request.static_path('rhodecode:public/robots.txt'))
194 request.static_path('rhodecode:public/robots.txt'))
195
195
196 config.add_view(robots_redirect, route_name='robots')
196 config.add_view(robots_redirect, route_name='robots')
197 config.add_route('robots', '/robots.txt')
197 config.add_route('robots', '/robots.txt')
198
198
199 config.add_static_view(
199 config.add_static_view(
200 '_static/deform', 'deform:static')
200 '_static/deform', 'deform:static')
201 config.add_static_view(
201 config.add_static_view(
202 '_static/rhodecode', path='rhodecode:public', cache_max_age=3600 * 24)
202 '_static/rhodecode', path='rhodecode:public', cache_max_age=3600 * 24)
203
203
204
204
205 def includeme(config):
205 def includeme(config):
206 settings = config.registry.settings
206 settings = config.registry.settings
207 config.set_request_factory(Request)
207 config.set_request_factory(Request)
208
208
209 # plugin information
209 # plugin information
210 config.registry.rhodecode_plugins = collections.OrderedDict()
210 config.registry.rhodecode_plugins = collections.OrderedDict()
211
211
212 config.add_directive(
212 config.add_directive(
213 'register_rhodecode_plugin', register_rhodecode_plugin)
213 'register_rhodecode_plugin', register_rhodecode_plugin)
214
214
215 config.add_directive('configure_celery', configure_celery)
215 config.add_directive('configure_celery', configure_celery)
216
216
217 if asbool(settings.get('appenlight', 'false')):
217 if asbool(settings.get('appenlight', 'false')):
218 config.include('appenlight_client.ext.pyramid_tween')
218 config.include('appenlight_client.ext.pyramid_tween')
219
219
220 # Includes which are required. The application would fail without them.
220 # Includes which are required. The application would fail without them.
221 config.include('pyramid_mako')
221 config.include('pyramid_mako')
222 config.include('pyramid_beaker')
222 config.include('pyramid_beaker')
223 config.include('rhodecode.lib.caches')
223 config.include('rhodecode.lib.caches')
224 config.include('rhodecode.lib.rc_cache')
224 config.include('rhodecode.lib.rc_cache')
225
225
226 config.include('rhodecode.authentication')
226 config.include('rhodecode.authentication')
227 config.include('rhodecode.integrations')
227 config.include('rhodecode.integrations')
228
228
229 # apps
229 # apps
230 config.include('rhodecode.apps._base')
230 config.include('rhodecode.apps._base')
231 config.include('rhodecode.apps.ops')
231 config.include('rhodecode.apps.ops')
232
232
233 config.include('rhodecode.apps.admin')
233 config.include('rhodecode.apps.admin')
234 config.include('rhodecode.apps.channelstream')
234 config.include('rhodecode.apps.channelstream')
235 config.include('rhodecode.apps.login')
235 config.include('rhodecode.apps.login')
236 config.include('rhodecode.apps.home')
236 config.include('rhodecode.apps.home')
237 config.include('rhodecode.apps.journal')
237 config.include('rhodecode.apps.journal')
238 config.include('rhodecode.apps.repository')
238 config.include('rhodecode.apps.repository')
239 config.include('rhodecode.apps.repo_group')
239 config.include('rhodecode.apps.repo_group')
240 config.include('rhodecode.apps.user_group')
240 config.include('rhodecode.apps.user_group')
241 config.include('rhodecode.apps.search')
241 config.include('rhodecode.apps.search')
242 config.include('rhodecode.apps.user_profile')
242 config.include('rhodecode.apps.user_profile')
243 config.include('rhodecode.apps.user_group_profile')
243 config.include('rhodecode.apps.user_group_profile')
244 config.include('rhodecode.apps.my_account')
244 config.include('rhodecode.apps.my_account')
245 config.include('rhodecode.apps.svn_support')
245 config.include('rhodecode.apps.svn_support')
246 config.include('rhodecode.apps.ssh_support')
246 config.include('rhodecode.apps.ssh_support')
247 config.include('rhodecode.apps.gist')
247 config.include('rhodecode.apps.gist')
248
248
249 config.include('rhodecode.apps.debug_style')
249 config.include('rhodecode.apps.debug_style')
250 config.include('rhodecode.tweens')
250 config.include('rhodecode.tweens')
251 config.include('rhodecode.api')
251 config.include('rhodecode.api')
252
252
253 config.add_route(
253 config.add_route(
254 'rhodecode_support', 'https://rhodecode.com/help/', static=True)
254 'rhodecode_support', 'https://rhodecode.com/help/', static=True)
255
255
256 config.add_translation_dirs('rhodecode:i18n/')
256 config.add_translation_dirs('rhodecode:i18n/')
257 settings['default_locale_name'] = settings.get('lang', 'en')
257 settings['default_locale_name'] = settings.get('lang', 'en')
258
258
259 # Add subscribers.
259 # Add subscribers.
260 config.add_subscriber(inject_app_settings, ApplicationCreated)
260 config.add_subscriber(inject_app_settings, ApplicationCreated)
261 config.add_subscriber(scan_repositories_if_enabled, ApplicationCreated)
261 config.add_subscriber(scan_repositories_if_enabled, ApplicationCreated)
262 config.add_subscriber(write_metadata_if_needed, ApplicationCreated)
262 config.add_subscriber(write_metadata_if_needed, ApplicationCreated)
263 config.add_subscriber(write_js_routes_if_enabled, ApplicationCreated)
263 config.add_subscriber(write_js_routes_if_enabled, ApplicationCreated)
264
264
265 # events
265 # events
266 # TODO(marcink): this should be done when pyramid migration is finished
266 # TODO(marcink): this should be done when pyramid migration is finished
267 # config.add_subscriber(
267 # config.add_subscriber(
268 # 'rhodecode.integrations.integrations_event_handler',
268 # 'rhodecode.integrations.integrations_event_handler',
269 # 'rhodecode.events.RhodecodeEvent')
269 # 'rhodecode.events.RhodecodeEvent')
270
270
271 # request custom methods
271 # request custom methods
272 config.add_request_method(
272 config.add_request_method(
273 'rhodecode.lib.partial_renderer.get_partial_renderer',
273 'rhodecode.lib.partial_renderer.get_partial_renderer',
274 'get_partial_renderer')
274 'get_partial_renderer')
275
275
276 # Set the authorization policy.
276 # Set the authorization policy.
277 authz_policy = ACLAuthorizationPolicy()
277 authz_policy = ACLAuthorizationPolicy()
278 config.set_authorization_policy(authz_policy)
278 config.set_authorization_policy(authz_policy)
279
279
280 # Set the default renderer for HTML templates to mako.
280 # Set the default renderer for HTML templates to mako.
281 config.add_mako_renderer('.html')
281 config.add_mako_renderer('.html')
282
282
283 config.add_renderer(
283 config.add_renderer(
284 name='json_ext',
284 name='json_ext',
285 factory='rhodecode.lib.ext_json_renderer.pyramid_ext_json')
285 factory='rhodecode.lib.ext_json_renderer.pyramid_ext_json')
286
286
287 # include RhodeCode plugins
287 # include RhodeCode plugins
288 includes = aslist(settings.get('rhodecode.includes', []))
288 includes = aslist(settings.get('rhodecode.includes', []))
289 for inc in includes:
289 for inc in includes:
290 config.include(inc)
290 config.include(inc)
291
291
292 # custom not found view; if our pyramid app doesn't know how to handle
292 # custom not found view; if our pyramid app doesn't know how to handle
293 # the request, pass it to the potential VCS handling app
293 # the request, pass it to the potential VCS handling app
294 config.add_notfound_view(not_found_view)
294 config.add_notfound_view(not_found_view)
295 if not settings.get('debugtoolbar.enabled', False):
295 if not settings.get('debugtoolbar.enabled', False):
296 # with the debugtoolbar disabled, handle all exceptions via the error_handler
296 # with the debugtoolbar disabled, handle all exceptions via the error_handler
297 config.add_view(error_handler, context=Exception)
297 config.add_view(error_handler, context=Exception)
298
298
299 # all errors including 403/404/50X
299 # all errors including 403/404/50X
300 config.add_view(error_handler, context=HTTPError)
300 config.add_view(error_handler, context=HTTPError)
301
301
302
302
303 def wrap_app_in_wsgi_middlewares(pyramid_app, config):
303 def wrap_app_in_wsgi_middlewares(pyramid_app, config):
304 """
304 """
305 Apply outer WSGI middlewares around the application.
305 Apply outer WSGI middlewares around the application.
306 """
306 """
307 settings = config.registry.settings
307 settings = config.registry.settings
308
308
309 # enable https redirects based on HTTP_X_URL_SCHEME set by proxy
309 # enable https redirects based on HTTP_X_URL_SCHEME set by proxy
310 pyramid_app = HttpsFixup(pyramid_app, settings)
310 pyramid_app = HttpsFixup(pyramid_app, settings)
311
311
312 pyramid_app, _ae_client = wrap_in_appenlight_if_enabled(
312 pyramid_app, _ae_client = wrap_in_appenlight_if_enabled(
313 pyramid_app, settings)
313 pyramid_app, settings)
314 config.registry.ae_client = _ae_client
314 config.registry.ae_client = _ae_client
315
315
316 if settings['gzip_responses']:
316 if settings['gzip_responses']:
317 pyramid_app = make_gzip_middleware(
317 pyramid_app = make_gzip_middleware(
318 pyramid_app, settings, compress_level=1)
318 pyramid_app, settings, compress_level=1)
319
319
320 # this should be the outermost middleware in the wsgi stack, since
320 # this should be the outermost middleware in the wsgi stack, since
321 # middleware like Routes makes database calls
321 # middleware like Routes makes database calls
322 def pyramid_app_with_cleanup(environ, start_response):
322 def pyramid_app_with_cleanup(environ, start_response):
323 try:
323 try:
324 return pyramid_app(environ, start_response)
324 return pyramid_app(environ, start_response)
325 finally:
325 finally:
326 # Dispose current database session and rollback uncommitted
326 # Dispose current database session and rollback uncommitted
327 # transactions.
327 # transactions.
328 meta.Session.remove()
328 meta.Session.remove()
329
329
330 # In a single-threaded server, on a non-sqlite db, we should have
330 # In a single-threaded server, on a non-sqlite db, we should have
331 # '0 Current Checked out connections' at the end of a request,
331 # '0 Current Checked out connections' at the end of a request,
332 # if not, then something, somewhere is leaving a connection open
332 # if not, then something, somewhere is leaving a connection open
333 pool = meta.Base.metadata.bind.engine.pool
333 pool = meta.Base.metadata.bind.engine.pool
334 log.debug('sa pool status: %s', pool.status())
334 log.debug('sa pool status: %s', pool.status())
335
335
336 return pyramid_app_with_cleanup
336 return pyramid_app_with_cleanup
337
337
338
338
339 def sanitize_settings_and_apply_defaults(settings):
339 def sanitize_settings_and_apply_defaults(settings):
340 """
340 """
341 Applies settings defaults and does all type conversion.
341 Applies settings defaults and does all type conversion.
342
342
343 Eventually all settings parsing and preparation should move into this place,
343 Eventually all settings parsing and preparation should move into this place,
344 so that only one place deals with this part. The remaining parts of the
344 so that only one place deals with this part. The remaining parts of the
345 application would then rely fully on well prepared settings.
345 application would then rely fully on well prepared settings.
346
346
347 This piece would later be split up per topic to avoid a big fat monster
347 This piece would later be split up per topic to avoid a big fat monster
348 function.
348 function.
349 """
349 """
350
350
351 settings.setdefault('rhodecode.edition', 'Community Edition')
351 settings.setdefault('rhodecode.edition', 'Community Edition')
352
352
353 if 'mako.default_filters' not in settings:
353 if 'mako.default_filters' not in settings:
354 # set custom default filters if they are not defined
354 # set custom default filters if they are not defined
355 settings['mako.imports'] = 'from rhodecode.lib.base import h_filter'
355 settings['mako.imports'] = 'from rhodecode.lib.base import h_filter'
356 settings['mako.default_filters'] = 'h_filter'
356 settings['mako.default_filters'] = 'h_filter'
357
357
358 if 'mako.directories' not in settings:
358 if 'mako.directories' not in settings:
359 mako_directories = settings.setdefault('mako.directories', [
359 mako_directories = settings.setdefault('mako.directories', [
360 # Base templates of the original application
360 # Base templates of the original application
361 'rhodecode:templates',
361 'rhodecode:templates',
362 ])
362 ])
363 log.debug(
363 log.debug(
364 "Using the following Mako template directories: %s",
364 "Using the following Mako template directories: %s",
365 mako_directories)
365 mako_directories)
366
366
367 # Default includes, possible to change as a user
367 # Default includes, possible to change as a user
368 pyramid_includes = settings.setdefault('pyramid.includes', [
368 pyramid_includes = settings.setdefault('pyramid.includes', [
369 'rhodecode.lib.middleware.request_wrapper',
369 'rhodecode.lib.middleware.request_wrapper',
370 ])
370 ])
371 log.debug(
371 log.debug(
372 "Using the following pyramid.includes: %s",
372 "Using the following pyramid.includes: %s",
373 pyramid_includes)
373 pyramid_includes)
374
374
375 # TODO: johbo: Re-think this, usually the call to config.include
375 # TODO: johbo: Re-think this, usually the call to config.include
376 # should allow passing in a prefix.
376 # should allow passing in a prefix.
377 settings.setdefault('rhodecode.api.url', '/_admin/api')
377 settings.setdefault('rhodecode.api.url', '/_admin/api')
378
378
379 # Sanitize generic settings.
379 # Sanitize generic settings.
380 _list_setting(settings, 'default_encoding', 'UTF-8')
380 _list_setting(settings, 'default_encoding', 'UTF-8')
381 _bool_setting(settings, 'is_test', 'false')
381 _bool_setting(settings, 'is_test', 'false')
382 _bool_setting(settings, 'gzip_responses', 'false')
382 _bool_setting(settings, 'gzip_responses', 'false')
383
383
384 # Call split out functions that sanitize settings for each topic.
384 # Call split out functions that sanitize settings for each topic.
385 _sanitize_appenlight_settings(settings)
385 _sanitize_appenlight_settings(settings)
386 _sanitize_vcs_settings(settings)
386 _sanitize_vcs_settings(settings)
387 _sanitize_cache_settings(settings)
387 _sanitize_cache_settings(settings)
388
388
389 # configure instance id
389 # configure instance id
390 config_utils.set_instance_id(settings)
390 config_utils.set_instance_id(settings)
391
391
392 return settings
392 return settings
393
393
394
394
395 def _sanitize_appenlight_settings(settings):
395 def _sanitize_appenlight_settings(settings):
396 _bool_setting(settings, 'appenlight', 'false')
396 _bool_setting(settings, 'appenlight', 'false')
397
397
398
398
399 def _sanitize_vcs_settings(settings):
399 def _sanitize_vcs_settings(settings):
400 """
400 """
401 Applies settings defaults and does type conversion for all VCS related
401 Applies settings defaults and does type conversion for all VCS related
402 settings.
402 settings.
403 """
403 """
404 _string_setting(settings, 'vcs.svn.compatible_version', '')
404 _string_setting(settings, 'vcs.svn.compatible_version', '')
405 _string_setting(settings, 'git_rev_filter', '--all')
405 _string_setting(settings, 'git_rev_filter', '--all')
406 _string_setting(settings, 'vcs.hooks.protocol', 'http')
406 _string_setting(settings, 'vcs.hooks.protocol', 'http')
407 _string_setting(settings, 'vcs.hooks.host', '127.0.0.1')
407 _string_setting(settings, 'vcs.hooks.host', '127.0.0.1')
408 _string_setting(settings, 'vcs.scm_app_implementation', 'http')
408 _string_setting(settings, 'vcs.scm_app_implementation', 'http')
409 _string_setting(settings, 'vcs.server', '')
409 _string_setting(settings, 'vcs.server', '')
410 _string_setting(settings, 'vcs.server.log_level', 'debug')
410 _string_setting(settings, 'vcs.server.log_level', 'debug')
411 _string_setting(settings, 'vcs.server.protocol', 'http')
411 _string_setting(settings, 'vcs.server.protocol', 'http')
412 _bool_setting(settings, 'startup.import_repos', 'false')
412 _bool_setting(settings, 'startup.import_repos', 'false')
413 _bool_setting(settings, 'vcs.hooks.direct_calls', 'false')
413 _bool_setting(settings, 'vcs.hooks.direct_calls', 'false')
414 _bool_setting(settings, 'vcs.server.enable', 'true')
414 _bool_setting(settings, 'vcs.server.enable', 'true')
415 _bool_setting(settings, 'vcs.start_server', 'false')
415 _bool_setting(settings, 'vcs.start_server', 'false')
416 _list_setting(settings, 'vcs.backends', 'hg, git, svn')
416 _list_setting(settings, 'vcs.backends', 'hg, git, svn')
417 _int_setting(settings, 'vcs.connection_timeout', 3600)
417 _int_setting(settings, 'vcs.connection_timeout', 3600)
418
418
419 # Support legacy values of vcs.scm_app_implementation. Legacy
419 # Support legacy values of vcs.scm_app_implementation. Legacy
420 # configurations may use 'rhodecode.lib.middleware.utils.scm_app_http'
420 # configurations may use 'rhodecode.lib.middleware.utils.scm_app_http'
421 # which is now mapped to 'http'.
421 # which is now mapped to 'http'.
422 scm_app_impl = settings['vcs.scm_app_implementation']
422 scm_app_impl = settings['vcs.scm_app_implementation']
423 if scm_app_impl == 'rhodecode.lib.middleware.utils.scm_app_http':
423 if scm_app_impl == 'rhodecode.lib.middleware.utils.scm_app_http':
424 settings['vcs.scm_app_implementation'] = 'http'
424 settings['vcs.scm_app_implementation'] = 'http'
425
425
426
426
427 def _sanitize_cache_settings(settings):
427 def _sanitize_cache_settings(settings):
428 _string_setting(settings, 'cache_dir',
428 _string_setting(settings, 'cache_dir',
429 os.path.join(tempfile.gettempdir(), 'rc_cache'))
429 os.path.join(tempfile.gettempdir(), 'rc_cache'))
430
430 # cache_perms
431 _string_setting(settings, 'rc_cache.cache_perms.backend',
431 _string_setting(
432 settings,
433 'rc_cache.cache_perms.backend',
432 'dogpile.cache.rc.file_namespace')
434 'dogpile.cache.rc.file_namespace')
433 _int_setting(settings, 'rc_cache.cache_perms.expiration_time',
435 _int_setting(
436 settings,
437 'rc_cache.cache_perms.expiration_time',
434 60)
438 60)
435 _string_setting(settings, 'rc_cache.cache_perms.arguments.filename',
439 _string_setting(
440 settings,
441 'rc_cache.cache_perms.arguments.filename',
436 os.path.join(tempfile.gettempdir(), 'rc_cache_1'))
442 os.path.join(tempfile.gettempdir(), 'rc_cache_1'))
437
443
438 _string_setting(settings, 'rc_cache.cache_repo.backend',
444 # cache_repo
445 _string_setting(
446 settings,
447 'rc_cache.cache_repo.backend',
439 'dogpile.cache.rc.file_namespace')
448 'dogpile.cache.rc.file_namespace')
440 _int_setting(settings, 'rc_cache.cache_repo.expiration_time',
449 _int_setting(
450 settings,
451 'rc_cache.cache_repo.expiration_time',
441 60)
452 60)
442 _string_setting(settings, 'rc_cache.cache_repo.arguments.filename',
453 _string_setting(
454 settings,
455 'rc_cache.cache_repo.arguments.filename',
443 os.path.join(tempfile.gettempdir(), 'rc_cache_2'))
456 os.path.join(tempfile.gettempdir(), 'rc_cache_2'))
444
457
458 # sql_cache_short
459 _string_setting(
460 settings,
461 'rc_cache.sql_cache_short.backend',
462 'dogpile.cache.rc.memory_lru')
463 _int_setting(
464 settings,
465 'rc_cache.sql_cache_short.expiration_time',
466 30)
467 _int_setting(
468 settings,
469 'rc_cache.sql_cache_short.max_size',
470 10000)
471
445
472
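The defaults above map directly onto rc_cache.* keys in the .ini file. As a rough, hypothetical sketch of how such keys configure a dogpile region, using the stock dogpile.cache.memory backend rather than RhodeCode's custom dogpile.cache.rc.* backends (the real wiring lives in rhodecode.lib.rc_cache):

    from dogpile.cache import make_region

    # illustrative only; values mirror the sql_cache_short defaults above
    region = make_region().configure_from_config(
        {
            'rc_cache.sql_cache_short.backend': 'dogpile.cache.memory',
            'rc_cache.sql_cache_short.expiration_time': 30,
        },
        'rc_cache.sql_cache_short.')

    region.set('answer', 42)
    assert region.get('answer') == 42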
446 def _int_setting(settings, name, default):
473 def _int_setting(settings, name, default):
447 settings[name] = int(settings.get(name, default))
474 settings[name] = int(settings.get(name, default))
448
475
449
476
450 def _bool_setting(settings, name, default):
477 def _bool_setting(settings, name, default):
451 input_val = settings.get(name, default)
478 input_val = settings.get(name, default)
452 if isinstance(input_val, unicode):
479 if isinstance(input_val, unicode):
453 input_val = input_val.encode('utf8')
480 input_val = input_val.encode('utf8')
454 settings[name] = asbool(input_val)
481 settings[name] = asbool(input_val)
455
482
456
483
457 def _list_setting(settings, name, default):
484 def _list_setting(settings, name, default):
458 raw_value = settings.get(name, default)
485 raw_value = settings.get(name, default)
459
486
460 old_separator = ','
487 old_separator = ','
461 if old_separator in raw_value:
488 if old_separator in raw_value:
462 # If we get a comma separated list, pass it to our own function.
489 # If we get a comma separated list, pass it to our own function.
463 settings[name] = rhodecode_aslist(raw_value, sep=old_separator)
490 settings[name] = rhodecode_aslist(raw_value, sep=old_separator)
464 else:
491 else:
465 # Otherwise we assume it uses pyramids space/newline separation.
492 # Otherwise we assume it uses pyramids space/newline separation.
466 settings[name] = aslist(raw_value)
493 settings[name] = aslist(raw_value)
467
494
468
495
469 def _string_setting(settings, name, default, lower=True):
496 def _string_setting(settings, name, default, lower=True):
470 value = settings.get(name, default)
497 value = settings.get(name, default)
471 if lower:
498 if lower:
472 value = value.lower()
499 value = value.lower()
473 settings[name] = value
500 settings[name] = value
474
501
475
502
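For illustration, a small sketch of what the coercion helpers above do to raw .ini-style values (the keys and values here are hypothetical):

    settings = {'gzip_responses': 'true', 'vcs.backends': 'hg, git, svn'}

    _bool_setting(settings, 'gzip_responses', 'false')        # 'true' -> True
    _int_setting(settings, 'vcs.connection_timeout', 3600)    # missing -> 3600
    _list_setting(settings, 'vcs.backends', 'hg, git, svn')   # comma separated -> list
    _string_setting(settings, 'vcs.server.protocol', 'HTTP')  # default, lower-cased

    # after sanitizing, the values are properly typed:
    # {'gzip_responses': True, 'vcs.connection_timeout': 3600,
    #  'vcs.backends': ['hg', 'git', 'svn'], 'vcs.server.protocol': 'http'}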
476 def _substitute_values(mapping, substitutions):
503 def _substitute_values(mapping, substitutions):
477 result = {
504 result = {
478 # Note: Cannot use regular replacements, since they would clash
505 # Note: Cannot use regular replacements, since they would clash
479 # with the implementation of ConfigParser. Using "format" instead.
506 # with the implementation of ConfigParser. Using "format" instead.
480 key: value.format(**substitutions)
507 key: value.format(**substitutions)
481 for key, value in mapping.items()
508 for key, value in mapping.items()
482 }
509 }
483 return result
510 return result
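_substitute_values is a thin wrapper around str.format; a minimal usage sketch with hypothetical values:

    mapping = {'lock_dir': '{cache_dir}/lock', 'data_dir': '{cache_dir}/data'}
    result = _substitute_values(mapping, {'cache_dir': '/srv/rc/cache'})
    # result == {'lock_dir': '/srv/rc/cache/lock', 'data_dir': '/srv/rc/cache/data'}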
@@ -1,226 +1,188 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2015-2018 RhodeCode GmbH
3 # Copyright (C) 2015-2018 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 import functools
20 import functools
21
21
22 import beaker
22 import beaker
23 import logging
23 import logging
24 import threading
24 import threading
25
25
26 from beaker.cache import _cache_decorate, cache_regions, region_invalidate
26 from beaker.cache import _cache_decorate, region_invalidate
27 from sqlalchemy.exc import IntegrityError
27 from sqlalchemy.exc import IntegrityError
28
28
29 from rhodecode.lib.utils import safe_str, sha1
29 from rhodecode.lib.utils import safe_str, sha1
30 from rhodecode.model.db import Session, CacheKey
30 from rhodecode.model.db import Session, CacheKey
31
31
32 log = logging.getLogger(__name__)
32 log = logging.getLogger(__name__)
33
33
34
34
35 DEFAULT_CACHE_MANAGER_CONFIG = {
35 DEFAULT_CACHE_MANAGER_CONFIG = {
36 'type': 'memorylru_base',
36 'type': 'memorylru_base',
37 'max_items': 10240,
37 'max_items': 10240,
38 'key_length': 256,
38 'key_length': 256,
39 'enabled': True
39 'enabled': True
40 }
40 }
41
41
42
42
43 def get_default_cache_settings(settings):
43 def get_default_cache_settings(settings):
44 cache_settings = {}
44 cache_settings = {}
45 for key in settings.keys():
45 for key in settings.keys():
46 for prefix in ['beaker.cache.', 'cache.']:
46 for prefix in ['beaker.cache.', 'cache.']:
47 if key.startswith(prefix):
47 if key.startswith(prefix):
48 name = key.split(prefix)[1].strip()
48 name = key.split(prefix)[1].strip()
49 cache_settings[name] = settings[key].strip()
49 cache_settings[name] = settings[key].strip()
50 return cache_settings
50 return cache_settings
51
51
52
52
53 # set cache regions for beaker so celery can utilise it
53 # set cache regions for beaker so celery can utilise it
54 def configure_caches(settings, default_region_settings=None):
54 def configure_caches(settings, default_region_settings=None):
55 cache_settings = {'regions': None}
55 cache_settings = {'regions': None}
56 # main cache settings used as default ...
56 # main cache settings used as default ...
57 cache_settings.update(get_default_cache_settings(settings))
57 cache_settings.update(get_default_cache_settings(settings))
58 default_region_settings = default_region_settings or \
58 default_region_settings = default_region_settings or \
59 {'type': DEFAULT_CACHE_MANAGER_CONFIG['type']}
59 {'type': DEFAULT_CACHE_MANAGER_CONFIG['type']}
60 if cache_settings['regions']:
60 if cache_settings['regions']:
61 for region in cache_settings['regions'].split(','):
61 for region in cache_settings['regions'].split(','):
62 region = region.strip()
62 region = region.strip()
63 region_settings = default_region_settings.copy()
63 region_settings = default_region_settings.copy()
64 for key, value in cache_settings.items():
64 for key, value in cache_settings.items():
65 if key.startswith(region):
65 if key.startswith(region):
66 region_settings[key.split(region + '.')[-1]] = value
66 region_settings[key.split(region + '.')[-1]] = value
67 log.debug('Configuring cache region `%s` with settings %s',
67 log.debug('Configuring cache region `%s` with settings %s',
68 region, region_settings)
68 region, region_settings)
69 configure_cache_region(
69 configure_cache_region(
70 region, region_settings, cache_settings)
70 region, region_settings, cache_settings)
71
71
72
72
73 def configure_cache_region(
73 def configure_cache_region(
74 region_name, region_settings, default_cache_kw, default_expire=60):
74 region_name, region_settings, default_cache_kw, default_expire=60):
75 default_type = default_cache_kw.get('type', 'memory')
75 default_type = default_cache_kw.get('type', 'memory')
76 default_lock_dir = default_cache_kw.get('lock_dir')
76 default_lock_dir = default_cache_kw.get('lock_dir')
77 default_data_dir = default_cache_kw.get('data_dir')
77 default_data_dir = default_cache_kw.get('data_dir')
78
78
79 region_settings['lock_dir'] = region_settings.get('lock_dir', default_lock_dir)
79 region_settings['lock_dir'] = region_settings.get('lock_dir', default_lock_dir)
80 region_settings['data_dir'] = region_settings.get('data_dir', default_data_dir)
80 region_settings['data_dir'] = region_settings.get('data_dir', default_data_dir)
81 region_settings['type'] = region_settings.get('type', default_type)
81 region_settings['type'] = region_settings.get('type', default_type)
82 region_settings['expire'] = int(region_settings.get('expire', default_expire))
82 region_settings['expire'] = int(region_settings.get('expire', default_expire))
83
83
84 beaker.cache.cache_regions[region_name] = region_settings
84 beaker.cache.cache_regions[region_name] = region_settings
85
85
86
86
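A hedged sketch of how the two functions above consume beaker-style settings; the region name and values are hypothetical and only illustrate the prefix stripping and type coercion:

    settings = {
        'beaker.cache.regions': 'long_term',
        'beaker.cache.long_term.type': 'memory',
        'beaker.cache.long_term.expire': '36000',
    }
    configure_caches(settings)
    # beaker.cache.cache_regions['long_term'] now holds roughly
    # {'type': 'memory', 'expire': 36000, 'lock_dir': None, 'data_dir': None}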
87 def get_cache_manager(region_name, cache_name, custom_ttl=None):
88 """
89 Creates a Beaker cache manager. Such an instance can be used as follows::
90
91 _namespace = caches.get_repo_namespace_key(caches.XXX, repo_name)
92 cache_manager = caches.get_cache_manager('some_namespace_name', _namespace)
93 _cache_key = caches.compute_key_from_params(repo_name, commit.raw_id)
94 def heavy_compute():
95 ...
96 result = cache_manager.get(_cache_key, createfunc=heavy_compute)
97
98 :param region_name: region from ini file
99 :param cache_name: custom cache name, usually prefix+repo_name. eg
100 file_switcher_repo1
101 :param custom_ttl: override .ini file timeout on this cache
102 :return: instance of cache manager
103 """
104
105 cache_config = cache_regions.get(region_name, DEFAULT_CACHE_MANAGER_CONFIG)
106 if custom_ttl:
107 log.debug('Updating region %s with custom ttl: %s',
108 region_name, custom_ttl)
109 cache_config.update({'expire': custom_ttl})
110
111 return beaker.cache.Cache._get_cache(cache_name, cache_config)
112
113
114 def clear_cache_manager(cache_manager):
115 """
116 namespace = 'foobar'
117 cache_manager = get_cache_manager('some_namespace_name', namespace)
118 clear_cache_manager(cache_manager)
119 """
120
121 log.debug('Clearing all values for cache manager %s', cache_manager)
122 cache_manager.clear()
123
124
125 def compute_key_from_params(*args):
87 def compute_key_from_params(*args):
126 """
88 """
127 Helper to compute key from given params to be used in cache manager
89 Helper to compute key from given params to be used in cache manager
128 """
90 """
129 return sha1("_".join(map(safe_str, args)))
91 return sha1("_".join(map(safe_str, args)))
130
92
131
93
132 def get_repo_namespace_key(prefix, repo_name):
94 def get_repo_namespace_key(prefix, repo_name):
133 return '{0}_{1}'.format(prefix, compute_key_from_params(repo_name))
95 return '{0}_{1}'.format(prefix, compute_key_from_params(repo_name))
134
96
135
97
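The two helpers above produce stable, sha1-based identifiers; a small sketch with hypothetical arguments:

    _namespace = get_repo_namespace_key('readme_data', 'my-group/my-repo')
    _cache_key = compute_key_from_params('my-group/my-repo', 'abcdef012345')
    # both are deterministic strings, safe to reuse as cache namespaces/keys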
136 class ActiveRegionCache(object):
98 class ActiveRegionCache(object):
137 def __init__(self, context):
99 def __init__(self, context):
138 self.context = context
100 self.context = context
139
101
140 def invalidate(self, *args, **kwargs):
102 def invalidate(self, *args, **kwargs):
141 return False
103 return False
142
104
143 def compute(self):
105 def compute(self):
144 log.debug('Context cache: getting obj %s from cache', self.context)
106 log.debug('Context cache: getting obj %s from cache', self.context)
145 return self.context.compute_func(self.context.cache_key)
107 return self.context.compute_func(self.context.cache_key)
146
108
147
109
148 class FreshRegionCache(ActiveRegionCache):
110 class FreshRegionCache(ActiveRegionCache):
149 def invalidate(self):
111 def invalidate(self):
150 log.debug('Context cache: invalidating cache for %s', self.context)
112 log.debug('Context cache: invalidating cache for %s', self.context)
151 region_invalidate(
113 region_invalidate(
152 self.context.compute_func, None, self.context.cache_key)
114 self.context.compute_func, None, self.context.cache_key)
153 return True
115 return True
154
116
155
117
156 class InvalidationContext(object):
118 class InvalidationContext(object):
157 def __repr__(self):
119 def __repr__(self):
158 return '<InvalidationContext:{}[{}]>'.format(
120 return '<InvalidationContext:{}[{}]>'.format(
159 safe_str(self.repo_name), safe_str(self.cache_type))
121 safe_str(self.repo_name), safe_str(self.cache_type))
160
122
161 def __init__(self, compute_func, repo_name, cache_type,
123 def __init__(self, compute_func, repo_name, cache_type,
162 raise_exception=False, thread_scoped=False):
124 raise_exception=False, thread_scoped=False):
163 self.compute_func = compute_func
125 self.compute_func = compute_func
164 self.repo_name = repo_name
126 self.repo_name = repo_name
165 self.cache_type = cache_type
127 self.cache_type = cache_type
166 self.cache_key = compute_key_from_params(
128 self.cache_key = compute_key_from_params(
167 repo_name, cache_type)
129 repo_name, cache_type)
168 self.raise_exception = raise_exception
130 self.raise_exception = raise_exception
169
131
170 # Append the thread id to the cache key if this invalidation context
132 # Append the thread id to the cache key if this invalidation context
171 # should be scoped to the current thread.
133 # should be scoped to the current thread.
172 if thread_scoped:
134 if thread_scoped:
173 thread_id = threading.current_thread().ident
135 thread_id = threading.current_thread().ident
174 self.cache_key = '{cache_key}_{thread_id}'.format(
136 self.cache_key = '{cache_key}_{thread_id}'.format(
175 cache_key=self.cache_key, thread_id=thread_id)
137 cache_key=self.cache_key, thread_id=thread_id)
176
138
177 def get_cache_obj(self):
139 def get_cache_obj(self):
178 cache_key = CacheKey.get_cache_key(
140 cache_key = CacheKey.get_cache_key(
179 self.repo_name, self.cache_type)
141 self.repo_name, self.cache_type)
180 cache_obj = CacheKey.get_active_cache(cache_key)
142 cache_obj = CacheKey.get_active_cache(cache_key)
181 if not cache_obj:
143 if not cache_obj:
182 cache_obj = CacheKey(cache_key, self.repo_name)
144 cache_obj = CacheKey(cache_key, self.repo_name)
183 return cache_obj
145 return cache_obj
184
146
185 def __enter__(self):
147 def __enter__(self):
186 """
148 """
187 Test if current object is valid, and return CacheRegion function
149 Test if current object is valid, and return CacheRegion function
188 that does invalidation and calculation
150 that does invalidation and calculation
189 """
151 """
190
152
191 self.cache_obj = self.get_cache_obj()
153 self.cache_obj = self.get_cache_obj()
192 if self.cache_obj.cache_active:
154 if self.cache_obj.cache_active:
193 # means our cache obj exists and is marked as active, i.e. its
155 # means our cache obj exists and is marked as active, i.e. its
194 # cache is not outdated; we return the ActiveRegionCache
156 # cache is not outdated; we return the ActiveRegionCache
195 self.skip_cache_active_change = True
157 self.skip_cache_active_change = True
196 return ActiveRegionCache(self)
158 return ActiveRegionCache(self)
197
159
198 # the key either does not exist or is set to False, so we return
160 # the key either does not exist or is set to False, so we return
199 # the real invalidator which re-computes value. We additionally set
161 # the real invalidator which re-computes value. We additionally set
200 # the flag to actually update the Database objects
162 # the flag to actually update the Database objects
201 self.skip_cache_active_change = False
163 self.skip_cache_active_change = False
202 return FreshRegionCache(self)
164 return FreshRegionCache(self)
203
165
204 def __exit__(self, exc_type, exc_val, exc_tb):
166 def __exit__(self, exc_type, exc_val, exc_tb):
205
167
206 if self.skip_cache_active_change:
168 if self.skip_cache_active_change:
207 return
169 return
208
170
209 try:
171 try:
210 self.cache_obj.cache_active = True
172 self.cache_obj.cache_active = True
211 Session().add(self.cache_obj)
173 Session().add(self.cache_obj)
212 Session().commit()
174 Session().commit()
213 except IntegrityError:
175 except IntegrityError:
214 # if we catch an integrity error, it means we already inserted this object;
176 # if we catch an integrity error, it means we already inserted this object;
215 # the assumption is that this is really an edge race-condition case and
177 # the assumption is that this is really an edge race-condition case and
216 # it's safe to skip it
178 # it's safe to skip it
217 Session().rollback()
179 Session().rollback()
218 except Exception:
180 except Exception:
219 log.exception('Failed to commit on cache key update')
181 log.exception('Failed to commit on cache key update')
220 Session().rollback()
182 Session().rollback()
221 if self.raise_exception:
183 if self.raise_exception:
222 raise
184 raise
223
185
224
186
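Putting the context manager above together, a hedged usage sketch; the region and repository names are hypothetical, and compute_func is assumed to be a beaker region-cached function so FreshRegionCache.invalidate() can locate it:

    from beaker.cache import cache_region

    @cache_region('long_term')
    def _compute_readme(cache_key):
        # expensive rendering goes here; re-run only after invalidation
        return 'rendered readme'

    context = InvalidationContext(
        _compute_readme, repo_name='my-group/my-repo', cache_type='readme_data')

    with context as invalidator:
        invalidator.invalidate()          # no-op for ActiveRegionCache
        result = invalidator.compute()    # calls _compute_readme(cache_key)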
225 def includeme(config):
187 def includeme(config):
226 configure_caches(config.registry.settings)
188 configure_caches(config.registry.settings)
@@ -1,325 +1,298 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2018 RhodeCode GmbH
3 # Copyright (C) 2010-2018 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """caching_query.py
22 caching_query.py
23
22
24 Represent persistence structures which allow the usage of
23 Represent functions and classes
25 Beaker caching with SQLAlchemy.
24 which allow the usage of Dogpile caching with SQLAlchemy.
25 Introduces a query option called FromCache.
26
26
27 The new concepts introduced here are:
27 The new concepts introduced here are:
28
28
29 * CachingQuery - a Query subclass that caches and
29 * CachingQuery - a Query subclass that caches and
30 retrieves results in/from Beaker.
30 retrieves results in/from dogpile.cache.
31 * FromCache - a query option that establishes caching
31 * FromCache - a query option that establishes caching
32 parameters on a Query
32 parameters on a Query
33 * RelationshipCache - a variant of FromCache which is specific
33 * RelationshipCache - a variant of FromCache which is specific
34 to a query invoked during a lazy load.
34 to a query invoked during a lazy load.
35 * _params_from_query - extracts value parameters from
35 * _params_from_query - extracts value parameters from
36 a Query.
36 a Query.
37
37
38 The rest of what's here are standard SQLAlchemy and
38 The rest of what's here are standard SQLAlchemy and
39 Beaker constructs.
39 dogpile.cache constructs.
40
40
41 """
41 """
42 import beaker
43 from beaker.exceptions import BeakerException
44
45 from sqlalchemy.orm.interfaces import MapperOption
42 from sqlalchemy.orm.interfaces import MapperOption
46 from sqlalchemy.orm.query import Query
43 from sqlalchemy.orm.query import Query
47 from sqlalchemy.sql import visitors
44 from sqlalchemy.sql import visitors
45 from dogpile.cache.api import NO_VALUE
48
46
49 from rhodecode.lib.utils2 import safe_str
47 from rhodecode.lib.utils2 import safe_str
50
48
51
49
52 class CachingQuery(Query):
50 class CachingQuery(Query):
53 """A Query subclass which optionally loads full results from a Beaker
51 """A Query subclass which optionally loads full results from a dogpile
54 cache region.
52 cache region.
55
53
56 The CachingQuery stores additional state that allows it to consult
54 The CachingQuery optionally stores additional state that allows it to consult
57 a Beaker cache before accessing the database:
55 a dogpile.cache cache before accessing the database, in the form
58
56 of a FromCache or RelationshipCache object. Each of these objects
59 * A "region", which is a cache region argument passed to a
57 refers to the name of a :class:`dogpile.cache.Region` that's been configured
60 Beaker CacheManager, specifies a particular cache configuration
58 and stored in a lookup dictionary. When such an object has associated
61 (including backend implementation, expiration times, etc.)
59 itself with the CachingQuery, the corresponding :class:`dogpile.cache.Region`
62 * A "namespace", which is a qualifying name that identifies a
60 is used to locate a cached result. If none is present, then the
63 group of keys within the cache. A query that filters on a name
61 Query is invoked normally, the results being cached.
64 might use the name "by_name", a query that filters on a date range
65 to a joined table might use the name "related_date_range".
66
67 When the above state is present, a Beaker cache is retrieved.
68
69 The "namespace" name is first concatenated with
70 a string composed of the individual entities and columns the Query
71 requests, i.e. such as ``Query(User.id, User.name)``.
72
73 The Beaker cache is then loaded from the cache manager based
74 on the region and composed namespace. The key within the cache
75 itself is then constructed against the bind parameters specified
76 by this query, which are usually literals defined in the
77 WHERE clause.
78
62
79 The FromCache and RelationshipCache mapper options below represent
63 The FromCache and RelationshipCache mapper options below represent
80 the "public" method of configuring this state upon the CachingQuery.
64 the "public" method of configuring this state upon the CachingQuery.
81
65
82 """
66 """
67 def _get_region(self):
68 from rhodecode.lib.rc_cache import region_meta
69 return region_meta.dogpile_cache_regions
83
70
84 def __init__(self, manager, *args, **kw):
71 def __init__(self, regions, *args, **kw):
85 self.cache_manager = manager
72 self.cache_regions = regions or self._get_region()
86 Query.__init__(self, *args, **kw)
73 Query.__init__(self, *args, **kw)
87
74
88 def __iter__(self):
75 def __iter__(self):
89 """override __iter__ to pull results from Beaker
76 """override __iter__ to pull results from dogpile
90 if particular attributes have been configured.
77 if particular attributes have been configured.
91
78
92 Note that this approach does *not* detach the loaded objects from
79 Note that this approach does *not* detach the loaded objects from
93 the current session. If the cache backend is an in-process cache
80 the current session. If the cache backend is an in-process cache
94 (like "memory") and lives beyond the scope of the current session's
81 (like "memory") and lives beyond the scope of the current session's
95 transaction, those objects may be expired. The method here can be
82 transaction, those objects may be expired. The method here can be
96 modified to first expunge() each loaded item from the current
83 modified to first expunge() each loaded item from the current
97 session before returning the list of items, so that the items
84 session before returning the list of items, so that the items
98 in the cache are not the same ones in the current Session.
85 in the cache are not the same ones in the current Session.
99
86
100 """
87 """
101 if hasattr(self, '_cache_parameters'):
88 super_ = super(CachingQuery, self)
89
90 if hasattr(self, '_cache_region'):
91 return self.get_value(createfunc=lambda: list(super_.__iter__()))
92 else:
93 return super_.__iter__()
94
95 def _execute_and_instances(self, context):
96 """override _execute_and_instances to pull results from dogpile
97 if the query is invoked directly from an external context.
98
99 This method is necessary in order to maintain compatibility
100 with the "baked query" system now used by default in some
101 relationship loader scenarios. Note also the
102 RelationshipCache._generate_cache_key method which enables
103 the baked query to be used within lazy loads.
102
104
103 def caching_query():
105 .. versionadded:: 1.2.7
104 return list(Query.__iter__(self))
106 """
107 super_ = super(CachingQuery, self)
105
108
106 return self.get_value(createfunc=caching_query)
109 if context.query is not self and hasattr(self, '_cache_region'):
110 # special logic called when the Query._execute_and_instances()
111 # method is called directly from the baked query
112 return self.get_value(
113 createfunc=lambda: list(
114 super_._execute_and_instances(context)
115 )
116 )
107 else:
117 else:
108 return Query.__iter__(self)
118 return super_._execute_and_instances(context)
119
120 def _get_cache_plus_key(self):
121 """Return a cache region plus key."""
122 dogpile_region = self.cache_regions[self._cache_region.region]
123 if self._cache_region.cache_key:
124 key = self._cache_region.cache_key
125 else:
126 key = _key_from_query(self)
127 return dogpile_region, key
109
128
110 def invalidate(self):
129 def invalidate(self):
111 """Invalidate the value represented by this Query."""
130 """Invalidate the cache value represented by this Query."""
112
131
113 cache, cache_key = _get_cache_parameters(self)
132 dogpile_region, cache_key = self._get_cache_plus_key()
114 cache.remove(cache_key)
133 dogpile_region.delete(cache_key)
115
134
116 def get_value(self, merge=True, createfunc=None):
135 def get_value(self, merge=True, createfunc=None,
136 expiration_time=None, ignore_expiration=False):
117 """Return the value from the cache for this query.
137 """Return the value from the cache for this query.
118
138
119 Raise KeyError if no value present and no
139 Raise KeyError if no value present and no
120 createfunc specified.
140 createfunc specified.
121
141
122 """
142 """
123 cache, cache_key = _get_cache_parameters(self)
143 dogpile_region, cache_key = self._get_cache_plus_key()
124 ret = cache.get_value(cache_key, createfunc=createfunc)
144
145 # ignore_expiration means, if the value is in the cache
146 # but is expired, return it anyway. This doesn't make sense
147 # with createfunc, which says, if the value is expired, generate
148 # a new value.
149 assert not ignore_expiration or not createfunc, \
150 "Can't ignore expiration and also provide createfunc"
151
152 if ignore_expiration or not createfunc:
153 cached_value = dogpile_region.get(cache_key,
154 expiration_time=expiration_time,
155 ignore_expiration=ignore_expiration)
156 else:
157 cached_value = dogpile_region.get_or_create(
158 cache_key,
159 createfunc,
160 expiration_time=expiration_time
161 )
162 if cached_value is NO_VALUE:
163 raise KeyError(cache_key)
125 if merge:
164 if merge:
126 ret = self.merge_result(ret, load=False)
165 cached_value = self.merge_result(cached_value, load=False)
127 return ret
166 return cached_value
128
167
129 def set_value(self, value):
168 def set_value(self, value):
130 """Set the value in the cache for this query."""
169 """Set the value in the cache for this query."""
131
170
132 cache, cache_key = _get_cache_parameters(self)
171 dogpile_region, cache_key = self._get_cache_plus_key()
133 cache.put(cache_key, value)
172 dogpile_region.set(cache_key, value)
134
173
135
174
136 def query_callable(manager, query_cls=CachingQuery):
175 def query_callable(regions=None, query_cls=CachingQuery):
137 def query(*arg, **kw):
176 def query(*arg, **kw):
138 return query_cls(manager, *arg, **kw)
177 return query_cls(regions, *arg, **kw)
139 return query
178 return query
140
179
141
180
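Following the dogpile.cache/SQLAlchemy caching recipe this module is based on, query_callable() is meant to be handed to the sessionmaker; a hedged sketch with a hypothetical in-memory region:

    from dogpile.cache import make_region
    from sqlalchemy import create_engine
    from sqlalchemy.orm import scoped_session, sessionmaker

    regions = {
        'sql_cache_short': make_region().configure(
            'dogpile.cache.memory', expiration_time=30),
    }

    engine = create_engine('sqlite://')
    Session = scoped_session(sessionmaker(
        bind=engine, query_cls=query_callable(regions)))
    # every query created through Session() is now a CachingQuery and can
    # accept the FromCache / RelationshipCache options defined below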
142 def get_cache_region(name, region):
181 def _key_from_query(query, qualifier=None):
143 if region not in beaker.cache.cache_regions:
182 """Given a Query, create a cache key.
144 raise BeakerException('Cache region `%s` not configured '
145 'Check if proper cache settings are in the .ini files' % region)
146 kw = beaker.cache.cache_regions[region]
147 return beaker.cache.Cache._get_cache(name, kw)
148
183
149
184 There are many approaches to this; here we use the simplest,
150 def _get_cache_parameters(query):
185 which is to create an md5 hash of the text of the SQL statement,
151 """For a query with cache_region and cache_namespace configured,
186 combined with stringified versions of all the bound parameters
152 return the corresponding Cache instance and cache key, based
187 within it. There's a bit of a performance hit with
153 on this query's current criterion and parameter values.
188 compiling out "query.statement" here; other approaches include
189 setting up an explicit cache key with a particular Query,
190 then combining that with the bound parameter values.
154
191
155 """
192 """
156 if not hasattr(query, '_cache_parameters'):
157 raise ValueError("This Query does not have caching "
158 "parameters configured.")
159
193
160 region, namespace, cache_key = query._cache_parameters
194 stmt = query.with_labels().statement
161
195 compiled = stmt.compile()
162 namespace = _namespace_from_query(namespace, query)
196 params = compiled.params
163
164 if cache_key is None:
165 # cache key - the value arguments from this query's parameters.
166 args = [safe_str(x) for x in _params_from_query(query)]
167 args.extend(filter(lambda k: k not in ['None', None, u'None'],
168 [str(query._limit), str(query._offset)]))
169
170 cache_key = " ".join(args)
171
172 if cache_key is None:
173 raise Exception('Cache key cannot be None')
174
197
175 # get cache
198 # here we return the key as a long string. our "key mangler"
176 #cache = query.cache_manager.get_cache_region(namespace, region)
199 # set up with the region will boil it down to an md5.
177 cache = get_cache_region(namespace, region)
200 return " ".join(
178 # optional - hash the cache_key too for consistent length
201 [safe_str(compiled)] +
179 # import uuid
202 [safe_str(params[k]) for k in sorted(params)])
180 # cache_key= str(uuid.uuid5(uuid.NAMESPACE_DNS, cache_key))
181
182 return cache, cache_key
183
184
185 def _namespace_from_query(namespace, query):
186 # cache namespace - the token handed in by the
187 # option + class we're querying against
188 namespace = " ".join([namespace] + [str(x) for x in query._entities])
189
190 # memcached wants this
191 namespace = namespace.replace(' ', '_')
192
193 return namespace
194
195
196 def _set_cache_parameters(query, region, namespace, cache_key):
197
198 if hasattr(query, '_cache_parameters'):
199 region, namespace, cache_key = query._cache_parameters
200 raise ValueError("This query is already configured "
201 "for region %r namespace %r" %
202 (region, namespace))
203 query._cache_parameters = region, namespace, cache_key
204
203
205
204
206 class FromCache(MapperOption):
205 class FromCache(MapperOption):
207 """Specifies that a Query should load results from a cache."""
206 """Specifies that a Query should load results from a cache."""
208
207
209 propagate_to_loaders = False
208 propagate_to_loaders = False
210
209
211 def __init__(self, region, namespace, cache_key=None):
210 def __init__(self, region="sql_cache_short", cache_key=None):
212 """Construct a new FromCache.
211 """Construct a new FromCache.
213
212
214 :param region: the cache region. Should be a
213 :param region: the cache region. Should be a
215 region configured in the Beaker CacheManager.
214 region configured in the dictionary of dogpile
216
215 regions.
217 :param namespace: the cache namespace. Should
218 be a name uniquely describing the target Query's
219 lexical structure.
220
216
221 :param cache_key: optional. A string cache key
217 :param cache_key: optional. A string cache key
222 that will serve as the key to the query. Use this
218 that will serve as the key to the query. Use this
223 if your query has a huge amount of parameters (such
219 if your query has a huge amount of parameters (such
224 as when using in_()) which correspond more simply to
220 as when using in_()) which correspond more simply to
225 some other identifier.
221 some other identifier.
226
222
227 """
223 """
228 self.region = region
224 self.region = region
229 self.namespace = namespace
230 self.cache_key = cache_key
225 self.cache_key = cache_key
231
226
232 def process_query(self, query):
227 def process_query(self, query):
233 """Process a Query during normal loading operation."""
228 """Process a Query during normal loading operation."""
234
229 query._cache_region = self
235 _set_cache_parameters(query, self.region, self.namespace,
236 self.cache_key)
237
230
238
231
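A hedged usage sketch of the new FromCache signature; it assumes a Session wired up with query_callable() as sketched above, and the User model and cache key are only illustrative:

    cached_user = Session().query(User)\
        .filter(User.username == 'admin')\
        .options(FromCache('sql_cache_short', cache_key='get_user_admin'))\
        .scalar()

    # drop the cached entry again when the data changes:
    Session().query(User)\
        .options(FromCache('sql_cache_short', cache_key='get_user_admin'))\
        .invalidate()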
239 class RelationshipCache(MapperOption):
232 class RelationshipCache(MapperOption):
240 """Specifies that a Query as called within a "lazy load"
233 """Specifies that a Query as called within a "lazy load"
241 should load results from a cache."""
234 should load results from a cache."""
242
235
243 propagate_to_loaders = True
236 propagate_to_loaders = True
244
237
245 def __init__(self, region, namespace, attribute):
238 def __init__(self, attribute, region="sql_cache_short", cache_key=None):
246 """Construct a new RelationshipCache.
239 """Construct a new RelationshipCache.
247
240
248 :param region: the cache region. Should be a
249 region configured in the Beaker CacheManager.
250
251 :param namespace: the cache namespace. Should
252 be a name uniquely describing the target Query's
253 lexical structure.
254
255 :param attribute: A Class.attribute which
241 :param attribute: A Class.attribute which
256 indicates a particular class relationship() whose
242 indicates a particular class relationship() whose
257 lazy loader should be pulled from the cache.
243 lazy loader should be pulled from the cache.
258
244
245 :param region: name of the cache region.
246
247 :param cache_key: optional. A string cache key
248 that will serve as the key to the query, bypassing
249 the usual means of forming a key from the Query itself.
250
259 """
251 """
260 self.region = region
252 self.region = region
261 self.namespace = namespace
253 self.cache_key = cache_key
262 self._relationship_options = {
254 self._relationship_options = {
263 (attribute.property.parent.class_, attribute.property.key): self
255 (attribute.property.parent.class_, attribute.property.key): self
264 }
256 }
265
257
258 def _generate_cache_key(self, path):
259 """Indicate to the lazy-loader strategy that a "baked" query
260 may be used by returning ``None``.
261
262 If this method is omitted, the default implementation of
263 :class:`.MapperOption._generate_cache_key` takes place, which
264 returns ``False`` to disable the "baked" query from being used.
265
266 .. versionadded:: 1.2.7
267
268 """
269 return None
270
266 def process_query_conditionally(self, query):
271 def process_query_conditionally(self, query):
267 """Process a Query that is used within a lazy loader.
272 """Process a Query that is used within a lazy loader.
268
273
269 (the process_query_conditionally() method is a SQLAlchemy
274 (the process_query_conditionally() method is a SQLAlchemy
270 hook invoked only within lazyload.)
275 hook invoked only within lazyload.)
271
276
272 """
277 """
273 if query._current_path:
278 if query._current_path:
274 mapper, key = query._current_path[-2:]
279 mapper, prop = query._current_path[-2:]
280 key = prop.key
275
281
276 for cls in mapper.class_.__mro__:
282 for cls in mapper.class_.__mro__:
277 if (cls, key) in self._relationship_options:
283 if (cls, key) in self._relationship_options:
278 relationship_option = \
284 relationship_option = self._relationship_options[(cls, key)]
279 self._relationship_options[(cls, key)]
285 query._cache_region = relationship_option
280 _set_cache_parameters(
286 break
281 query,
282 relationship_option.region,
283 relationship_option.namespace,
284 None)
285
287
286 def and_(self, option):
288 def and_(self, option):
287 """Chain another RelationshipCache option to this one.
289 """Chain another RelationshipCache option to this one.
288
290
289 While many RelationshipCache objects can be specified on a single
291 While many RelationshipCache objects can be specified on a single
290 Query separately, chaining them together allows for a more efficient
292 Query separately, chaining them together allows for a more efficient
291 lookup during load.
293 lookup during load.
292
294
293 """
295 """
294 self._relationship_options.update(option._relationship_options)
296 self._relationship_options.update(option._relationship_options)
295 return self
297 return self
296
298
297
298 def _params_from_query(query):
299 """Pull the bind parameter values from a query.
300
301 This takes into account any scalar attribute bindparam set up.
302
303 E.g. params_from_query(query.filter(Cls.foo==5).filter(Cls.bar==7))
304 would return [5, 7].
305
306 """
307 v = []
308 def visit_bindparam(bind):
309
310 if bind.key in query._params:
311 value = query._params[bind.key]
312 elif bind.callable:
313 # lazyloader may dig a callable in here, intended
314 # to late-evaluate params after autoflush is called.
315 # convert to a scalar value.
316 value = bind.callable()
317 else:
318 value = bind.value
319
320 v.append(value)
321 if query._criterion is not None:
322 visitors.traverse(query._criterion, {}, {'bindparam':visit_bindparam})
323 for f in query._from_obj:
324 visitors.traverse(f, {}, {'bindparam':visit_bindparam})
325 return v
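And a matching sketch for the new RelationshipCache signature; the relationship attribute here is a hypothetical stand-in, and any mapped relationship() would work the same way:

    email_cache = RelationshipCache(User.user_emails, region='sql_cache_short')

    user = Session().query(User)\
        .options(email_cache)\
        .filter(User.username == 'admin')\
        .scalar()
    # the lazy load of user.user_emails now consults the dogpile region first
    emails = user.user_emails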
@@ -1,4527 +1,4527 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2018 RhodeCode GmbH
3 # Copyright (C) 2010-2018 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 Database Models for RhodeCode Enterprise
22 Database Models for RhodeCode Enterprise
23 """
23 """
24
24
25 import re
25 import re
26 import os
26 import os
27 import time
27 import time
28 import hashlib
28 import hashlib
29 import logging
29 import logging
30 import datetime
30 import datetime
31 import warnings
31 import warnings
32 import ipaddress
32 import ipaddress
33 import functools
33 import functools
34 import traceback
34 import traceback
35 import collections
35 import collections
36
36
37 from sqlalchemy import (
37 from sqlalchemy import (
38 or_, and_, not_, func, TypeDecorator, event,
38 or_, and_, not_, func, TypeDecorator, event,
39 Index, Sequence, UniqueConstraint, ForeignKey, CheckConstraint, Column,
39 Index, Sequence, UniqueConstraint, ForeignKey, CheckConstraint, Column,
40 Boolean, String, Unicode, UnicodeText, DateTime, Integer, LargeBinary,
40 Boolean, String, Unicode, UnicodeText, DateTime, Integer, LargeBinary,
41 Text, Float, PickleType)
41 Text, Float, PickleType)
42 from sqlalchemy.sql.expression import true, false
42 from sqlalchemy.sql.expression import true, false
43 from sqlalchemy.sql.functions import coalesce, count # noqa
43 from sqlalchemy.sql.functions import coalesce, count # noqa
44 from sqlalchemy.orm import (
44 from sqlalchemy.orm import (
45 relationship, joinedload, class_mapper, validates, aliased)
45 relationship, joinedload, class_mapper, validates, aliased)
46 from sqlalchemy.ext.declarative import declared_attr
46 from sqlalchemy.ext.declarative import declared_attr
47 from sqlalchemy.ext.hybrid import hybrid_property
47 from sqlalchemy.ext.hybrid import hybrid_property
48 from sqlalchemy.exc import IntegrityError # noqa
48 from sqlalchemy.exc import IntegrityError # noqa
49 from sqlalchemy.dialects.mysql import LONGTEXT
49 from sqlalchemy.dialects.mysql import LONGTEXT
50 from beaker.cache import cache_region
50 from beaker.cache import cache_region
51 from zope.cachedescriptors.property import Lazy as LazyProperty
51 from zope.cachedescriptors.property import Lazy as LazyProperty
52
52
53 from pyramid.threadlocal import get_current_request
53 from pyramid.threadlocal import get_current_request
54
54
55 from rhodecode.translation import _
55 from rhodecode.translation import _
56 from rhodecode.lib.vcs import get_vcs_instance
56 from rhodecode.lib.vcs import get_vcs_instance
57 from rhodecode.lib.vcs.backends.base import EmptyCommit, Reference
57 from rhodecode.lib.vcs.backends.base import EmptyCommit, Reference
58 from rhodecode.lib.utils2 import (
58 from rhodecode.lib.utils2 import (
59 str2bool, safe_str, get_commit_safe, safe_unicode, sha1_safe,
59 str2bool, safe_str, get_commit_safe, safe_unicode, sha1_safe,
60 time_to_datetime, aslist, Optional, safe_int, get_clone_url, AttributeDict,
60 time_to_datetime, aslist, Optional, safe_int, get_clone_url, AttributeDict,
61 glob2re, StrictAttributeDict, cleaned_uri)
61 glob2re, StrictAttributeDict, cleaned_uri)
62 from rhodecode.lib.jsonalchemy import MutationObj, MutationList, JsonType, \
62 from rhodecode.lib.jsonalchemy import MutationObj, MutationList, JsonType, \
63 JsonRaw
63 JsonRaw
64 from rhodecode.lib.ext_json import json
64 from rhodecode.lib.ext_json import json
65 from rhodecode.lib.caching_query import FromCache
65 from rhodecode.lib.caching_query import FromCache
66 from rhodecode.lib.encrypt import AESCipher
66 from rhodecode.lib.encrypt import AESCipher
67
67
68 from rhodecode.model.meta import Base, Session
68 from rhodecode.model.meta import Base, Session
69
69
70 URL_SEP = '/'
70 URL_SEP = '/'
71 log = logging.getLogger(__name__)
71 log = logging.getLogger(__name__)
72
72
73 # =============================================================================
73 # =============================================================================
74 # BASE CLASSES
74 # BASE CLASSES
75 # =============================================================================
75 # =============================================================================
76
76
77 # this is propagated from .ini file rhodecode.encrypted_values.secret or
77 # this is propagated from .ini file rhodecode.encrypted_values.secret or
78 # beaker.session.secret if first is not set.
78 # beaker.session.secret if first is not set.
79 # and initialized at environment.py
79 # and initialized at environment.py
80 ENCRYPTION_KEY = None
80 ENCRYPTION_KEY = None
81
81
82 # used to sort permissions by types, '#' used here is not allowed to be in
82 # used to sort permissions by types, '#' used here is not allowed to be in
83 # usernames, and it's very early in sorted string.printable table.
83 # usernames, and it's very early in sorted string.printable table.
84 PERMISSION_TYPE_SORT = {
84 PERMISSION_TYPE_SORT = {
85 'admin': '####',
85 'admin': '####',
86 'write': '###',
86 'write': '###',
87 'read': '##',
87 'read': '##',
88 'none': '#',
88 'none': '#',
89 }
89 }
90
90
91
91
92 def display_user_sort(obj):
92 def display_user_sort(obj):
93 """
93 """
94 Sort function used to sort permissions in .permissions() function of
94 Sort function used to sort permissions in .permissions() function of
95 Repository, RepoGroup, UserGroup. It also puts the default user in front
95 Repository, RepoGroup, UserGroup. It also puts the default user in front
96 of all other resources.
96 of all other resources.
97 """
97 """
98
98
99 if obj.username == User.DEFAULT_USER:
99 if obj.username == User.DEFAULT_USER:
100 return '#####'
100 return '#####'
101 prefix = PERMISSION_TYPE_SORT.get(obj.permission.split('.')[-1], '')
101 prefix = PERMISSION_TYPE_SORT.get(obj.permission.split('.')[-1], '')
102 return prefix + obj.username
102 return prefix + obj.username
103
103
104
104
105 def display_user_group_sort(obj):
105 def display_user_group_sort(obj):
106 """
106 """
107 Sort function used to sort permissions in .permissions() function of
107 Sort function used to sort permissions in .permissions() function of
108 Repository, RepoGroup, UserGroup. It also puts the default user in front
108 Repository, RepoGroup, UserGroup. It also puts the default user in front
109 of all other resources.
109 of all other resources.
110 """
110 """
111
111
112 prefix = PERMISSION_TYPE_SORT.get(obj.permission.split('.')[-1], '')
112 prefix = PERMISSION_TYPE_SORT.get(obj.permission.split('.')[-1], '')
113 return prefix + obj.users_group_name
113 return prefix + obj.users_group_name
114
114
115
115
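# A small usage sketch, assuming hypothetical permission lists returned by
# the .permissions() helpers mentioned above: both functions are plain key
# functions for sorted(), and the '#' prefixes make 'admin' entries sort
# before 'write', 'read' and 'none', with the default user first of all.
user_perms_sorted = sorted(repo_permissions, key=display_user_sort)
group_perms_sorted = sorted(group_permissions, key=display_user_group_sort)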
116 def _hash_key(k):
116 def _hash_key(k):
117 return sha1_safe(k)
117 return sha1_safe(k)
118
118
119
119
120 def in_filter_generator(qry, items, limit=500):
120 def in_filter_generator(qry, items, limit=500):
121 """
121 """
122 Splits IN() into multiple with OR
122 Splits IN() into multiple with OR
123 e.g.::
123 e.g.::
124 cnt = Repository.query().filter(
124 cnt = Repository.query().filter(
125 or_(
125 or_(
126 *in_filter_generator(Repository.repo_id, range(100000))
126 *in_filter_generator(Repository.repo_id, range(100000))
127 )).count()
127 )).count()
128 """
128 """
129 if not items:
129 if not items:
130 # empty list will cause empty query which might cause security issues
130 # empty list will cause empty query which might cause security issues
131 # this can lead to hidden unpleasant results
131 # this can lead to hidden unpleasant results
132 items = [-1]
132 items = [-1]
133
133
134 parts = []
134 parts = []
135 for chunk in xrange(0, len(items), limit):
135 for chunk in xrange(0, len(items), limit):
136 parts.append(
136 parts.append(
137 qry.in_(items[chunk: chunk + limit])
137 qry.in_(items[chunk: chunk + limit])
138 )
138 )
139
139
140 return parts
140 return parts
141
141
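# A quick sketch of the chunking above (Repository is defined further down in
# this module): with the default limit of 500, 1200 ids yield three IN()
# clauses OR'ed together, and an empty list degrades to IN(-1) so the filter
# matches nothing instead of everything.
parts = in_filter_generator(Repository.repo_id, list(range(1200)))
assert len(parts) == 3
count = Repository.query().filter(or_(*parts)).count()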
142
142
143 base_table_args = {
143 base_table_args = {
144 'extend_existing': True,
144 'extend_existing': True,
145 'mysql_engine': 'InnoDB',
145 'mysql_engine': 'InnoDB',
146 'mysql_charset': 'utf8',
146 'mysql_charset': 'utf8',
147 'sqlite_autoincrement': True
147 'sqlite_autoincrement': True
148 }
148 }
149
149
150
150
151 class EncryptedTextValue(TypeDecorator):
151 class EncryptedTextValue(TypeDecorator):
152 """
152 """
153 Special column for encrypted long text data, use like::
153 Special column for encrypted long text data, use like::
154
154
155 value = Column("encrypted_value", EncryptedTextValue(), nullable=False)
155 value = Column("encrypted_value", EncryptedTextValue(), nullable=False)
156
156
157 This column is intelligent: if the stored value is in unencrypted form it
157 This column is intelligent: if the stored value is in unencrypted form it
158 is returned as-is, but on save the value is always encrypted
158 is returned as-is, but on save the value is always encrypted
159 """
159 """
160 impl = Text
160 impl = Text
161
161
162 def process_bind_param(self, value, dialect):
162 def process_bind_param(self, value, dialect):
163 if not value:
163 if not value:
164 return value
164 return value
165 if value.startswith('enc$aes$') or value.startswith('enc$aes_hmac$'):
165 if value.startswith('enc$aes$') or value.startswith('enc$aes_hmac$'):
166 # protect against double encrypting if someone manually starts
166 # protect against double encrypting if someone manually starts
167 # doing
167 # doing
168 raise ValueError('value needs to be in unencrypted format, ie. '
168 raise ValueError('value needs to be in unencrypted format, ie. '
169 'not starting with enc$aes')
169 'not starting with enc$aes')
170 return 'enc$aes_hmac$%s' % AESCipher(
170 return 'enc$aes_hmac$%s' % AESCipher(
171 ENCRYPTION_KEY, hmac=True).encrypt(value)
171 ENCRYPTION_KEY, hmac=True).encrypt(value)
172
172
173 def process_result_value(self, value, dialect):
173 def process_result_value(self, value, dialect):
174 import rhodecode
174 import rhodecode
175
175
176 if not value:
176 if not value:
177 return value
177 return value
178
178
179 parts = value.split('$', 3)
179 parts = value.split('$', 3)
180 if not len(parts) == 3:
180 if not len(parts) == 3:
181 # probably not encrypted values
181 # probably not encrypted values
182 return value
182 return value
183 else:
183 else:
184 if parts[0] != 'enc':
184 if parts[0] != 'enc':
185 # parts ok but without our header ?
185 # parts ok but without our header ?
186 return value
186 return value
187 enc_strict_mode = str2bool(rhodecode.CONFIG.get(
187 enc_strict_mode = str2bool(rhodecode.CONFIG.get(
188 'rhodecode.encrypted_values.strict') or True)
188 'rhodecode.encrypted_values.strict') or True)
189 # at that stage we know it's our encryption
189 # at that stage we know it's our encryption
190 if parts[1] == 'aes':
190 if parts[1] == 'aes':
191 decrypted_data = AESCipher(ENCRYPTION_KEY).decrypt(parts[2])
191 decrypted_data = AESCipher(ENCRYPTION_KEY).decrypt(parts[2])
192 elif parts[1] == 'aes_hmac':
192 elif parts[1] == 'aes_hmac':
193 decrypted_data = AESCipher(
193 decrypted_data = AESCipher(
194 ENCRYPTION_KEY, hmac=True,
194 ENCRYPTION_KEY, hmac=True,
195 strict_verification=enc_strict_mode).decrypt(parts[2])
195 strict_verification=enc_strict_mode).decrypt(parts[2])
196 else:
196 else:
197 raise ValueError(
197 raise ValueError(
198 'Encryption type part is wrong, must be `aes` '
198 'Encryption type part is wrong, must be `aes` '
199 'or `aes_hmac`, got `%s` instead' % (parts[1]))
199 'or `aes_hmac`, got `%s` instead' % (parts[1]))
200 return decrypted_data
200 return decrypted_data
201
201
202
202
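# A minimal sketch of declaring such a column on a model (hypothetical column
# name; assumes ENCRYPTION_KEY has been initialized from the .ini secret as
# noted above): plaintext assigned to the attribute is stored as
# 'enc$aes_hmac$<payload>' and decrypted transparently when the row is loaded.
secret_value = Column("secret_value", EncryptedTextValue(), nullable=True)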
203 class BaseModel(object):
203 class BaseModel(object):
204 """
204 """
205 Base Model for all classes
205 Base Model for all classes
206 """
206 """
207
207
208 @classmethod
208 @classmethod
209 def _get_keys(cls):
209 def _get_keys(cls):
210 """return column names for this model """
210 """return column names for this model """
211 return class_mapper(cls).c.keys()
211 return class_mapper(cls).c.keys()
212
212
213 def get_dict(self):
213 def get_dict(self):
214 """
214 """
215 return dict with keys and values corresponding
215 return dict with keys and values corresponding
216 to this model data """
216 to this model data """
217
217
218 d = {}
218 d = {}
219 for k in self._get_keys():
219 for k in self._get_keys():
220 d[k] = getattr(self, k)
220 d[k] = getattr(self, k)
221
221
222 # also use __json__() if present to get additional fields
222 # also use __json__() if present to get additional fields
223 _json_attr = getattr(self, '__json__', None)
223 _json_attr = getattr(self, '__json__', None)
224 if _json_attr:
224 if _json_attr:
225 # update with attributes from __json__
225 # update with attributes from __json__
226 if callable(_json_attr):
226 if callable(_json_attr):
227 _json_attr = _json_attr()
227 _json_attr = _json_attr()
228 for k, val in _json_attr.iteritems():
228 for k, val in _json_attr.iteritems():
229 d[k] = val
229 d[k] = val
230 return d
230 return d
231
231
232 def get_appstruct(self):
232 def get_appstruct(self):
233 """return list with keys and values tuples corresponding
233 """return list with keys and values tuples corresponding
234 to this model data """
234 to this model data """
235
235
236 lst = []
236 lst = []
237 for k in self._get_keys():
237 for k in self._get_keys():
238 lst.append((k, getattr(self, k),))
238 lst.append((k, getattr(self, k),))
239 return lst
239 return lst
240
240
241 def populate_obj(self, populate_dict):
241 def populate_obj(self, populate_dict):
242 """populate model with data from given populate_dict"""
242 """populate model with data from given populate_dict"""
243
243
244 for k in self._get_keys():
244 for k in self._get_keys():
245 if k in populate_dict:
245 if k in populate_dict:
246 setattr(self, k, populate_dict[k])
246 setattr(self, k, populate_dict[k])
247
247
248 @classmethod
248 @classmethod
249 def query(cls):
249 def query(cls):
250 return Session().query(cls)
250 return Session().query(cls)
251
251
252 @classmethod
252 @classmethod
253 def get(cls, id_):
253 def get(cls, id_):
254 if id_:
254 if id_:
255 return cls.query().get(id_)
255 return cls.query().get(id_)
256
256
257 @classmethod
257 @classmethod
258 def get_or_404(cls, id_):
258 def get_or_404(cls, id_):
259 from pyramid.httpexceptions import HTTPNotFound
259 from pyramid.httpexceptions import HTTPNotFound
260
260
261 try:
261 try:
262 id_ = int(id_)
262 id_ = int(id_)
263 except (TypeError, ValueError):
263 except (TypeError, ValueError):
264 raise HTTPNotFound()
264 raise HTTPNotFound()
265
265
266 res = cls.query().get(id_)
266 res = cls.query().get(id_)
267 if not res:
267 if not res:
268 raise HTTPNotFound()
268 raise HTTPNotFound()
269 return res
269 return res
270
270
271 @classmethod
271 @classmethod
272 def getAll(cls):
272 def getAll(cls):
273 # deprecated and left for backward compatibility
273 # deprecated and left for backward compatibility
274 return cls.get_all()
274 return cls.get_all()
275
275
276 @classmethod
276 @classmethod
277 def get_all(cls):
277 def get_all(cls):
278 return cls.query().all()
278 return cls.query().all()
279
279
280 @classmethod
280 @classmethod
281 def delete(cls, id_):
281 def delete(cls, id_):
282 obj = cls.query().get(id_)
282 obj = cls.query().get(id_)
283 Session().delete(obj)
283 Session().delete(obj)
284
284
285 @classmethod
285 @classmethod
286 def identity_cache(cls, session, attr_name, value):
286 def identity_cache(cls, session, attr_name, value):
287 exist_in_session = []
287 exist_in_session = []
288 for (item_cls, pkey), instance in session.identity_map.items():
288 for (item_cls, pkey), instance in session.identity_map.items():
289 if cls == item_cls and getattr(instance, attr_name) == value:
289 if cls == item_cls and getattr(instance, attr_name) == value:
290 exist_in_session.append(instance)
290 exist_in_session.append(instance)
291 if exist_in_session:
291 if exist_in_session:
292 if len(exist_in_session) == 1:
292 if len(exist_in_session) == 1:
293 return exist_in_session[0]
293 return exist_in_session[0]
294 log.exception(
294 log.exception(
295 'multiple objects with attr %s and '
295 'multiple objects with attr %s and '
296 'value %s found with same name: %r',
296 'value %s found with same name: %r',
297 attr_name, value, exist_in_session)
297 attr_name, value, exist_in_session)
298
298
299 def __repr__(self):
299 def __repr__(self):
300 if hasattr(self, '__unicode__'):
300 if hasattr(self, '__unicode__'):
301 # python repr needs to return str
301 # python repr needs to return str
302 try:
302 try:
303 return safe_str(self.__unicode__())
303 return safe_str(self.__unicode__())
304 except UnicodeDecodeError:
304 except UnicodeDecodeError:
305 pass
305 pass
306 return '<DB:%s>' % (self.__class__.__name__)
306 return '<DB:%s>' % (self.__class__.__name__)
307
307
308
308
309 class RhodeCodeSetting(Base, BaseModel):
309 class RhodeCodeSetting(Base, BaseModel):
310 __tablename__ = 'rhodecode_settings'
310 __tablename__ = 'rhodecode_settings'
311 __table_args__ = (
311 __table_args__ = (
312 UniqueConstraint('app_settings_name'),
312 UniqueConstraint('app_settings_name'),
313 base_table_args
313 base_table_args
314 )
314 )
315
315
316 SETTINGS_TYPES = {
316 SETTINGS_TYPES = {
317 'str': safe_str,
317 'str': safe_str,
318 'int': safe_int,
318 'int': safe_int,
319 'unicode': safe_unicode,
319 'unicode': safe_unicode,
320 'bool': str2bool,
320 'bool': str2bool,
321 'list': functools.partial(aslist, sep=',')
321 'list': functools.partial(aslist, sep=',')
322 }
322 }
323 DEFAULT_UPDATE_URL = 'https://rhodecode.com/api/v1/info/versions'
323 DEFAULT_UPDATE_URL = 'https://rhodecode.com/api/v1/info/versions'
324 GLOBAL_CONF_KEY = 'app_settings'
324 GLOBAL_CONF_KEY = 'app_settings'
325
325
326 app_settings_id = Column("app_settings_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
326 app_settings_id = Column("app_settings_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
327 app_settings_name = Column("app_settings_name", String(255), nullable=True, unique=None, default=None)
327 app_settings_name = Column("app_settings_name", String(255), nullable=True, unique=None, default=None)
328 _app_settings_value = Column("app_settings_value", String(4096), nullable=True, unique=None, default=None)
328 _app_settings_value = Column("app_settings_value", String(4096), nullable=True, unique=None, default=None)
329 _app_settings_type = Column("app_settings_type", String(255), nullable=True, unique=None, default=None)
329 _app_settings_type = Column("app_settings_type", String(255), nullable=True, unique=None, default=None)
330
330
331 def __init__(self, key='', val='', type='unicode'):
331 def __init__(self, key='', val='', type='unicode'):
332 self.app_settings_name = key
332 self.app_settings_name = key
333 self.app_settings_type = type
333 self.app_settings_type = type
334 self.app_settings_value = val
334 self.app_settings_value = val
335
335
336 @validates('_app_settings_value')
336 @validates('_app_settings_value')
337 def validate_settings_value(self, key, val):
337 def validate_settings_value(self, key, val):
338 assert type(val) == unicode
338 assert type(val) == unicode
339 return val
339 return val
340
340
341 @hybrid_property
341 @hybrid_property
342 def app_settings_value(self):
342 def app_settings_value(self):
343 v = self._app_settings_value
343 v = self._app_settings_value
344 _type = self.app_settings_type
344 _type = self.app_settings_type
345 if _type:
345 if _type:
346 _type = self.app_settings_type.split('.')[0]
346 _type = self.app_settings_type.split('.')[0]
347 # decode the encrypted value
347 # decode the encrypted value
348 if 'encrypted' in self.app_settings_type:
348 if 'encrypted' in self.app_settings_type:
349 cipher = EncryptedTextValue()
349 cipher = EncryptedTextValue()
350 v = safe_unicode(cipher.process_result_value(v, None))
350 v = safe_unicode(cipher.process_result_value(v, None))
351
351
352 converter = self.SETTINGS_TYPES.get(_type) or \
352 converter = self.SETTINGS_TYPES.get(_type) or \
353 self.SETTINGS_TYPES['unicode']
353 self.SETTINGS_TYPES['unicode']
354 return converter(v)
354 return converter(v)
355
355
356 @app_settings_value.setter
356 @app_settings_value.setter
357 def app_settings_value(self, val):
357 def app_settings_value(self, val):
358 """
358 """
359 Setter that will always make sure we use unicode in app_settings_value
359 Setter that will always make sure we use unicode in app_settings_value
360
360
361 :param val:
361 :param val:
362 """
362 """
363 val = safe_unicode(val)
363 val = safe_unicode(val)
364 # encode the encrypted value
364 # encode the encrypted value
365 if 'encrypted' in self.app_settings_type:
365 if 'encrypted' in self.app_settings_type:
366 cipher = EncryptedTextValue()
366 cipher = EncryptedTextValue()
367 val = safe_unicode(cipher.process_bind_param(val, None))
367 val = safe_unicode(cipher.process_bind_param(val, None))
368 self._app_settings_value = val
368 self._app_settings_value = val
369
369
370 @hybrid_property
370 @hybrid_property
371 def app_settings_type(self):
371 def app_settings_type(self):
372 return self._app_settings_type
372 return self._app_settings_type
373
373
374 @app_settings_type.setter
374 @app_settings_type.setter
375 def app_settings_type(self, val):
375 def app_settings_type(self, val):
376 if val.split('.')[0] not in self.SETTINGS_TYPES:
376 if val.split('.')[0] not in self.SETTINGS_TYPES:
377 raise Exception('type must be one of %s got %s'
377 raise Exception('type must be one of %s got %s'
378 % (self.SETTINGS_TYPES.keys(), val))
378 % (self.SETTINGS_TYPES.keys(), val))
379 self._app_settings_type = val
379 self._app_settings_type = val
380
380
381 def __unicode__(self):
381 def __unicode__(self):
382 return u"<%s('%s:%s[%s]')>" % (
382 return u"<%s('%s:%s[%s]')>" % (
383 self.__class__.__name__,
383 self.__class__.__name__,
384 self.app_settings_name, self.app_settings_value,
384 self.app_settings_name, self.app_settings_value,
385 self.app_settings_type
385 self.app_settings_type
386 )
386 )
387
387
388
388
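# A short sketch with hypothetical setting keys: the third constructor
# argument picks the converter from SETTINGS_TYPES that is applied when the
# value is read back through app_settings_value.
realm_setting = RhodeCodeSetting('realm', 'RhodeCode', 'unicode')
bool_setting = RhodeCodeSetting('allow_repo_location_change', 'False', 'bool')
assert bool_setting.app_settings_value is False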
389 class RhodeCodeUi(Base, BaseModel):
389 class RhodeCodeUi(Base, BaseModel):
390 __tablename__ = 'rhodecode_ui'
390 __tablename__ = 'rhodecode_ui'
391 __table_args__ = (
391 __table_args__ = (
392 UniqueConstraint('ui_key'),
392 UniqueConstraint('ui_key'),
393 base_table_args
393 base_table_args
394 )
394 )
395
395
396 HOOK_REPO_SIZE = 'changegroup.repo_size'
396 HOOK_REPO_SIZE = 'changegroup.repo_size'
397 # HG
397 # HG
398 HOOK_PRE_PULL = 'preoutgoing.pre_pull'
398 HOOK_PRE_PULL = 'preoutgoing.pre_pull'
399 HOOK_PULL = 'outgoing.pull_logger'
399 HOOK_PULL = 'outgoing.pull_logger'
400 HOOK_PRE_PUSH = 'prechangegroup.pre_push'
400 HOOK_PRE_PUSH = 'prechangegroup.pre_push'
401 HOOK_PRETX_PUSH = 'pretxnchangegroup.pre_push'
401 HOOK_PRETX_PUSH = 'pretxnchangegroup.pre_push'
402 HOOK_PUSH = 'changegroup.push_logger'
402 HOOK_PUSH = 'changegroup.push_logger'
403 HOOK_PUSH_KEY = 'pushkey.key_push'
403 HOOK_PUSH_KEY = 'pushkey.key_push'
404
404
405 # TODO: johbo: Unify way how hooks are configured for git and hg,
405 # TODO: johbo: Unify way how hooks are configured for git and hg,
406 # git part is currently hardcoded.
406 # git part is currently hardcoded.
407
407
408 # SVN PATTERNS
408 # SVN PATTERNS
409 SVN_BRANCH_ID = 'vcs_svn_branch'
409 SVN_BRANCH_ID = 'vcs_svn_branch'
410 SVN_TAG_ID = 'vcs_svn_tag'
410 SVN_TAG_ID = 'vcs_svn_tag'
411
411
412 ui_id = Column(
412 ui_id = Column(
413 "ui_id", Integer(), nullable=False, unique=True, default=None,
413 "ui_id", Integer(), nullable=False, unique=True, default=None,
414 primary_key=True)
414 primary_key=True)
415 ui_section = Column(
415 ui_section = Column(
416 "ui_section", String(255), nullable=True, unique=None, default=None)
416 "ui_section", String(255), nullable=True, unique=None, default=None)
417 ui_key = Column(
417 ui_key = Column(
418 "ui_key", String(255), nullable=True, unique=None, default=None)
418 "ui_key", String(255), nullable=True, unique=None, default=None)
419 ui_value = Column(
419 ui_value = Column(
420 "ui_value", String(255), nullable=True, unique=None, default=None)
420 "ui_value", String(255), nullable=True, unique=None, default=None)
421 ui_active = Column(
421 ui_active = Column(
422 "ui_active", Boolean(), nullable=True, unique=None, default=True)
422 "ui_active", Boolean(), nullable=True, unique=None, default=True)
423
423
424 def __repr__(self):
424 def __repr__(self):
425 return '<%s[%s]%s=>%s]>' % (self.__class__.__name__, self.ui_section,
425 return '<%s[%s]%s=>%s]>' % (self.__class__.__name__, self.ui_section,
426 self.ui_key, self.ui_value)
426 self.ui_key, self.ui_value)
427
427
428
428
429 class RepoRhodeCodeSetting(Base, BaseModel):
429 class RepoRhodeCodeSetting(Base, BaseModel):
430 __tablename__ = 'repo_rhodecode_settings'
430 __tablename__ = 'repo_rhodecode_settings'
431 __table_args__ = (
431 __table_args__ = (
432 UniqueConstraint(
432 UniqueConstraint(
433 'app_settings_name', 'repository_id',
433 'app_settings_name', 'repository_id',
434 name='uq_repo_rhodecode_setting_name_repo_id'),
434 name='uq_repo_rhodecode_setting_name_repo_id'),
435 base_table_args
435 base_table_args
436 )
436 )
437
437
438 repository_id = Column(
438 repository_id = Column(
439 "repository_id", Integer(), ForeignKey('repositories.repo_id'),
439 "repository_id", Integer(), ForeignKey('repositories.repo_id'),
440 nullable=False)
440 nullable=False)
441 app_settings_id = Column(
441 app_settings_id = Column(
442 "app_settings_id", Integer(), nullable=False, unique=True,
442 "app_settings_id", Integer(), nullable=False, unique=True,
443 default=None, primary_key=True)
443 default=None, primary_key=True)
444 app_settings_name = Column(
444 app_settings_name = Column(
445 "app_settings_name", String(255), nullable=True, unique=None,
445 "app_settings_name", String(255), nullable=True, unique=None,
446 default=None)
446 default=None)
447 _app_settings_value = Column(
447 _app_settings_value = Column(
448 "app_settings_value", String(4096), nullable=True, unique=None,
448 "app_settings_value", String(4096), nullable=True, unique=None,
449 default=None)
449 default=None)
450 _app_settings_type = Column(
450 _app_settings_type = Column(
451 "app_settings_type", String(255), nullable=True, unique=None,
451 "app_settings_type", String(255), nullable=True, unique=None,
452 default=None)
452 default=None)
453
453
454 repository = relationship('Repository')
454 repository = relationship('Repository')
455
455
456 def __init__(self, repository_id, key='', val='', type='unicode'):
456 def __init__(self, repository_id, key='', val='', type='unicode'):
457 self.repository_id = repository_id
457 self.repository_id = repository_id
458 self.app_settings_name = key
458 self.app_settings_name = key
459 self.app_settings_type = type
459 self.app_settings_type = type
460 self.app_settings_value = val
460 self.app_settings_value = val
461
461
462 @validates('_app_settings_value')
462 @validates('_app_settings_value')
463 def validate_settings_value(self, key, val):
463 def validate_settings_value(self, key, val):
464 assert type(val) == unicode
464 assert type(val) == unicode
465 return val
465 return val
466
466
467 @hybrid_property
467 @hybrid_property
468 def app_settings_value(self):
468 def app_settings_value(self):
469 v = self._app_settings_value
469 v = self._app_settings_value
470 type_ = self.app_settings_type
470 type_ = self.app_settings_type
471 SETTINGS_TYPES = RhodeCodeSetting.SETTINGS_TYPES
471 SETTINGS_TYPES = RhodeCodeSetting.SETTINGS_TYPES
472 converter = SETTINGS_TYPES.get(type_) or SETTINGS_TYPES['unicode']
472 converter = SETTINGS_TYPES.get(type_) or SETTINGS_TYPES['unicode']
473 return converter(v)
473 return converter(v)
474
474
475 @app_settings_value.setter
475 @app_settings_value.setter
476 def app_settings_value(self, val):
476 def app_settings_value(self, val):
477 """
477 """
478 Setter that will always make sure we use unicode in app_settings_value
478 Setter that will always make sure we use unicode in app_settings_value
479
479
480 :param val:
480 :param val:
481 """
481 """
482 self._app_settings_value = safe_unicode(val)
482 self._app_settings_value = safe_unicode(val)
483
483
484 @hybrid_property
484 @hybrid_property
485 def app_settings_type(self):
485 def app_settings_type(self):
486 return self._app_settings_type
486 return self._app_settings_type
487
487
488 @app_settings_type.setter
488 @app_settings_type.setter
489 def app_settings_type(self, val):
489 def app_settings_type(self, val):
490 SETTINGS_TYPES = RhodeCodeSetting.SETTINGS_TYPES
490 SETTINGS_TYPES = RhodeCodeSetting.SETTINGS_TYPES
491 if val not in SETTINGS_TYPES:
491 if val not in SETTINGS_TYPES:
492 raise Exception('type must be one of %s got %s'
492 raise Exception('type must be one of %s got %s'
493 % (SETTINGS_TYPES.keys(), val))
493 % (SETTINGS_TYPES.keys(), val))
494 self._app_settings_type = val
494 self._app_settings_type = val
495
495
496 def __unicode__(self):
496 def __unicode__(self):
497 return u"<%s('%s:%s:%s[%s]')>" % (
497 return u"<%s('%s:%s:%s[%s]')>" % (
498 self.__class__.__name__, self.repository.repo_name,
498 self.__class__.__name__, self.repository.repo_name,
499 self.app_settings_name, self.app_settings_value,
499 self.app_settings_name, self.app_settings_value,
500 self.app_settings_type
500 self.app_settings_type
501 )
501 )
502
502
503
503
504 class RepoRhodeCodeUi(Base, BaseModel):
504 class RepoRhodeCodeUi(Base, BaseModel):
505 __tablename__ = 'repo_rhodecode_ui'
505 __tablename__ = 'repo_rhodecode_ui'
506 __table_args__ = (
506 __table_args__ = (
507 UniqueConstraint(
507 UniqueConstraint(
508 'repository_id', 'ui_section', 'ui_key',
508 'repository_id', 'ui_section', 'ui_key',
509 name='uq_repo_rhodecode_ui_repository_id_section_key'),
509 name='uq_repo_rhodecode_ui_repository_id_section_key'),
510 base_table_args
510 base_table_args
511 )
511 )
512
512
513 repository_id = Column(
513 repository_id = Column(
514 "repository_id", Integer(), ForeignKey('repositories.repo_id'),
514 "repository_id", Integer(), ForeignKey('repositories.repo_id'),
515 nullable=False)
515 nullable=False)
516 ui_id = Column(
516 ui_id = Column(
517 "ui_id", Integer(), nullable=False, unique=True, default=None,
517 "ui_id", Integer(), nullable=False, unique=True, default=None,
518 primary_key=True)
518 primary_key=True)
519 ui_section = Column(
519 ui_section = Column(
520 "ui_section", String(255), nullable=True, unique=None, default=None)
520 "ui_section", String(255), nullable=True, unique=None, default=None)
521 ui_key = Column(
521 ui_key = Column(
522 "ui_key", String(255), nullable=True, unique=None, default=None)
522 "ui_key", String(255), nullable=True, unique=None, default=None)
523 ui_value = Column(
523 ui_value = Column(
524 "ui_value", String(255), nullable=True, unique=None, default=None)
524 "ui_value", String(255), nullable=True, unique=None, default=None)
525 ui_active = Column(
525 ui_active = Column(
526 "ui_active", Boolean(), nullable=True, unique=None, default=True)
526 "ui_active", Boolean(), nullable=True, unique=None, default=True)
527
527
528 repository = relationship('Repository')
528 repository = relationship('Repository')
529
529
530 def __repr__(self):
530 def __repr__(self):
531 return '<%s[%s:%s]%s=>%s]>' % (
531 return '<%s[%s:%s]%s=>%s]>' % (
532 self.__class__.__name__, self.repository.repo_name,
532 self.__class__.__name__, self.repository.repo_name,
533 self.ui_section, self.ui_key, self.ui_value)
533 self.ui_section, self.ui_key, self.ui_value)
534
534
535
535
536 class User(Base, BaseModel):
536 class User(Base, BaseModel):
537 __tablename__ = 'users'
537 __tablename__ = 'users'
538 __table_args__ = (
538 __table_args__ = (
539 UniqueConstraint('username'), UniqueConstraint('email'),
539 UniqueConstraint('username'), UniqueConstraint('email'),
540 Index('u_username_idx', 'username'),
540 Index('u_username_idx', 'username'),
541 Index('u_email_idx', 'email'),
541 Index('u_email_idx', 'email'),
542 base_table_args
542 base_table_args
543 )
543 )
544
544
545 DEFAULT_USER = 'default'
545 DEFAULT_USER = 'default'
546 DEFAULT_USER_EMAIL = 'anonymous@rhodecode.org'
546 DEFAULT_USER_EMAIL = 'anonymous@rhodecode.org'
547 DEFAULT_GRAVATAR_URL = 'https://secure.gravatar.com/avatar/{md5email}?d=identicon&s={size}'
547 DEFAULT_GRAVATAR_URL = 'https://secure.gravatar.com/avatar/{md5email}?d=identicon&s={size}'
548
548
549 user_id = Column("user_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
549 user_id = Column("user_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
550 username = Column("username", String(255), nullable=True, unique=None, default=None)
550 username = Column("username", String(255), nullable=True, unique=None, default=None)
551 password = Column("password", String(255), nullable=True, unique=None, default=None)
551 password = Column("password", String(255), nullable=True, unique=None, default=None)
552 active = Column("active", Boolean(), nullable=True, unique=None, default=True)
552 active = Column("active", Boolean(), nullable=True, unique=None, default=True)
553 admin = Column("admin", Boolean(), nullable=True, unique=None, default=False)
553 admin = Column("admin", Boolean(), nullable=True, unique=None, default=False)
554 name = Column("firstname", String(255), nullable=True, unique=None, default=None)
554 name = Column("firstname", String(255), nullable=True, unique=None, default=None)
555 lastname = Column("lastname", String(255), nullable=True, unique=None, default=None)
555 lastname = Column("lastname", String(255), nullable=True, unique=None, default=None)
556 _email = Column("email", String(255), nullable=True, unique=None, default=None)
556 _email = Column("email", String(255), nullable=True, unique=None, default=None)
557 last_login = Column("last_login", DateTime(timezone=False), nullable=True, unique=None, default=None)
557 last_login = Column("last_login", DateTime(timezone=False), nullable=True, unique=None, default=None)
558 last_activity = Column('last_activity', DateTime(timezone=False), nullable=True, unique=None, default=None)
558 last_activity = Column('last_activity', DateTime(timezone=False), nullable=True, unique=None, default=None)
559
559
560 extern_type = Column("extern_type", String(255), nullable=True, unique=None, default=None)
560 extern_type = Column("extern_type", String(255), nullable=True, unique=None, default=None)
561 extern_name = Column("extern_name", String(255), nullable=True, unique=None, default=None)
561 extern_name = Column("extern_name", String(255), nullable=True, unique=None, default=None)
562 _api_key = Column("api_key", String(255), nullable=True, unique=None, default=None)
562 _api_key = Column("api_key", String(255), nullable=True, unique=None, default=None)
563 inherit_default_permissions = Column("inherit_default_permissions", Boolean(), nullable=False, unique=None, default=True)
563 inherit_default_permissions = Column("inherit_default_permissions", Boolean(), nullable=False, unique=None, default=True)
564 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
564 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
565 _user_data = Column("user_data", LargeBinary(), nullable=True) # JSON data
565 _user_data = Column("user_data", LargeBinary(), nullable=True) # JSON data
566
566
567 user_log = relationship('UserLog')
567 user_log = relationship('UserLog')
568 user_perms = relationship('UserToPerm', primaryjoin="User.user_id==UserToPerm.user_id", cascade='all')
568 user_perms = relationship('UserToPerm', primaryjoin="User.user_id==UserToPerm.user_id", cascade='all')
569
569
570 repositories = relationship('Repository')
570 repositories = relationship('Repository')
571 repository_groups = relationship('RepoGroup')
571 repository_groups = relationship('RepoGroup')
572 user_groups = relationship('UserGroup')
572 user_groups = relationship('UserGroup')
573
573
574 user_followers = relationship('UserFollowing', primaryjoin='UserFollowing.follows_user_id==User.user_id', cascade='all')
574 user_followers = relationship('UserFollowing', primaryjoin='UserFollowing.follows_user_id==User.user_id', cascade='all')
575 followings = relationship('UserFollowing', primaryjoin='UserFollowing.user_id==User.user_id', cascade='all')
575 followings = relationship('UserFollowing', primaryjoin='UserFollowing.user_id==User.user_id', cascade='all')
576
576
577 repo_to_perm = relationship('UserRepoToPerm', primaryjoin='UserRepoToPerm.user_id==User.user_id', cascade='all')
577 repo_to_perm = relationship('UserRepoToPerm', primaryjoin='UserRepoToPerm.user_id==User.user_id', cascade='all')
578 repo_group_to_perm = relationship('UserRepoGroupToPerm', primaryjoin='UserRepoGroupToPerm.user_id==User.user_id', cascade='all')
578 repo_group_to_perm = relationship('UserRepoGroupToPerm', primaryjoin='UserRepoGroupToPerm.user_id==User.user_id', cascade='all')
579 user_group_to_perm = relationship('UserUserGroupToPerm', primaryjoin='UserUserGroupToPerm.user_id==User.user_id', cascade='all')
579 user_group_to_perm = relationship('UserUserGroupToPerm', primaryjoin='UserUserGroupToPerm.user_id==User.user_id', cascade='all')
580
580
581 group_member = relationship('UserGroupMember', cascade='all')
581 group_member = relationship('UserGroupMember', cascade='all')
582
582
583 notifications = relationship('UserNotification', cascade='all')
583 notifications = relationship('UserNotification', cascade='all')
584 # notifications assigned to this user
584 # notifications assigned to this user
585 user_created_notifications = relationship('Notification', cascade='all')
585 user_created_notifications = relationship('Notification', cascade='all')
586 # comments created by this user
586 # comments created by this user
587 user_comments = relationship('ChangesetComment', cascade='all')
587 user_comments = relationship('ChangesetComment', cascade='all')
588 # user profile extra info
588 # user profile extra info
589 user_emails = relationship('UserEmailMap', cascade='all')
589 user_emails = relationship('UserEmailMap', cascade='all')
590 user_ip_map = relationship('UserIpMap', cascade='all')
590 user_ip_map = relationship('UserIpMap', cascade='all')
591 user_auth_tokens = relationship('UserApiKeys', cascade='all')
591 user_auth_tokens = relationship('UserApiKeys', cascade='all')
592 user_ssh_keys = relationship('UserSshKeys', cascade='all')
592 user_ssh_keys = relationship('UserSshKeys', cascade='all')
593
593
594 # gists
594 # gists
595 user_gists = relationship('Gist', cascade='all')
595 user_gists = relationship('Gist', cascade='all')
596 # user pull requests
596 # user pull requests
597 user_pull_requests = relationship('PullRequest', cascade='all')
597 user_pull_requests = relationship('PullRequest', cascade='all')
598 # external identities
598 # external identities
599 extenal_identities = relationship(
599 extenal_identities = relationship(
600 'ExternalIdentity',
600 'ExternalIdentity',
601 primaryjoin="User.user_id==ExternalIdentity.local_user_id",
601 primaryjoin="User.user_id==ExternalIdentity.local_user_id",
602 cascade='all')
602 cascade='all')
603 # review rules
603 # review rules
604 user_review_rules = relationship('RepoReviewRuleUser', cascade='all')
604 user_review_rules = relationship('RepoReviewRuleUser', cascade='all')
605
605
606 def __unicode__(self):
606 def __unicode__(self):
607 return u"<%s('id:%s:%s')>" % (self.__class__.__name__,
607 return u"<%s('id:%s:%s')>" % (self.__class__.__name__,
608 self.user_id, self.username)
608 self.user_id, self.username)
609
609
610 @hybrid_property
610 @hybrid_property
611 def email(self):
611 def email(self):
612 return self._email
612 return self._email
613
613
614 @email.setter
614 @email.setter
615 def email(self, val):
615 def email(self, val):
616 self._email = val.lower() if val else None
616 self._email = val.lower() if val else None
617
617
618 @hybrid_property
618 @hybrid_property
619 def first_name(self):
619 def first_name(self):
620 from rhodecode.lib import helpers as h
620 from rhodecode.lib import helpers as h
621 if self.name:
621 if self.name:
622 return h.escape(self.name)
622 return h.escape(self.name)
623 return self.name
623 return self.name
624
624
625 @hybrid_property
625 @hybrid_property
626 def last_name(self):
626 def last_name(self):
627 from rhodecode.lib import helpers as h
627 from rhodecode.lib import helpers as h
628 if self.lastname:
628 if self.lastname:
629 return h.escape(self.lastname)
629 return h.escape(self.lastname)
630 return self.lastname
630 return self.lastname
631
631
632 @hybrid_property
632 @hybrid_property
633 def api_key(self):
633 def api_key(self):
634 """
634 """
635 Fetch an auth token with role ALL connected to this user, if one exists
635 Fetch an auth token with role ALL connected to this user, if one exists
636 """
636 """
637 user_auth_token = UserApiKeys.query()\
637 user_auth_token = UserApiKeys.query()\
638 .filter(UserApiKeys.user_id == self.user_id)\
638 .filter(UserApiKeys.user_id == self.user_id)\
639 .filter(or_(UserApiKeys.expires == -1,
639 .filter(or_(UserApiKeys.expires == -1,
640 UserApiKeys.expires >= time.time()))\
640 UserApiKeys.expires >= time.time()))\
641 .filter(UserApiKeys.role == UserApiKeys.ROLE_ALL).first()
641 .filter(UserApiKeys.role == UserApiKeys.ROLE_ALL).first()
642 if user_auth_token:
642 if user_auth_token:
643 user_auth_token = user_auth_token.api_key
643 user_auth_token = user_auth_token.api_key
644
644
645 return user_auth_token
645 return user_auth_token
646
646
647 @api_key.setter
647 @api_key.setter
648 def api_key(self, val):
648 def api_key(self, val):
649 # don't allow setting the API key; this is deprecated for now
649 # don't allow setting the API key; this is deprecated for now
650 self._api_key = None
650 self._api_key = None
651
651
652 @property
652 @property
653 def reviewer_pull_requests(self):
653 def reviewer_pull_requests(self):
654 return PullRequestReviewers.query() \
654 return PullRequestReviewers.query() \
655 .options(joinedload(PullRequestReviewers.pull_request)) \
655 .options(joinedload(PullRequestReviewers.pull_request)) \
656 .filter(PullRequestReviewers.user_id == self.user_id) \
656 .filter(PullRequestReviewers.user_id == self.user_id) \
657 .all()
657 .all()
658
658
659 @property
659 @property
660 def firstname(self):
660 def firstname(self):
661 # alias for future
661 # alias for future
662 return self.name
662 return self.name
663
663
664 @property
664 @property
665 def emails(self):
665 def emails(self):
666 other = UserEmailMap.query()\
666 other = UserEmailMap.query()\
667 .filter(UserEmailMap.user == self) \
667 .filter(UserEmailMap.user == self) \
668 .order_by(UserEmailMap.email_id.asc()) \
668 .order_by(UserEmailMap.email_id.asc()) \
669 .all()
669 .all()
670 return [self.email] + [x.email for x in other]
670 return [self.email] + [x.email for x in other]
671
671
672 @property
672 @property
673 def auth_tokens(self):
673 def auth_tokens(self):
674 auth_tokens = self.get_auth_tokens()
674 auth_tokens = self.get_auth_tokens()
675 return [x.api_key for x in auth_tokens]
675 return [x.api_key for x in auth_tokens]
676
676
677 def get_auth_tokens(self):
677 def get_auth_tokens(self):
678 return UserApiKeys.query()\
678 return UserApiKeys.query()\
679 .filter(UserApiKeys.user == self)\
679 .filter(UserApiKeys.user == self)\
680 .order_by(UserApiKeys.user_api_key_id.asc())\
680 .order_by(UserApiKeys.user_api_key_id.asc())\
681 .all()
681 .all()
682
682
683 @LazyProperty
683 @LazyProperty
684 def feed_token(self):
684 def feed_token(self):
685 return self.get_feed_token()
685 return self.get_feed_token()
686
686
687 def get_feed_token(self, cache=True):
687 def get_feed_token(self, cache=True):
688 feed_tokens = UserApiKeys.query()\
688 feed_tokens = UserApiKeys.query()\
689 .filter(UserApiKeys.user == self)\
689 .filter(UserApiKeys.user == self)\
690 .filter(UserApiKeys.role == UserApiKeys.ROLE_FEED)
690 .filter(UserApiKeys.role == UserApiKeys.ROLE_FEED)
691 if cache:
691 if cache:
692 feed_tokens = feed_tokens.options(
692 feed_tokens = feed_tokens.options(
693 FromCache("long_term", "get_user_feed_token_%s" % self.user_id))
693 FromCache("sql_cache_short", "get_user_feed_token_%s" % self.user_id))
694
694
695 feed_tokens = feed_tokens.all()
695 feed_tokens = feed_tokens.all()
696 if feed_tokens:
696 if feed_tokens:
697 return feed_tokens[0].api_key
697 return feed_tokens[0].api_key
698 return 'NO_FEED_TOKEN_AVAILABLE'
698 return 'NO_FEED_TOKEN_AVAILABLE'
699
699
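# Usage sketch for the cached lookup above (user is a hypothetical User
# instance): with cache=True the token is served from the dogpile-backed
# "sql_cache_short" region under 'get_user_feed_token_<user_id>' until the
# region expires; cache=False always queries the database.
feed_token = user.get_feed_token(cache=True)
fresh_token = user.get_feed_token(cache=False)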
700 @classmethod
700 @classmethod
701 def get(cls, user_id, cache=False):
701 def get(cls, user_id, cache=False):
702 if not user_id:
702 if not user_id:
703 return
703 return
704
704
705 user = cls.query()
705 user = cls.query()
706 if cache:
706 if cache:
707 user = user.options(
707 user = user.options(
708 FromCache("sql_cache_short", "get_users_%s" % user_id))
708 FromCache("sql_cache_short", "get_users_%s" % user_id))
709 return user.get(user_id)
709 return user.get(user_id)
710
710
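# Usage sketch: the classmethod above caches the row in the same region under
# 'get_users_<user_id>' (42 is a hypothetical id).
cached_user = User.get(42, cache=True)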
711 @classmethod
711 @classmethod
712 def extra_valid_auth_tokens(cls, user, role=None):
712 def extra_valid_auth_tokens(cls, user, role=None):
713 tokens = UserApiKeys.query().filter(UserApiKeys.user == user)\
713 tokens = UserApiKeys.query().filter(UserApiKeys.user == user)\
714 .filter(or_(UserApiKeys.expires == -1,
714 .filter(or_(UserApiKeys.expires == -1,
715 UserApiKeys.expires >= time.time()))
715 UserApiKeys.expires >= time.time()))
716 if role:
716 if role:
717 tokens = tokens.filter(or_(UserApiKeys.role == role,
717 tokens = tokens.filter(or_(UserApiKeys.role == role,
718 UserApiKeys.role == UserApiKeys.ROLE_ALL))
718 UserApiKeys.role == UserApiKeys.ROLE_ALL))
719 return tokens.all()
719 return tokens.all()
720
720
721 def authenticate_by_token(self, auth_token, roles=None, scope_repo_id=None):
721 def authenticate_by_token(self, auth_token, roles=None, scope_repo_id=None):
722 from rhodecode.lib import auth
722 from rhodecode.lib import auth
723
723
724 log.debug('Trying to authenticate user: %s via auth-token, '
724 log.debug('Trying to authenticate user: %s via auth-token, '
725 'and roles: %s', self, roles)
725 'and roles: %s', self, roles)
726
726
727 if not auth_token:
727 if not auth_token:
728 return False
728 return False
729
729
730 crypto_backend = auth.crypto_backend()
730 crypto_backend = auth.crypto_backend()
731
731
732 roles = (roles or []) + [UserApiKeys.ROLE_ALL]
732 roles = (roles or []) + [UserApiKeys.ROLE_ALL]
733 tokens_q = UserApiKeys.query()\
733 tokens_q = UserApiKeys.query()\
734 .filter(UserApiKeys.user_id == self.user_id)\
734 .filter(UserApiKeys.user_id == self.user_id)\
735 .filter(or_(UserApiKeys.expires == -1,
735 .filter(or_(UserApiKeys.expires == -1,
736 UserApiKeys.expires >= time.time()))
736 UserApiKeys.expires >= time.time()))
737
737
738 tokens_q = tokens_q.filter(UserApiKeys.role.in_(roles))
738 tokens_q = tokens_q.filter(UserApiKeys.role.in_(roles))
739
739
740 plain_tokens = []
740 plain_tokens = []
741 hash_tokens = []
741 hash_tokens = []
742
742
743 for token in tokens_q.all():
743 for token in tokens_q.all():
744 # verify scope first
744 # verify scope first
745 if token.repo_id:
745 if token.repo_id:
746 # token has a scope, we need to verify it
746 # token has a scope, we need to verify it
747 if scope_repo_id != token.repo_id:
747 if scope_repo_id != token.repo_id:
748 log.debug(
748 log.debug(
749 'Scope mismatch: token has a set repo scope: %s, '
749 'Scope mismatch: token has a set repo scope: %s, '
750 'and calling scope is:%s, skipping further checks',
750 'and calling scope is:%s, skipping further checks',
751 token.repo, scope_repo_id)
751 token.repo, scope_repo_id)
752 # token has a scope, and it doesn't match, skip token
752 # token has a scope, and it doesn't match, skip token
753 continue
753 continue
754
754
755 if token.api_key.startswith(crypto_backend.ENC_PREF):
755 if token.api_key.startswith(crypto_backend.ENC_PREF):
756 hash_tokens.append(token.api_key)
756 hash_tokens.append(token.api_key)
757 else:
757 else:
758 plain_tokens.append(token.api_key)
758 plain_tokens.append(token.api_key)
759
759
760 is_plain_match = auth_token in plain_tokens
760 is_plain_match = auth_token in plain_tokens
761 if is_plain_match:
761 if is_plain_match:
762 return True
762 return True
763
763
764 for hashed in hash_tokens:
764 for hashed in hash_tokens:
765 # TODO(marcink): this is expensive to calculate, but most secure
765 # TODO(marcink): this is expensive to calculate, but most secure
766 match = crypto_backend.hash_check(auth_token, hashed)
766 match = crypto_backend.hash_check(auth_token, hashed)
767 if match:
767 if match:
768 return True
768 return True
769
769
770 return False
770 return False
771
771
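# A hedged usage sketch with hypothetical values: plaintext tokens are matched
# directly, hashed tokens go through the crypto backend, and repo-scoped
# tokens only match when scope_repo_id equals the token's repo_id.
is_valid = user.authenticate_by_token(
    'feed-token-value', roles=[UserApiKeys.ROLE_FEED], scope_repo_id=7)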
772 @property
772 @property
773 def ip_addresses(self):
773 def ip_addresses(self):
774 ret = UserIpMap.query().filter(UserIpMap.user == self).all()
774 ret = UserIpMap.query().filter(UserIpMap.user == self).all()
775 return [x.ip_addr for x in ret]
775 return [x.ip_addr for x in ret]
776
776
777 @property
777 @property
778 def username_and_name(self):
778 def username_and_name(self):
779 return '%s (%s %s)' % (self.username, self.first_name, self.last_name)
779 return '%s (%s %s)' % (self.username, self.first_name, self.last_name)
780
780
781 @property
781 @property
782 def username_or_name_or_email(self):
782 def username_or_name_or_email(self):
783 full_name = self.full_name if self.full_name != ' ' else None
783 full_name = self.full_name if self.full_name != ' ' else None
784 return self.username or full_name or self.email
784 return self.username or full_name or self.email
785
785
786 @property
786 @property
787 def full_name(self):
787 def full_name(self):
788 return '%s %s' % (self.first_name, self.last_name)
788 return '%s %s' % (self.first_name, self.last_name)
789
789
790 @property
790 @property
791 def full_name_or_username(self):
791 def full_name_or_username(self):
792 return ('%s %s' % (self.first_name, self.last_name)
792 return ('%s %s' % (self.first_name, self.last_name)
793 if (self.first_name and self.last_name) else self.username)
793 if (self.first_name and self.last_name) else self.username)
794
794
795 @property
795 @property
796 def full_contact(self):
796 def full_contact(self):
797 return '%s %s <%s>' % (self.first_name, self.last_name, self.email)
797 return '%s %s <%s>' % (self.first_name, self.last_name, self.email)
798
798
799 @property
799 @property
800 def short_contact(self):
800 def short_contact(self):
801 return '%s %s' % (self.first_name, self.last_name)
801 return '%s %s' % (self.first_name, self.last_name)
802
802
803 @property
803 @property
804 def is_admin(self):
804 def is_admin(self):
805 return self.admin
805 return self.admin
806
806
807 def AuthUser(self, **kwargs):
807 def AuthUser(self, **kwargs):
808 """
808 """
809 Returns instance of AuthUser for this user
809 Returns instance of AuthUser for this user
810 """
810 """
811 from rhodecode.lib.auth import AuthUser
811 from rhodecode.lib.auth import AuthUser
812 return AuthUser(user_id=self.user_id, username=self.username, **kwargs)
812 return AuthUser(user_id=self.user_id, username=self.username, **kwargs)
813
813
814 @hybrid_property
814 @hybrid_property
815 def user_data(self):
815 def user_data(self):
816 if not self._user_data:
816 if not self._user_data:
817 return {}
817 return {}
818
818
819 try:
819 try:
820 return json.loads(self._user_data)
820 return json.loads(self._user_data)
821 except TypeError:
821 except TypeError:
822 return {}
822 return {}
823
823
824 @user_data.setter
824 @user_data.setter
825 def user_data(self, val):
825 def user_data(self, val):
826 if not isinstance(val, dict):
826 if not isinstance(val, dict):
827 raise Exception('user_data must be dict, got %s' % type(val))
827 raise Exception('user_data must be dict, got %s' % type(val))
828 try:
828 try:
829 self._user_data = json.dumps(val)
829 self._user_data = json.dumps(val)
830 except Exception:
830 except Exception:
831 log.error(traceback.format_exc())
831 log.error(traceback.format_exc())
832
832
833 @classmethod
833 @classmethod
834 def get_by_username(cls, username, case_insensitive=False,
834 def get_by_username(cls, username, case_insensitive=False,
835 cache=False, identity_cache=False):
835 cache=False, identity_cache=False):
836 session = Session()
836 session = Session()
837
837
838 if case_insensitive:
838 if case_insensitive:
839 q = cls.query().filter(
839 q = cls.query().filter(
840 func.lower(cls.username) == func.lower(username))
840 func.lower(cls.username) == func.lower(username))
841 else:
841 else:
842 q = cls.query().filter(cls.username == username)
842 q = cls.query().filter(cls.username == username)
843
843
844 if cache:
844 if cache:
845 if identity_cache:
845 if identity_cache:
846 val = cls.identity_cache(session, 'username', username)
846 val = cls.identity_cache(session, 'username', username)
847 if val:
847 if val:
848 return val
848 return val
849 else:
849 else:
850 cache_key = "get_user_by_name_%s" % _hash_key(username)
850 cache_key = "get_user_by_name_%s" % _hash_key(username)
851 q = q.options(
851 q = q.options(
852 FromCache("sql_cache_short", cache_key))
852 FromCache("sql_cache_short", cache_key))
853
853
854 return q.scalar()
854 return q.scalar()
855
855
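# Usage sketch: identity_cache=True short-circuits via the SQLAlchemy session
# identity map, while cache=True alone stores the result in the
# "sql_cache_short" region keyed by a hash of the username.
admin_user = User.get_by_username('admin', case_insensitive=True, cache=True)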
856 @classmethod
856 @classmethod
857 def get_by_auth_token(cls, auth_token, cache=False):
857 def get_by_auth_token(cls, auth_token, cache=False):
858 q = UserApiKeys.query()\
858 q = UserApiKeys.query()\
859 .filter(UserApiKeys.api_key == auth_token)\
859 .filter(UserApiKeys.api_key == auth_token)\
860 .filter(or_(UserApiKeys.expires == -1,
860 .filter(or_(UserApiKeys.expires == -1,
861 UserApiKeys.expires >= time.time()))
861 UserApiKeys.expires >= time.time()))
862 if cache:
862 if cache:
863 q = q.options(
863 q = q.options(
864 FromCache("sql_cache_short", "get_auth_token_%s" % auth_token))
864 FromCache("sql_cache_short", "get_auth_token_%s" % auth_token))
865
865
866 match = q.first()
866 match = q.first()
867 if match:
867 if match:
868 return match.user
868 return match.user
869
869
870 @classmethod
870 @classmethod
871 def get_by_email(cls, email, case_insensitive=False, cache=False):
871 def get_by_email(cls, email, case_insensitive=False, cache=False):
872
872
873 if case_insensitive:
873 if case_insensitive:
874 q = cls.query().filter(func.lower(cls.email) == func.lower(email))
874 q = cls.query().filter(func.lower(cls.email) == func.lower(email))
875
875
876 else:
876 else:
877 q = cls.query().filter(cls.email == email)
877 q = cls.query().filter(cls.email == email)
878
878
879 email_key = _hash_key(email)
879 email_key = _hash_key(email)
880 if cache:
880 if cache:
881 q = q.options(
881 q = q.options(
882 FromCache("sql_cache_short", "get_email_key_%s" % email_key))
882 FromCache("sql_cache_short", "get_email_key_%s" % email_key))
883
883
884 ret = q.scalar()
884 ret = q.scalar()
885 if ret is None:
885 if ret is None:
886 q = UserEmailMap.query()
886 q = UserEmailMap.query()
887 # try fetching in alternate email map
887 # try fetching in alternate email map
888 if case_insensitive:
888 if case_insensitive:
889 q = q.filter(func.lower(UserEmailMap.email) == func.lower(email))
889 q = q.filter(func.lower(UserEmailMap.email) == func.lower(email))
890 else:
890 else:
891 q = q.filter(UserEmailMap.email == email)
891 q = q.filter(UserEmailMap.email == email)
892 q = q.options(joinedload(UserEmailMap.user))
892 q = q.options(joinedload(UserEmailMap.user))
893 if cache:
893 if cache:
894 q = q.options(
894 q = q.options(
895 FromCache("sql_cache_short", "get_email_map_key_%s" % email_key))
895 FromCache("sql_cache_short", "get_email_map_key_%s" % email_key))
896 ret = getattr(q.scalar(), 'user', None)
896 ret = getattr(q.scalar(), 'user', None)
897
897
898 return ret
898 return ret
899
899
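# Illustrative usage (not part of the original source; the address is a
# hypothetical placeholder). The lookup falls back to UserEmailMap when the
# address is not the user's primary email:
#
#   user = User.get_by_email('someone@example.com', case_insensitive=True)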
900 @classmethod
900 @classmethod
901 def get_from_cs_author(cls, author):
901 def get_from_cs_author(cls, author):
902 """
902 """
903 Tries to get User objects out of commit author string
903 Tries to get User objects out of commit author string
904
904
905 :param author:
905 :param author:
906 """
906 """
907 from rhodecode.lib.helpers import email, author_name
907 from rhodecode.lib.helpers import email, author_name
908 # If the passed attribute contains a valid email, see if it's in the system
908 # If the passed attribute contains a valid email, see if it's in the system
909 _email = email(author)
909 _email = email(author)
910 if _email:
910 if _email:
911 user = cls.get_by_email(_email, case_insensitive=True)
911 user = cls.get_by_email(_email, case_insensitive=True)
912 if user:
912 if user:
913 return user
913 return user
914 # Maybe we can match by username?
914 # Maybe we can match by username?
915 _author = author_name(author)
915 _author = author_name(author)
916 user = cls.get_by_username(_author, case_insensitive=True)
916 user = cls.get_by_username(_author, case_insensitive=True)
917 if user:
917 if user:
918 return user
918 return user
919
919
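# Illustrative usage (not part of the original source; the author string is a
# hypothetical example). Matching is attempted by email first, then by the
# author name treated as a username; None is returned if neither matches:
#
#   user = User.get_from_cs_author('John Doe <john@example.com>')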
920 def update_userdata(self, **kwargs):
920 def update_userdata(self, **kwargs):
921 usr = self
921 usr = self
922 old = usr.user_data
922 old = usr.user_data
923 old.update(**kwargs)
923 old.update(**kwargs)
924 usr.user_data = old
924 usr.user_data = old
925 Session().add(usr)
925 Session().add(usr)
926 log.debug('updated userdata with %s', kwargs)
926 log.debug('updated userdata with %s', kwargs)
927
927
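# Illustrative usage (not part of the original source; assumes the caller
# commits the session afterwards, as update_userdata only adds the object):
#
#   user = User.get_by_username('admin')
#   user.update_userdata(language='en')
#   Session().commit()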
928 def update_lastlogin(self):
928 def update_lastlogin(self):
929 """Update user lastlogin"""
929 """Update user lastlogin"""
930 self.last_login = datetime.datetime.now()
930 self.last_login = datetime.datetime.now()
931 Session().add(self)
931 Session().add(self)
932 log.debug('updated user %s lastlogin', self.username)
932 log.debug('updated user %s lastlogin', self.username)
933
933
934 def update_lastactivity(self):
934 def update_lastactivity(self):
935 """Update user lastactivity"""
935 """Update user lastactivity"""
936 self.last_activity = datetime.datetime.now()
936 self.last_activity = datetime.datetime.now()
937 Session().add(self)
937 Session().add(self)
938 log.debug('updated user `%s` last activity', self.username)
938 log.debug('updated user `%s` last activity', self.username)
939
939
940 def update_password(self, new_password):
940 def update_password(self, new_password):
941 from rhodecode.lib.auth import get_crypt_password
941 from rhodecode.lib.auth import get_crypt_password
942
942
943 self.password = get_crypt_password(new_password)
943 self.password = get_crypt_password(new_password)
944 Session().add(self)
944 Session().add(self)
945
945
946 @classmethod
946 @classmethod
947 def get_first_super_admin(cls):
947 def get_first_super_admin(cls):
948 user = User.query().filter(User.admin == true()).first()
948 user = User.query().filter(User.admin == true()).first()
949 if user is None:
949 if user is None:
950 raise Exception('FATAL: Missing administrative account!')
950 raise Exception('FATAL: Missing administrative account!')
951 return user
951 return user
952
952
953 @classmethod
953 @classmethod
954 def get_all_super_admins(cls):
954 def get_all_super_admins(cls):
955 """
955 """
956 Returns all admin accounts sorted by username
956 Returns all admin accounts sorted by username
957 """
957 """
958 return User.query().filter(User.admin == true())\
958 return User.query().filter(User.admin == true())\
959 .order_by(User.username.asc()).all()
959 .order_by(User.username.asc()).all()
960
960
961 @classmethod
961 @classmethod
962 def get_default_user(cls, cache=False, refresh=False):
962 def get_default_user(cls, cache=False, refresh=False):
963 user = User.get_by_username(User.DEFAULT_USER, cache=cache)
963 user = User.get_by_username(User.DEFAULT_USER, cache=cache)
964 if user is None:
964 if user is None:
965 raise Exception('FATAL: Missing default account!')
965 raise Exception('FATAL: Missing default account!')
966 if refresh:
966 if refresh:
967 # The default user might be based on outdated state which
967 # The default user might be based on outdated state which
968 # has been loaded from the cache.
968 # has been loaded from the cache.
969 # A call to refresh() ensures that the
969 # A call to refresh() ensures that the
970 # latest state from the database is used.
970 # latest state from the database is used.
971 Session().refresh(user)
971 Session().refresh(user)
972 return user
972 return user
973
973
974 def _get_default_perms(self, user, suffix=''):
974 def _get_default_perms(self, user, suffix=''):
975 from rhodecode.model.permission import PermissionModel
975 from rhodecode.model.permission import PermissionModel
976 return PermissionModel().get_default_perms(user.user_perms, suffix)
976 return PermissionModel().get_default_perms(user.user_perms, suffix)
977
977
978 def get_default_perms(self, suffix=''):
978 def get_default_perms(self, suffix=''):
979 return self._get_default_perms(self, suffix)
979 return self._get_default_perms(self, suffix)
980
980
981 def get_api_data(self, include_secrets=False, details='full'):
981 def get_api_data(self, include_secrets=False, details='full'):
982 """
982 """
983 Common function for generating user related data for API
983 Common function for generating user related data for API
984
984
985 :param include_secrets: By default secrets in the API data will be replaced
985 :param include_secrets: By default secrets in the API data will be replaced
986 by a placeholder value to prevent exposing this data by accident. If this
986 by a placeholder value to prevent exposing this data by accident. If this
987 data should be exposed, set this flag to ``True``.
987 data should be exposed, set this flag to ``True``.
988
988
989 :param details: either 'basic' or 'full'. 'basic' gives only a subset of
989 :param details: either 'basic' or 'full'. 'basic' gives only a subset of
990 the available user information: user_id, name and emails.
990 the available user information: user_id, name and emails.
991 """
991 """
992 user = self
992 user = self
993 user_data = self.user_data
993 user_data = self.user_data
994 data = {
994 data = {
995 'user_id': user.user_id,
995 'user_id': user.user_id,
996 'username': user.username,
996 'username': user.username,
997 'firstname': user.name,
997 'firstname': user.name,
998 'lastname': user.lastname,
998 'lastname': user.lastname,
999 'email': user.email,
999 'email': user.email,
1000 'emails': user.emails,
1000 'emails': user.emails,
1001 }
1001 }
1002 if details == 'basic':
1002 if details == 'basic':
1003 return data
1003 return data
1004
1004
1005 auth_token_length = 40
1005 auth_token_length = 40
1006 auth_token_replacement = '*' * auth_token_length
1006 auth_token_replacement = '*' * auth_token_length
1007
1007
1008 extras = {
1008 extras = {
1009 'auth_tokens': [auth_token_replacement],
1009 'auth_tokens': [auth_token_replacement],
1010 'active': user.active,
1010 'active': user.active,
1011 'admin': user.admin,
1011 'admin': user.admin,
1012 'extern_type': user.extern_type,
1012 'extern_type': user.extern_type,
1013 'extern_name': user.extern_name,
1013 'extern_name': user.extern_name,
1014 'last_login': user.last_login,
1014 'last_login': user.last_login,
1015 'last_activity': user.last_activity,
1015 'last_activity': user.last_activity,
1016 'ip_addresses': user.ip_addresses,
1016 'ip_addresses': user.ip_addresses,
1017 'language': user_data.get('language')
1017 'language': user_data.get('language')
1018 }
1018 }
1019 data.update(extras)
1019 data.update(extras)
1020
1020
1021 if include_secrets:
1021 if include_secrets:
1022 data['auth_tokens'] = user.auth_tokens
1022 data['auth_tokens'] = user.auth_tokens
1023 return data
1023 return data
1024
1024
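# Illustrative usage (not part of the original source; ``user`` stands for an
# existing User instance). With details='basic' only the id/name/email subset
# is returned; auth tokens stay obfuscated unless include_secrets=True:
#
#   data = user.get_api_data(details='basic')
#   full = user.get_api_data(include_secrets=False, details='full')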
1025 def __json__(self):
1025 def __json__(self):
1026 data = {
1026 data = {
1027 'full_name': self.full_name,
1027 'full_name': self.full_name,
1028 'full_name_or_username': self.full_name_or_username,
1028 'full_name_or_username': self.full_name_or_username,
1029 'short_contact': self.short_contact,
1029 'short_contact': self.short_contact,
1030 'full_contact': self.full_contact,
1030 'full_contact': self.full_contact,
1031 }
1031 }
1032 data.update(self.get_api_data())
1032 data.update(self.get_api_data())
1033 return data
1033 return data
1034
1034
1035
1035
1036 class UserApiKeys(Base, BaseModel):
1036 class UserApiKeys(Base, BaseModel):
1037 __tablename__ = 'user_api_keys'
1037 __tablename__ = 'user_api_keys'
1038 __table_args__ = (
1038 __table_args__ = (
1039 Index('uak_api_key_idx', 'api_key', unique=True),
1039 Index('uak_api_key_idx', 'api_key', unique=True),
1040 Index('uak_api_key_expires_idx', 'api_key', 'expires'),
1040 Index('uak_api_key_expires_idx', 'api_key', 'expires'),
1041 base_table_args
1041 base_table_args
1042 )
1042 )
1043 __mapper_args__ = {}
1043 __mapper_args__ = {}
1044
1044
1045 # ApiKey role
1045 # ApiKey role
1046 ROLE_ALL = 'token_role_all'
1046 ROLE_ALL = 'token_role_all'
1047 ROLE_HTTP = 'token_role_http'
1047 ROLE_HTTP = 'token_role_http'
1048 ROLE_VCS = 'token_role_vcs'
1048 ROLE_VCS = 'token_role_vcs'
1049 ROLE_API = 'token_role_api'
1049 ROLE_API = 'token_role_api'
1050 ROLE_FEED = 'token_role_feed'
1050 ROLE_FEED = 'token_role_feed'
1051 ROLE_PASSWORD_RESET = 'token_password_reset'
1051 ROLE_PASSWORD_RESET = 'token_password_reset'
1052
1052
1053 ROLES = [ROLE_ALL, ROLE_HTTP, ROLE_VCS, ROLE_API, ROLE_FEED]
1053 ROLES = [ROLE_ALL, ROLE_HTTP, ROLE_VCS, ROLE_API, ROLE_FEED]
1054
1054
1055 user_api_key_id = Column("user_api_key_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1055 user_api_key_id = Column("user_api_key_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1056 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
1056 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
1057 api_key = Column("api_key", String(255), nullable=False, unique=True)
1057 api_key = Column("api_key", String(255), nullable=False, unique=True)
1058 description = Column('description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))
1058 description = Column('description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))
1059 expires = Column('expires', Float(53), nullable=False)
1059 expires = Column('expires', Float(53), nullable=False)
1060 role = Column('role', String(255), nullable=True)
1060 role = Column('role', String(255), nullable=True)
1061 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
1061 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
1062
1062
1063 # scope columns
1063 # scope columns
1064 repo_id = Column(
1064 repo_id = Column(
1065 'repo_id', Integer(), ForeignKey('repositories.repo_id'),
1065 'repo_id', Integer(), ForeignKey('repositories.repo_id'),
1066 nullable=True, unique=None, default=None)
1066 nullable=True, unique=None, default=None)
1067 repo = relationship('Repository', lazy='joined')
1067 repo = relationship('Repository', lazy='joined')
1068
1068
1069 repo_group_id = Column(
1069 repo_group_id = Column(
1070 'repo_group_id', Integer(), ForeignKey('groups.group_id'),
1070 'repo_group_id', Integer(), ForeignKey('groups.group_id'),
1071 nullable=True, unique=None, default=None)
1071 nullable=True, unique=None, default=None)
1072 repo_group = relationship('RepoGroup', lazy='joined')
1072 repo_group = relationship('RepoGroup', lazy='joined')
1073
1073
1074 user = relationship('User', lazy='joined')
1074 user = relationship('User', lazy='joined')
1075
1075
1076 def __unicode__(self):
1076 def __unicode__(self):
1077 return u"<%s('%s')>" % (self.__class__.__name__, self.role)
1077 return u"<%s('%s')>" % (self.__class__.__name__, self.role)
1078
1078
1079 def __json__(self):
1079 def __json__(self):
1080 data = {
1080 data = {
1081 'auth_token': self.api_key,
1081 'auth_token': self.api_key,
1082 'role': self.role,
1082 'role': self.role,
1083 'scope': self.scope_humanized,
1083 'scope': self.scope_humanized,
1084 'expired': self.expired
1084 'expired': self.expired
1085 }
1085 }
1086 return data
1086 return data
1087
1087
1088 def get_api_data(self, include_secrets=False):
1088 def get_api_data(self, include_secrets=False):
1089 data = self.__json__()
1089 data = self.__json__()
1090 if include_secrets:
1090 if include_secrets:
1091 return data
1091 return data
1092 else:
1092 else:
1093 data['auth_token'] = self.token_obfuscated
1093 data['auth_token'] = self.token_obfuscated
1094 return data
1094 return data
1095
1095
1096 @hybrid_property
1096 @hybrid_property
1097 def description_safe(self):
1097 def description_safe(self):
1098 from rhodecode.lib import helpers as h
1098 from rhodecode.lib import helpers as h
1099 return h.escape(self.description)
1099 return h.escape(self.description)
1100
1100
1101 @property
1101 @property
1102 def expired(self):
1102 def expired(self):
1103 if self.expires == -1:
1103 if self.expires == -1:
1104 return False
1104 return False
1105 return time.time() > self.expires
1105 return time.time() > self.expires
1106
1106
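# Illustrative usage (not part of the original source). An 'expires' value of
# -1 marks a token that never expires; any other value is compared against the
# current unix timestamp:
#
#   token = UserApiKeys.query().first()
#   if token and not token.expired:
#       print(token.token_obfuscated)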
1107 @classmethod
1107 @classmethod
1108 def _get_role_name(cls, role):
1108 def _get_role_name(cls, role):
1109 return {
1109 return {
1110 cls.ROLE_ALL: _('all'),
1110 cls.ROLE_ALL: _('all'),
1111 cls.ROLE_HTTP: _('http/web interface'),
1111 cls.ROLE_HTTP: _('http/web interface'),
1112 cls.ROLE_VCS: _('vcs (git/hg/svn protocol)'),
1112 cls.ROLE_VCS: _('vcs (git/hg/svn protocol)'),
1113 cls.ROLE_API: _('api calls'),
1113 cls.ROLE_API: _('api calls'),
1114 cls.ROLE_FEED: _('feed access'),
1114 cls.ROLE_FEED: _('feed access'),
1115 }.get(role, role)
1115 }.get(role, role)
1116
1116
1117 @property
1117 @property
1118 def role_humanized(self):
1118 def role_humanized(self):
1119 return self._get_role_name(self.role)
1119 return self._get_role_name(self.role)
1120
1120
1121 def _get_scope(self):
1121 def _get_scope(self):
1122 if self.repo:
1122 if self.repo:
1123 return repr(self.repo)
1123 return repr(self.repo)
1124 if self.repo_group:
1124 if self.repo_group:
1125 return repr(self.repo_group) + ' (recursive)'
1125 return repr(self.repo_group) + ' (recursive)'
1126 return 'global'
1126 return 'global'
1127
1127
1128 @property
1128 @property
1129 def scope_humanized(self):
1129 def scope_humanized(self):
1130 return self._get_scope()
1130 return self._get_scope()
1131
1131
1132 @property
1132 @property
1133 def token_obfuscated(self):
1133 def token_obfuscated(self):
1134 if self.api_key:
1134 if self.api_key:
1135 return self.api_key[:4] + "****"
1135 return self.api_key[:4] + "****"
1136
1136
1137
1137
1138 class UserEmailMap(Base, BaseModel):
1138 class UserEmailMap(Base, BaseModel):
1139 __tablename__ = 'user_email_map'
1139 __tablename__ = 'user_email_map'
1140 __table_args__ = (
1140 __table_args__ = (
1141 Index('uem_email_idx', 'email'),
1141 Index('uem_email_idx', 'email'),
1142 UniqueConstraint('email'),
1142 UniqueConstraint('email'),
1143 base_table_args
1143 base_table_args
1144 )
1144 )
1145 __mapper_args__ = {}
1145 __mapper_args__ = {}
1146
1146
1147 email_id = Column("email_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1147 email_id = Column("email_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1148 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
1148 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
1149 _email = Column("email", String(255), nullable=True, unique=False, default=None)
1149 _email = Column("email", String(255), nullable=True, unique=False, default=None)
1150 user = relationship('User', lazy='joined')
1150 user = relationship('User', lazy='joined')
1151
1151
1152 @validates('_email')
1152 @validates('_email')
1153 def validate_email(self, key, email):
1153 def validate_email(self, key, email):
1154 # check that this email is not the user's main one
1154 # check that this email is not the user's main one
1155 main_email = Session().query(User).filter(User.email == email).scalar()
1155 main_email = Session().query(User).filter(User.email == email).scalar()
1156 if main_email is not None:
1156 if main_email is not None:
1157 raise AttributeError('email %s is present in the user table' % email)
1157 raise AttributeError('email %s is present in the user table' % email)
1158 return email
1158 return email
1159
1159
1160 @hybrid_property
1160 @hybrid_property
1161 def email(self):
1161 def email(self):
1162 return self._email
1162 return self._email
1163
1163
1164 @email.setter
1164 @email.setter
1165 def email(self, val):
1165 def email(self, val):
1166 self._email = val.lower() if val else None
1166 self._email = val.lower() if val else None
1167
1167
1168
1168
1169 class UserIpMap(Base, BaseModel):
1169 class UserIpMap(Base, BaseModel):
1170 __tablename__ = 'user_ip_map'
1170 __tablename__ = 'user_ip_map'
1171 __table_args__ = (
1171 __table_args__ = (
1172 UniqueConstraint('user_id', 'ip_addr'),
1172 UniqueConstraint('user_id', 'ip_addr'),
1173 base_table_args
1173 base_table_args
1174 )
1174 )
1175 __mapper_args__ = {}
1175 __mapper_args__ = {}
1176
1176
1177 ip_id = Column("ip_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1177 ip_id = Column("ip_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1178 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
1178 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
1179 ip_addr = Column("ip_addr", String(255), nullable=True, unique=False, default=None)
1179 ip_addr = Column("ip_addr", String(255), nullable=True, unique=False, default=None)
1180 active = Column("active", Boolean(), nullable=True, unique=None, default=True)
1180 active = Column("active", Boolean(), nullable=True, unique=None, default=True)
1181 description = Column("description", String(10000), nullable=True, unique=None, default=None)
1181 description = Column("description", String(10000), nullable=True, unique=None, default=None)
1182 user = relationship('User', lazy='joined')
1182 user = relationship('User', lazy='joined')
1183
1183
1184 @hybrid_property
1184 @hybrid_property
1185 def description_safe(self):
1185 def description_safe(self):
1186 from rhodecode.lib import helpers as h
1186 from rhodecode.lib import helpers as h
1187 return h.escape(self.description)
1187 return h.escape(self.description)
1188
1188
1189 @classmethod
1189 @classmethod
1190 def _get_ip_range(cls, ip_addr):
1190 def _get_ip_range(cls, ip_addr):
1191 net = ipaddress.ip_network(safe_unicode(ip_addr), strict=False)
1191 net = ipaddress.ip_network(safe_unicode(ip_addr), strict=False)
1192 return [str(net.network_address), str(net.broadcast_address)]
1192 return [str(net.network_address), str(net.broadcast_address)]
1193
1193
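# Illustrative usage (not part of the original source). For a CIDR entry the
# helper returns the first and last address of the network:
#
#   UserIpMap._get_ip_range('192.168.1.0/24')
#   # -> ['192.168.1.0', '192.168.1.255']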
1194 def __json__(self):
1194 def __json__(self):
1195 return {
1195 return {
1196 'ip_addr': self.ip_addr,
1196 'ip_addr': self.ip_addr,
1197 'ip_range': self._get_ip_range(self.ip_addr),
1197 'ip_range': self._get_ip_range(self.ip_addr),
1198 }
1198 }
1199
1199
1200 def __unicode__(self):
1200 def __unicode__(self):
1201 return u"<%s('user_id:%s=>%s')>" % (self.__class__.__name__,
1201 return u"<%s('user_id:%s=>%s')>" % (self.__class__.__name__,
1202 self.user_id, self.ip_addr)
1202 self.user_id, self.ip_addr)
1203
1203
1204
1204
1205 class UserSshKeys(Base, BaseModel):
1205 class UserSshKeys(Base, BaseModel):
1206 __tablename__ = 'user_ssh_keys'
1206 __tablename__ = 'user_ssh_keys'
1207 __table_args__ = (
1207 __table_args__ = (
1208 Index('usk_ssh_key_fingerprint_idx', 'ssh_key_fingerprint'),
1208 Index('usk_ssh_key_fingerprint_idx', 'ssh_key_fingerprint'),
1209
1209
1210 UniqueConstraint('ssh_key_fingerprint'),
1210 UniqueConstraint('ssh_key_fingerprint'),
1211
1211
1212 base_table_args
1212 base_table_args
1213 )
1213 )
1214 __mapper_args__ = {}
1214 __mapper_args__ = {}
1215
1215
1216 ssh_key_id = Column('ssh_key_id', Integer(), nullable=False, unique=True, default=None, primary_key=True)
1216 ssh_key_id = Column('ssh_key_id', Integer(), nullable=False, unique=True, default=None, primary_key=True)
1217 ssh_key_data = Column('ssh_key_data', String(10240), nullable=False, unique=None, default=None)
1217 ssh_key_data = Column('ssh_key_data', String(10240), nullable=False, unique=None, default=None)
1218 ssh_key_fingerprint = Column('ssh_key_fingerprint', String(255), nullable=False, unique=None, default=None)
1218 ssh_key_fingerprint = Column('ssh_key_fingerprint', String(255), nullable=False, unique=None, default=None)
1219
1219
1220 description = Column('description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))
1220 description = Column('description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))
1221
1221
1222 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
1222 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
1223 accessed_on = Column('accessed_on', DateTime(timezone=False), nullable=True, default=None)
1223 accessed_on = Column('accessed_on', DateTime(timezone=False), nullable=True, default=None)
1224 user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
1224 user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
1225
1225
1226 user = relationship('User', lazy='joined')
1226 user = relationship('User', lazy='joined')
1227
1227
1228 def __json__(self):
1228 def __json__(self):
1229 data = {
1229 data = {
1230 'ssh_fingerprint': self.ssh_key_fingerprint,
1230 'ssh_fingerprint': self.ssh_key_fingerprint,
1231 'description': self.description,
1231 'description': self.description,
1232 'created_on': self.created_on
1232 'created_on': self.created_on
1233 }
1233 }
1234 return data
1234 return data
1235
1235
1236 def get_api_data(self):
1236 def get_api_data(self):
1237 data = self.__json__()
1237 data = self.__json__()
1238 return data
1238 return data
1239
1239
1240
1240
1241 class UserLog(Base, BaseModel):
1241 class UserLog(Base, BaseModel):
1242 __tablename__ = 'user_logs'
1242 __tablename__ = 'user_logs'
1243 __table_args__ = (
1243 __table_args__ = (
1244 base_table_args,
1244 base_table_args,
1245 )
1245 )
1246
1246
1247 VERSION_1 = 'v1'
1247 VERSION_1 = 'v1'
1248 VERSION_2 = 'v2'
1248 VERSION_2 = 'v2'
1249 VERSIONS = [VERSION_1, VERSION_2]
1249 VERSIONS = [VERSION_1, VERSION_2]
1250
1250
1251 user_log_id = Column("user_log_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1251 user_log_id = Column("user_log_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1252 user_id = Column("user_id", Integer(), ForeignKey('users.user_id', ondelete='SET NULL'), nullable=True, unique=None, default=None)
1252 user_id = Column("user_id", Integer(), ForeignKey('users.user_id', ondelete='SET NULL'), nullable=True, unique=None, default=None)
1253 username = Column("username", String(255), nullable=True, unique=None, default=None)
1253 username = Column("username", String(255), nullable=True, unique=None, default=None)
1254 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id', ondelete='SET NULL'), nullable=True, unique=None, default=None)
1254 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id', ondelete='SET NULL'), nullable=True, unique=None, default=None)
1255 repository_name = Column("repository_name", String(255), nullable=True, unique=None, default=None)
1255 repository_name = Column("repository_name", String(255), nullable=True, unique=None, default=None)
1256 user_ip = Column("user_ip", String(255), nullable=True, unique=None, default=None)
1256 user_ip = Column("user_ip", String(255), nullable=True, unique=None, default=None)
1257 action = Column("action", Text().with_variant(Text(1200000), 'mysql'), nullable=True, unique=None, default=None)
1257 action = Column("action", Text().with_variant(Text(1200000), 'mysql'), nullable=True, unique=None, default=None)
1258 action_date = Column("action_date", DateTime(timezone=False), nullable=True, unique=None, default=None)
1258 action_date = Column("action_date", DateTime(timezone=False), nullable=True, unique=None, default=None)
1259
1259
1260 version = Column("version", String(255), nullable=True, default=VERSION_1)
1260 version = Column("version", String(255), nullable=True, default=VERSION_1)
1261 user_data = Column('user_data_json', MutationObj.as_mutable(JsonType(dialect_map=dict(mysql=LONGTEXT()))))
1261 user_data = Column('user_data_json', MutationObj.as_mutable(JsonType(dialect_map=dict(mysql=LONGTEXT()))))
1262 action_data = Column('action_data_json', MutationObj.as_mutable(JsonType(dialect_map=dict(mysql=LONGTEXT()))))
1262 action_data = Column('action_data_json', MutationObj.as_mutable(JsonType(dialect_map=dict(mysql=LONGTEXT()))))
1263
1263
1264 def __unicode__(self):
1264 def __unicode__(self):
1265 return u"<%s('id:%s:%s')>" % (
1265 return u"<%s('id:%s:%s')>" % (
1266 self.__class__.__name__, self.repository_name, self.action)
1266 self.__class__.__name__, self.repository_name, self.action)
1267
1267
1268 def __json__(self):
1268 def __json__(self):
1269 return {
1269 return {
1270 'user_id': self.user_id,
1270 'user_id': self.user_id,
1271 'username': self.username,
1271 'username': self.username,
1272 'repository_id': self.repository_id,
1272 'repository_id': self.repository_id,
1273 'repository_name': self.repository_name,
1273 'repository_name': self.repository_name,
1274 'user_ip': self.user_ip,
1274 'user_ip': self.user_ip,
1275 'action_date': self.action_date,
1275 'action_date': self.action_date,
1276 'action': self.action,
1276 'action': self.action,
1277 }
1277 }
1278
1278
1279 @hybrid_property
1279 @hybrid_property
1280 def entry_id(self):
1280 def entry_id(self):
1281 return self.user_log_id
1281 return self.user_log_id
1282
1282
1283 @property
1283 @property
1284 def action_as_day(self):
1284 def action_as_day(self):
1285 return datetime.date(*self.action_date.timetuple()[:3])
1285 return datetime.date(*self.action_date.timetuple()[:3])
1286
1286
1287 user = relationship('User')
1287 user = relationship('User')
1288 repository = relationship('Repository', cascade='')
1288 repository = relationship('Repository', cascade='')
1289
1289
1290
1290
1291 class UserGroup(Base, BaseModel):
1291 class UserGroup(Base, BaseModel):
1292 __tablename__ = 'users_groups'
1292 __tablename__ = 'users_groups'
1293 __table_args__ = (
1293 __table_args__ = (
1294 base_table_args,
1294 base_table_args,
1295 )
1295 )
1296
1296
1297 users_group_id = Column("users_group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1297 users_group_id = Column("users_group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1298 users_group_name = Column("users_group_name", String(255), nullable=False, unique=True, default=None)
1298 users_group_name = Column("users_group_name", String(255), nullable=False, unique=True, default=None)
1299 user_group_description = Column("user_group_description", String(10000), nullable=True, unique=None, default=None)
1299 user_group_description = Column("user_group_description", String(10000), nullable=True, unique=None, default=None)
1300 users_group_active = Column("users_group_active", Boolean(), nullable=True, unique=None, default=None)
1300 users_group_active = Column("users_group_active", Boolean(), nullable=True, unique=None, default=None)
1301 inherit_default_permissions = Column("users_group_inherit_default_permissions", Boolean(), nullable=False, unique=None, default=True)
1301 inherit_default_permissions = Column("users_group_inherit_default_permissions", Boolean(), nullable=False, unique=None, default=True)
1302 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None)
1302 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None)
1303 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
1303 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
1304 _group_data = Column("group_data", LargeBinary(), nullable=True) # JSON data
1304 _group_data = Column("group_data", LargeBinary(), nullable=True) # JSON data
1305
1305
1306 members = relationship('UserGroupMember', cascade="all, delete, delete-orphan", lazy="joined")
1306 members = relationship('UserGroupMember', cascade="all, delete, delete-orphan", lazy="joined")
1307 users_group_to_perm = relationship('UserGroupToPerm', cascade='all')
1307 users_group_to_perm = relationship('UserGroupToPerm', cascade='all')
1308 users_group_repo_to_perm = relationship('UserGroupRepoToPerm', cascade='all')
1308 users_group_repo_to_perm = relationship('UserGroupRepoToPerm', cascade='all')
1309 users_group_repo_group_to_perm = relationship('UserGroupRepoGroupToPerm', cascade='all')
1309 users_group_repo_group_to_perm = relationship('UserGroupRepoGroupToPerm', cascade='all')
1310 user_user_group_to_perm = relationship('UserUserGroupToPerm', cascade='all')
1310 user_user_group_to_perm = relationship('UserUserGroupToPerm', cascade='all')
1311 user_group_user_group_to_perm = relationship('UserGroupUserGroupToPerm', primaryjoin="UserGroupUserGroupToPerm.target_user_group_id==UserGroup.users_group_id", cascade='all')
1311 user_group_user_group_to_perm = relationship('UserGroupUserGroupToPerm', primaryjoin="UserGroupUserGroupToPerm.target_user_group_id==UserGroup.users_group_id", cascade='all')
1312
1312
1313 user_group_review_rules = relationship('RepoReviewRuleUserGroup', cascade='all')
1313 user_group_review_rules = relationship('RepoReviewRuleUserGroup', cascade='all')
1314 user = relationship('User', primaryjoin="User.user_id==UserGroup.user_id")
1314 user = relationship('User', primaryjoin="User.user_id==UserGroup.user_id")
1315
1315
1316 @classmethod
1316 @classmethod
1317 def _load_group_data(cls, column):
1317 def _load_group_data(cls, column):
1318 if not column:
1318 if not column:
1319 return {}
1319 return {}
1320
1320
1321 try:
1321 try:
1322 return json.loads(column) or {}
1322 return json.loads(column) or {}
1323 except TypeError:
1323 except TypeError:
1324 return {}
1324 return {}
1325
1325
1326 @hybrid_property
1326 @hybrid_property
1327 def description_safe(self):
1327 def description_safe(self):
1328 from rhodecode.lib import helpers as h
1328 from rhodecode.lib import helpers as h
1329 return h.escape(self.user_group_description)
1329 return h.escape(self.user_group_description)
1330
1330
1331 @hybrid_property
1331 @hybrid_property
1332 def group_data(self):
1332 def group_data(self):
1333 return self._load_group_data(self._group_data)
1333 return self._load_group_data(self._group_data)
1334
1334
1335 @group_data.expression
1335 @group_data.expression
1336 def group_data(self, **kwargs):
1336 def group_data(self, **kwargs):
1337 return self._group_data
1337 return self._group_data
1338
1338
1339 @group_data.setter
1339 @group_data.setter
1340 def group_data(self, val):
1340 def group_data(self, val):
1341 try:
1341 try:
1342 self._group_data = json.dumps(val)
1342 self._group_data = json.dumps(val)
1343 except Exception:
1343 except Exception:
1344 log.error(traceback.format_exc())
1344 log.error(traceback.format_exc())
1345
1345
1346 @classmethod
1346 @classmethod
1347 def _load_sync(cls, group_data):
1347 def _load_sync(cls, group_data):
1348 if group_data:
1348 if group_data:
1349 return group_data.get('extern_type')
1349 return group_data.get('extern_type')
1350
1350
1351 @property
1351 @property
1352 def sync(self):
1352 def sync(self):
1353 return self._load_sync(self.group_data)
1353 return self._load_sync(self.group_data)
1354
1354
1355 def __unicode__(self):
1355 def __unicode__(self):
1356 return u"<%s('id:%s:%s')>" % (self.__class__.__name__,
1356 return u"<%s('id:%s:%s')>" % (self.__class__.__name__,
1357 self.users_group_id,
1357 self.users_group_id,
1358 self.users_group_name)
1358 self.users_group_name)
1359
1359
1360 @classmethod
1360 @classmethod
1361 def get_by_group_name(cls, group_name, cache=False,
1361 def get_by_group_name(cls, group_name, cache=False,
1362 case_insensitive=False):
1362 case_insensitive=False):
1363 if case_insensitive:
1363 if case_insensitive:
1364 q = cls.query().filter(func.lower(cls.users_group_name) ==
1364 q = cls.query().filter(func.lower(cls.users_group_name) ==
1365 func.lower(group_name))
1365 func.lower(group_name))
1366
1366
1367 else:
1367 else:
1368 q = cls.query().filter(cls.users_group_name == group_name)
1368 q = cls.query().filter(cls.users_group_name == group_name)
1369 if cache:
1369 if cache:
1370 q = q.options(
1370 q = q.options(
1371 FromCache("sql_cache_short", "get_group_%s" % _hash_key(group_name)))
1371 FromCache("sql_cache_short", "get_group_%s" % _hash_key(group_name)))
1372 return q.scalar()
1372 return q.scalar()
1373
1373
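# Illustrative usage (not part of the original source; the group name is a
# hypothetical example). The result is cached in the dogpile-backed
# 'sql_cache_short' region when cache=True:
#
#   group = UserGroup.get_by_group_name('developers', cache=True, case_insensitive=True)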
1374 @classmethod
1374 @classmethod
1375 def get(cls, user_group_id, cache=False):
1375 def get(cls, user_group_id, cache=False):
1376 if not user_group_id:
1376 if not user_group_id:
1377 return
1377 return
1378
1378
1379 user_group = cls.query()
1379 user_group = cls.query()
1380 if cache:
1380 if cache:
1381 user_group = user_group.options(
1381 user_group = user_group.options(
1382 FromCache("sql_cache_short", "get_users_group_%s" % user_group_id))
1382 FromCache("sql_cache_short", "get_users_group_%s" % user_group_id))
1383 return user_group.get(user_group_id)
1383 return user_group.get(user_group_id)
1384
1384
1385 def permissions(self, with_admins=True, with_owner=True):
1385 def permissions(self, with_admins=True, with_owner=True):
1386 q = UserUserGroupToPerm.query().filter(UserUserGroupToPerm.user_group == self)
1386 q = UserUserGroupToPerm.query().filter(UserUserGroupToPerm.user_group == self)
1387 q = q.options(joinedload(UserUserGroupToPerm.user_group),
1387 q = q.options(joinedload(UserUserGroupToPerm.user_group),
1388 joinedload(UserUserGroupToPerm.user),
1388 joinedload(UserUserGroupToPerm.user),
1389 joinedload(UserUserGroupToPerm.permission),)
1389 joinedload(UserUserGroupToPerm.permission),)
1390
1390
1391 # get owners, admins and their permissions. We rewrite the SQLAlchemy
1391 # get owners, admins and their permissions. We rewrite the SQLAlchemy
1392 # objects into named-tuple-like AttributeDicts because the SQLAlchemy
1392 # objects into named-tuple-like AttributeDicts because the SQLAlchemy
1393 # session keeps a global reference, so changing one object would propagate
1393 # session keeps a global reference, so changing one object would propagate
1394 # to all others. Without this, if an admin is also the owner, setting
1394 # to all others. Without this, if an admin is also the owner, setting
1395 # admin_row on one record would leak into the other.
1395 # admin_row on one record would leak into the other.
1396 perm_rows = []
1396 perm_rows = []
1397 for _usr in q.all():
1397 for _usr in q.all():
1398 usr = AttributeDict(_usr.user.get_dict())
1398 usr = AttributeDict(_usr.user.get_dict())
1399 usr.permission = _usr.permission.permission_name
1399 usr.permission = _usr.permission.permission_name
1400 perm_rows.append(usr)
1400 perm_rows.append(usr)
1401
1401
1402 # filter the perm rows by 'default' first and then sort them by
1402 # filter the perm rows by 'default' first and then sort them by
1403 # admin,write,read,none permissions sorted again alphabetically in
1403 # admin,write,read,none permissions sorted again alphabetically in
1404 # each group
1404 # each group
1405 perm_rows = sorted(perm_rows, key=display_user_sort)
1405 perm_rows = sorted(perm_rows, key=display_user_sort)
1406
1406
1407 _admin_perm = 'usergroup.admin'
1407 _admin_perm = 'usergroup.admin'
1408 owner_row = []
1408 owner_row = []
1409 if with_owner:
1409 if with_owner:
1410 usr = AttributeDict(self.user.get_dict())
1410 usr = AttributeDict(self.user.get_dict())
1411 usr.owner_row = True
1411 usr.owner_row = True
1412 usr.permission = _admin_perm
1412 usr.permission = _admin_perm
1413 owner_row.append(usr)
1413 owner_row.append(usr)
1414
1414
1415 super_admin_rows = []
1415 super_admin_rows = []
1416 if with_admins:
1416 if with_admins:
1417 for usr in User.get_all_super_admins():
1417 for usr in User.get_all_super_admins():
1418 # if this admin is also owner, don't double the record
1418 # if this admin is also owner, don't double the record
1419 if owner_row and usr.user_id == owner_row[0].user_id:
1419 if owner_row and usr.user_id == owner_row[0].user_id:
1420 owner_row[0].admin_row = True
1420 owner_row[0].admin_row = True
1421 else:
1421 else:
1422 usr = AttributeDict(usr.get_dict())
1422 usr = AttributeDict(usr.get_dict())
1423 usr.admin_row = True
1423 usr.admin_row = True
1424 usr.permission = _admin_perm
1424 usr.permission = _admin_perm
1425 super_admin_rows.append(usr)
1425 super_admin_rows.append(usr)
1426
1426
1427 return super_admin_rows + owner_row + perm_rows
1427 return super_admin_rows + owner_row + perm_rows
1428
1428
1429 def permission_user_groups(self):
1429 def permission_user_groups(self):
1430 q = UserGroupUserGroupToPerm.query().filter(UserGroupUserGroupToPerm.target_user_group == self)
1430 q = UserGroupUserGroupToPerm.query().filter(UserGroupUserGroupToPerm.target_user_group == self)
1431 q = q.options(joinedload(UserGroupUserGroupToPerm.user_group),
1431 q = q.options(joinedload(UserGroupUserGroupToPerm.user_group),
1432 joinedload(UserGroupUserGroupToPerm.target_user_group),
1432 joinedload(UserGroupUserGroupToPerm.target_user_group),
1433 joinedload(UserGroupUserGroupToPerm.permission),)
1433 joinedload(UserGroupUserGroupToPerm.permission),)
1434
1434
1435 perm_rows = []
1435 perm_rows = []
1436 for _user_group in q.all():
1436 for _user_group in q.all():
1437 usr = AttributeDict(_user_group.user_group.get_dict())
1437 usr = AttributeDict(_user_group.user_group.get_dict())
1438 usr.permission = _user_group.permission.permission_name
1438 usr.permission = _user_group.permission.permission_name
1439 perm_rows.append(usr)
1439 perm_rows.append(usr)
1440
1440
1441 perm_rows = sorted(perm_rows, key=display_user_group_sort)
1441 perm_rows = sorted(perm_rows, key=display_user_group_sort)
1442 return perm_rows
1442 return perm_rows
1443
1443
1444 def _get_default_perms(self, user_group, suffix=''):
1444 def _get_default_perms(self, user_group, suffix=''):
1445 from rhodecode.model.permission import PermissionModel
1445 from rhodecode.model.permission import PermissionModel
1446 return PermissionModel().get_default_perms(user_group.users_group_to_perm, suffix)
1446 return PermissionModel().get_default_perms(user_group.users_group_to_perm, suffix)
1447
1447
1448 def get_default_perms(self, suffix=''):
1448 def get_default_perms(self, suffix=''):
1449 return self._get_default_perms(self, suffix)
1449 return self._get_default_perms(self, suffix)
1450
1450
1451 def get_api_data(self, with_group_members=True, include_secrets=False):
1451 def get_api_data(self, with_group_members=True, include_secrets=False):
1452 """
1452 """
1453 :param include_secrets: See :meth:`User.get_api_data`, this parameter is
1453 :param include_secrets: See :meth:`User.get_api_data`, this parameter is
1454 basically forwarded.
1454 basically forwarded.
1455
1455
1456 """
1456 """
1457 user_group = self
1457 user_group = self
1458 data = {
1458 data = {
1459 'users_group_id': user_group.users_group_id,
1459 'users_group_id': user_group.users_group_id,
1460 'group_name': user_group.users_group_name,
1460 'group_name': user_group.users_group_name,
1461 'group_description': user_group.user_group_description,
1461 'group_description': user_group.user_group_description,
1462 'active': user_group.users_group_active,
1462 'active': user_group.users_group_active,
1463 'owner': user_group.user.username,
1463 'owner': user_group.user.username,
1464 'sync': user_group.sync,
1464 'sync': user_group.sync,
1465 'owner_email': user_group.user.email,
1465 'owner_email': user_group.user.email,
1466 }
1466 }
1467
1467
1468 if with_group_members:
1468 if with_group_members:
1469 users = []
1469 users = []
1470 for user in user_group.members:
1470 for user in user_group.members:
1471 user = user.user
1471 user = user.user
1472 users.append(user.get_api_data(include_secrets=include_secrets))
1472 users.append(user.get_api_data(include_secrets=include_secrets))
1473 data['users'] = users
1473 data['users'] = users
1474
1474
1475 return data
1475 return data
1476
1476
1477
1477
1478 class UserGroupMember(Base, BaseModel):
1478 class UserGroupMember(Base, BaseModel):
1479 __tablename__ = 'users_groups_members'
1479 __tablename__ = 'users_groups_members'
1480 __table_args__ = (
1480 __table_args__ = (
1481 base_table_args,
1481 base_table_args,
1482 )
1482 )
1483
1483
1484 users_group_member_id = Column("users_group_member_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1484 users_group_member_id = Column("users_group_member_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1485 users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
1485 users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
1486 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
1486 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
1487
1487
1488 user = relationship('User', lazy='joined')
1488 user = relationship('User', lazy='joined')
1489 users_group = relationship('UserGroup')
1489 users_group = relationship('UserGroup')
1490
1490
1491 def __init__(self, gr_id='', u_id=''):
1491 def __init__(self, gr_id='', u_id=''):
1492 self.users_group_id = gr_id
1492 self.users_group_id = gr_id
1493 self.user_id = u_id
1493 self.user_id = u_id
1494
1494
1495
1495
1496 class RepositoryField(Base, BaseModel):
1496 class RepositoryField(Base, BaseModel):
1497 __tablename__ = 'repositories_fields'
1497 __tablename__ = 'repositories_fields'
1498 __table_args__ = (
1498 __table_args__ = (
1499 UniqueConstraint('repository_id', 'field_key'), # no-multi field
1499 UniqueConstraint('repository_id', 'field_key'), # no-multi field
1500 base_table_args,
1500 base_table_args,
1501 )
1501 )
1502
1502
1503 PREFIX = 'ex_' # prefix used in form to not conflict with already existing fields
1503 PREFIX = 'ex_' # prefix used in form to not conflict with already existing fields
1504
1504
1505 repo_field_id = Column("repo_field_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1505 repo_field_id = Column("repo_field_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1506 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
1506 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
1507 field_key = Column("field_key", String(250))
1507 field_key = Column("field_key", String(250))
1508 field_label = Column("field_label", String(1024), nullable=False)
1508 field_label = Column("field_label", String(1024), nullable=False)
1509 field_value = Column("field_value", String(10000), nullable=False)
1509 field_value = Column("field_value", String(10000), nullable=False)
1510 field_desc = Column("field_desc", String(1024), nullable=False)
1510 field_desc = Column("field_desc", String(1024), nullable=False)
1511 field_type = Column("field_type", String(255), nullable=False, unique=None)
1511 field_type = Column("field_type", String(255), nullable=False, unique=None)
1512 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
1512 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
1513
1513
1514 repository = relationship('Repository')
1514 repository = relationship('Repository')
1515
1515
1516 @property
1516 @property
1517 def field_key_prefixed(self):
1517 def field_key_prefixed(self):
1518 return 'ex_%s' % self.field_key
1518 return 'ex_%s' % self.field_key
1519
1519
1520 @classmethod
1520 @classmethod
1521 def un_prefix_key(cls, key):
1521 def un_prefix_key(cls, key):
1522 if key.startswith(cls.PREFIX):
1522 if key.startswith(cls.PREFIX):
1523 return key[len(cls.PREFIX):]
1523 return key[len(cls.PREFIX):]
1524 return key
1524 return key
1525
1525
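# Illustrative usage (not part of the original source; the field key is a
# hypothetical example). Form keys carry the 'ex_' prefix, which is stripped
# before storage:
#
#   RepositoryField.un_prefix_key('ex_ticket_url')   # -> 'ticket_url'
#   RepositoryField.un_prefix_key('ticket_url')      # -> 'ticket_url' (unchanged)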
1526 @classmethod
1526 @classmethod
1527 def get_by_key_name(cls, key, repo):
1527 def get_by_key_name(cls, key, repo):
1528 row = cls.query()\
1528 row = cls.query()\
1529 .filter(cls.repository == repo)\
1529 .filter(cls.repository == repo)\
1530 .filter(cls.field_key == key).scalar()
1530 .filter(cls.field_key == key).scalar()
1531 return row
1531 return row
1532
1532
1533
1533
1534 class Repository(Base, BaseModel):
1534 class Repository(Base, BaseModel):
1535 __tablename__ = 'repositories'
1535 __tablename__ = 'repositories'
1536 __table_args__ = (
1536 __table_args__ = (
1537 Index('r_repo_name_idx', 'repo_name', mysql_length=255),
1537 Index('r_repo_name_idx', 'repo_name', mysql_length=255),
1538 base_table_args,
1538 base_table_args,
1539 )
1539 )
1540 DEFAULT_CLONE_URI = '{scheme}://{user}@{netloc}/{repo}'
1540 DEFAULT_CLONE_URI = '{scheme}://{user}@{netloc}/{repo}'
1541 DEFAULT_CLONE_URI_ID = '{scheme}://{user}@{netloc}/_{repoid}'
1541 DEFAULT_CLONE_URI_ID = '{scheme}://{user}@{netloc}/_{repoid}'
1542 DEFAULT_CLONE_URI_SSH = 'ssh://{sys_user}@{hostname}/{repo}'
1542 DEFAULT_CLONE_URI_SSH = 'ssh://{sys_user}@{hostname}/{repo}'
1543
1543
1544 STATE_CREATED = 'repo_state_created'
1544 STATE_CREATED = 'repo_state_created'
1545 STATE_PENDING = 'repo_state_pending'
1545 STATE_PENDING = 'repo_state_pending'
1546 STATE_ERROR = 'repo_state_error'
1546 STATE_ERROR = 'repo_state_error'
1547
1547
1548 LOCK_AUTOMATIC = 'lock_auto'
1548 LOCK_AUTOMATIC = 'lock_auto'
1549 LOCK_API = 'lock_api'
1549 LOCK_API = 'lock_api'
1550 LOCK_WEB = 'lock_web'
1550 LOCK_WEB = 'lock_web'
1551 LOCK_PULL = 'lock_pull'
1551 LOCK_PULL = 'lock_pull'
1552
1552
1553 NAME_SEP = URL_SEP
1553 NAME_SEP = URL_SEP
1554
1554
1555 repo_id = Column(
1555 repo_id = Column(
1556 "repo_id", Integer(), nullable=False, unique=True, default=None,
1556 "repo_id", Integer(), nullable=False, unique=True, default=None,
1557 primary_key=True)
1557 primary_key=True)
1558 _repo_name = Column(
1558 _repo_name = Column(
1559 "repo_name", Text(), nullable=False, default=None)
1559 "repo_name", Text(), nullable=False, default=None)
1560 _repo_name_hash = Column(
1560 _repo_name_hash = Column(
1561 "repo_name_hash", String(255), nullable=False, unique=True)
1561 "repo_name_hash", String(255), nullable=False, unique=True)
1562 repo_state = Column("repo_state", String(255), nullable=True)
1562 repo_state = Column("repo_state", String(255), nullable=True)
1563
1563
1564 clone_uri = Column(
1564 clone_uri = Column(
1565 "clone_uri", EncryptedTextValue(), nullable=True, unique=False,
1565 "clone_uri", EncryptedTextValue(), nullable=True, unique=False,
1566 default=None)
1566 default=None)
1567 push_uri = Column(
1567 push_uri = Column(
1568 "push_uri", EncryptedTextValue(), nullable=True, unique=False,
1568 "push_uri", EncryptedTextValue(), nullable=True, unique=False,
1569 default=None)
1569 default=None)
1570 repo_type = Column(
1570 repo_type = Column(
1571 "repo_type", String(255), nullable=False, unique=False, default=None)
1571 "repo_type", String(255), nullable=False, unique=False, default=None)
1572 user_id = Column(
1572 user_id = Column(
1573 "user_id", Integer(), ForeignKey('users.user_id'), nullable=False,
1573 "user_id", Integer(), ForeignKey('users.user_id'), nullable=False,
1574 unique=False, default=None)
1574 unique=False, default=None)
1575 private = Column(
1575 private = Column(
1576 "private", Boolean(), nullable=True, unique=None, default=None)
1576 "private", Boolean(), nullable=True, unique=None, default=None)
1577 enable_statistics = Column(
1577 enable_statistics = Column(
1578 "statistics", Boolean(), nullable=True, unique=None, default=True)
1578 "statistics", Boolean(), nullable=True, unique=None, default=True)
1579 enable_downloads = Column(
1579 enable_downloads = Column(
1580 "downloads", Boolean(), nullable=True, unique=None, default=True)
1580 "downloads", Boolean(), nullable=True, unique=None, default=True)
1581 description = Column(
1581 description = Column(
1582 "description", String(10000), nullable=True, unique=None, default=None)
1582 "description", String(10000), nullable=True, unique=None, default=None)
1583 created_on = Column(
1583 created_on = Column(
1584 'created_on', DateTime(timezone=False), nullable=True, unique=None,
1584 'created_on', DateTime(timezone=False), nullable=True, unique=None,
1585 default=datetime.datetime.now)
1585 default=datetime.datetime.now)
1586 updated_on = Column(
1586 updated_on = Column(
1587 'updated_on', DateTime(timezone=False), nullable=True, unique=None,
1587 'updated_on', DateTime(timezone=False), nullable=True, unique=None,
1588 default=datetime.datetime.now)
1588 default=datetime.datetime.now)
1589 _landing_revision = Column(
1589 _landing_revision = Column(
1590 "landing_revision", String(255), nullable=False, unique=False,
1590 "landing_revision", String(255), nullable=False, unique=False,
1591 default=None)
1591 default=None)
1592 enable_locking = Column(
1592 enable_locking = Column(
1593 "enable_locking", Boolean(), nullable=False, unique=None,
1593 "enable_locking", Boolean(), nullable=False, unique=None,
1594 default=False)
1594 default=False)
1595 _locked = Column(
1595 _locked = Column(
1596 "locked", String(255), nullable=True, unique=False, default=None)
1596 "locked", String(255), nullable=True, unique=False, default=None)
1597 _changeset_cache = Column(
1597 _changeset_cache = Column(
1598 "changeset_cache", LargeBinary(), nullable=True) # JSON data
1598 "changeset_cache", LargeBinary(), nullable=True) # JSON data
1599
1599
1600 fork_id = Column(
1600 fork_id = Column(
1601 "fork_id", Integer(), ForeignKey('repositories.repo_id'),
1601 "fork_id", Integer(), ForeignKey('repositories.repo_id'),
1602 nullable=True, unique=False, default=None)
1602 nullable=True, unique=False, default=None)
1603 group_id = Column(
1603 group_id = Column(
1604 "group_id", Integer(), ForeignKey('groups.group_id'), nullable=True,
1604 "group_id", Integer(), ForeignKey('groups.group_id'), nullable=True,
1605 unique=False, default=None)
1605 unique=False, default=None)
1606
1606
1607 user = relationship('User', lazy='joined')
1607 user = relationship('User', lazy='joined')
1608 fork = relationship('Repository', remote_side=repo_id, lazy='joined')
1608 fork = relationship('Repository', remote_side=repo_id, lazy='joined')
1609 group = relationship('RepoGroup', lazy='joined')
1609 group = relationship('RepoGroup', lazy='joined')
1610 repo_to_perm = relationship(
1610 repo_to_perm = relationship(
1611 'UserRepoToPerm', cascade='all',
1611 'UserRepoToPerm', cascade='all',
1612 order_by='UserRepoToPerm.repo_to_perm_id')
1612 order_by='UserRepoToPerm.repo_to_perm_id')
1613 users_group_to_perm = relationship('UserGroupRepoToPerm', cascade='all')
1613 users_group_to_perm = relationship('UserGroupRepoToPerm', cascade='all')
1614 stats = relationship('Statistics', cascade='all', uselist=False)
1614 stats = relationship('Statistics', cascade='all', uselist=False)
1615
1615
1616 followers = relationship(
1616 followers = relationship(
1617 'UserFollowing',
1617 'UserFollowing',
1618 primaryjoin='UserFollowing.follows_repo_id==Repository.repo_id',
1618 primaryjoin='UserFollowing.follows_repo_id==Repository.repo_id',
1619 cascade='all')
1619 cascade='all')
1620 extra_fields = relationship(
1620 extra_fields = relationship(
1621 'RepositoryField', cascade="all, delete, delete-orphan")
1621 'RepositoryField', cascade="all, delete, delete-orphan")
1622 logs = relationship('UserLog')
1622 logs = relationship('UserLog')
1623 comments = relationship(
1623 comments = relationship(
1624 'ChangesetComment', cascade="all, delete, delete-orphan")
1624 'ChangesetComment', cascade="all, delete, delete-orphan")
1625 pull_requests_source = relationship(
1625 pull_requests_source = relationship(
1626 'PullRequest',
1626 'PullRequest',
1627 primaryjoin='PullRequest.source_repo_id==Repository.repo_id',
1627 primaryjoin='PullRequest.source_repo_id==Repository.repo_id',
1628 cascade="all, delete, delete-orphan")
1628 cascade="all, delete, delete-orphan")
1629 pull_requests_target = relationship(
1629 pull_requests_target = relationship(
1630 'PullRequest',
1630 'PullRequest',
1631 primaryjoin='PullRequest.target_repo_id==Repository.repo_id',
1631 primaryjoin='PullRequest.target_repo_id==Repository.repo_id',
1632 cascade="all, delete, delete-orphan")
1632 cascade="all, delete, delete-orphan")
1633 ui = relationship('RepoRhodeCodeUi', cascade="all")
1633 ui = relationship('RepoRhodeCodeUi', cascade="all")
1634 settings = relationship('RepoRhodeCodeSetting', cascade="all")
1634 settings = relationship('RepoRhodeCodeSetting', cascade="all")
1635 integrations = relationship('Integration',
1635 integrations = relationship('Integration',
1636 cascade="all, delete, delete-orphan")
1636 cascade="all, delete, delete-orphan")
1637
1637
1638 scoped_tokens = relationship('UserApiKeys', cascade="all")
1638 scoped_tokens = relationship('UserApiKeys', cascade="all")
1639
1639
1640 def __unicode__(self):
1640 def __unicode__(self):
1641 return u"<%s('%s:%s')>" % (self.__class__.__name__, self.repo_id,
1641 return u"<%s('%s:%s')>" % (self.__class__.__name__, self.repo_id,
1642 safe_unicode(self.repo_name))
1642 safe_unicode(self.repo_name))
1643
1643
1644 @hybrid_property
1644 @hybrid_property
1645 def description_safe(self):
1645 def description_safe(self):
1646 from rhodecode.lib import helpers as h
1646 from rhodecode.lib import helpers as h
1647 return h.escape(self.description)
1647 return h.escape(self.description)
1648
1648
1649 @hybrid_property
1649 @hybrid_property
1650 def landing_rev(self):
1650 def landing_rev(self):
1651 # should always return [rev_type, rev]
1651 # should always return [rev_type, rev]
1652 if self._landing_revision:
1652 if self._landing_revision:
1653 _rev_info = self._landing_revision.split(':')
1653 _rev_info = self._landing_revision.split(':')
1654 if len(_rev_info) < 2:
1654 if len(_rev_info) < 2:
1655 _rev_info.insert(0, 'rev')
1655 _rev_info.insert(0, 'rev')
1656 return [_rev_info[0], _rev_info[1]]
1656 return [_rev_info[0], _rev_info[1]]
1657 return [None, None]
1657 return [None, None]
1658
1658
1659 @landing_rev.setter
1659 @landing_rev.setter
1660 def landing_rev(self, val):
1660 def landing_rev(self, val):
1661 if ':' not in val:
1661 if ':' not in val:
1662 raise ValueError('value must be delimited with `:` and consist '
1662 raise ValueError('value must be delimited with `:` and consist '
1663 'of <rev_type>:<rev>, got %s instead' % val)
1663 'of <rev_type>:<rev>, got %s instead' % val)
1664 self._landing_revision = val
1664 self._landing_revision = val
1665
1665
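# Illustrative usage (not part of the original source; ``repo`` stands for an
# existing Repository instance). The setter requires a '<rev_type>:<rev>'
# string and the getter always yields a two-element list:
#
#   repo.landing_rev = 'branch:default'
#   repo.landing_rev               # -> ['branch', 'default']
#   repo.landing_rev = 'default'   # raises ValueError, no ':' delimiter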
1666 @hybrid_property
1666 @hybrid_property
1667 def locked(self):
1667 def locked(self):
1668 if self._locked:
1668 if self._locked:
1669 user_id, timelocked, reason = self._locked.split(':')
1669 user_id, timelocked, reason = self._locked.split(':')
1670 lock_values = int(user_id), timelocked, reason
1670 lock_values = int(user_id), timelocked, reason
1671 else:
1671 else:
1672 lock_values = [None, None, None]
1672 lock_values = [None, None, None]
1673 return lock_values
1673 return lock_values
1674
1674
1675 @locked.setter
1675 @locked.setter
1676 def locked(self, val):
1676 def locked(self, val):
1677 if val and isinstance(val, (list, tuple)):
1677 if val and isinstance(val, (list, tuple)):
1678 self._locked = ':'.join(map(str, val))
1678 self._locked = ':'.join(map(str, val))
1679 else:
1679 else:
1680 self._locked = None
1680 self._locked = None
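
    # Illustrative sketch (editor's addition, not part of the original module):
    # the lock is persisted as a single 'user_id:timestamp:reason' string and
    # read back as a 3-element value, e.g. with an existing `repo`:
    #
    #     Repository.lock(repo, user_id=2)          # reason defaults to LOCK_AUTOMATIC
    #     user_id, locked_at, reason = repo.locked  # -> (2, '<timestamp>', '<reason>')
    #     Repository.unlock(repo)                   # repo.locked -> [None, None, None]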

    @hybrid_property
    def changeset_cache(self):
        from rhodecode.lib.vcs.backends.base import EmptyCommit
        dummy = EmptyCommit().__json__()
        if not self._changeset_cache:
            return dummy
        try:
            return json.loads(self._changeset_cache)
        except TypeError:
            return dummy
        except Exception:
            log.error(traceback.format_exc())
            return dummy

    @changeset_cache.setter
    def changeset_cache(self, val):
        try:
            self._changeset_cache = json.dumps(val)
        except Exception:
            log.error(traceback.format_exc())

    @hybrid_property
    def repo_name(self):
        return self._repo_name

    @repo_name.setter
    def repo_name(self, value):
        self._repo_name = value
        self._repo_name_hash = hashlib.sha1(safe_str(value)).hexdigest()

    @classmethod
    def normalize_repo_name(cls, repo_name):
        """
        Normalizes an OS-specific repo_name to the format stored internally in
        the database, using URL_SEP

        :param cls:
        :param repo_name:
        """
        return cls.NAME_SEP.join(repo_name.split(os.sep))

    @classmethod
    def get_by_repo_name(cls, repo_name, cache=False, identity_cache=False):
        session = Session()
        q = session.query(cls).filter(cls.repo_name == repo_name)

        if cache:
            if identity_cache:
                val = cls.identity_cache(session, 'repo_name', repo_name)
                if val:
                    return val
            else:
                cache_key = "get_repo_by_name_%s" % _hash_key(repo_name)
                q = q.options(
                    FromCache("sql_cache_short", cache_key))

        return q.scalar()
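
    # Illustrative usage sketch (editor's addition, not part of the original
    # module): with cache=True the result is memoized in the `sql_cache_short`
    # region (dogpile-backed as of this changeset), while identity_cache=True
    # consults the SQLAlchemy identity map instead of issuing a query:
    #
    #     repo = Repository.get_by_repo_name('group/my-repo', cache=True)
    #
    # The cache key is derived from the repository name via _hash_key(), so a
    # renamed repository naturally maps to a different key.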

    @classmethod
    def get_by_id_or_repo_name(cls, repoid):
        if isinstance(repoid, (int, long)):
            try:
                repo = cls.get(repoid)
            except ValueError:
                repo = None
        else:
            repo = cls.get_by_repo_name(repoid)
        return repo

    @classmethod
    def get_by_full_path(cls, repo_full_path):
        repo_name = repo_full_path.split(cls.base_path(), 1)[-1]
        repo_name = cls.normalize_repo_name(repo_name)
        return cls.get_by_repo_name(repo_name.strip(URL_SEP))

    @classmethod
    def get_repo_forks(cls, repo_id):
        return cls.query().filter(Repository.fork_id == repo_id)

    @classmethod
    def base_path(cls):
        """
        Returns the base path where all repos are stored

        :param cls:
        """
        q = Session().query(RhodeCodeUi)\
            .filter(RhodeCodeUi.ui_key == cls.NAME_SEP)
        q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
        return q.one().ui_value

    @classmethod
    def get_all_repos(cls, user_id=Optional(None), group_id=Optional(None),
                      case_insensitive=True):
        q = Repository.query()

        if not isinstance(user_id, Optional):
            q = q.filter(Repository.user_id == user_id)

        if not isinstance(group_id, Optional):
            q = q.filter(Repository.group_id == group_id)

        if case_insensitive:
            q = q.order_by(func.lower(Repository.repo_name))
        else:
            q = q.order_by(Repository.repo_name)
        return q.all()

    @property
    def forks(self):
        """
        Return forks of this repo
        """
        return Repository.get_repo_forks(self.repo_id)

    @property
    def parent(self):
        """
        Returns fork parent
        """
        return self.fork

    @property
    def just_name(self):
        return self.repo_name.split(self.NAME_SEP)[-1]

    @property
    def groups_with_parents(self):
        groups = []
        if self.group is None:
            return groups

        cur_gr = self.group
        groups.insert(0, cur_gr)
        while 1:
            gr = getattr(cur_gr, 'parent_group', None)
            cur_gr = cur_gr.parent_group
            if gr is None:
                break
            groups.insert(0, gr)

        return groups

    @property
    def groups_and_repo(self):
        return self.groups_with_parents, self

    @LazyProperty
    def repo_path(self):
        """
        Returns the base full path for this repository, i.e. where it actually
        exists on the filesystem
        """
        q = Session().query(RhodeCodeUi).filter(
            RhodeCodeUi.ui_key == self.NAME_SEP)
        q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
        return q.one().ui_value

    @property
    def repo_full_path(self):
        p = [self.repo_path]
        # we need to split the name by / since this is how we store the
        # names in the database, but that eventually needs to be converted
        # into a valid system path
        p += self.repo_name.split(self.NAME_SEP)
        return os.path.join(*map(safe_unicode, p))

    @property
    def cache_keys(self):
        """
        Returns associated cache keys for that repo
        """
        return CacheKey.query()\
            .filter(CacheKey.cache_args == self.repo_name)\
            .order_by(CacheKey.cache_key)\
            .all()

    @property
    def cached_diffs_relative_dir(self):
        """
        Return the cached diffs directory as a path relative to the repository
        store, safe to display to users who shouldn't know the absolute store
        path
        """
        return os.path.join(
            os.path.dirname(self.repo_name),
            self.cached_diffs_dir.split(os.path.sep)[-1])

    @property
    def cached_diffs_dir(self):
        path = self.repo_full_path
        return os.path.join(
            os.path.dirname(path),
            '.__shadow_diff_cache_repo_{}'.format(self.repo_id))

    def cached_diffs(self):
        diff_cache_dir = self.cached_diffs_dir
        if os.path.isdir(diff_cache_dir):
            return os.listdir(diff_cache_dir)
        return []

    def shadow_repos(self):
        shadow_repos_pattern = '.__shadow_repo_{}'.format(self.repo_id)
        return [
            x for x in os.listdir(os.path.dirname(self.repo_full_path))
            if x.startswith(shadow_repos_pattern)]

    def get_new_name(self, repo_name):
        """
        returns the new full repository name based on the assigned group and
        the new name

        :param repo_name:
        """
        path_prefix = self.group.full_path_splitted if self.group else []
        return self.NAME_SEP.join(path_prefix + [repo_name])

    @property
    def _config(self):
        """
        Returns db based config object.
        """
        from rhodecode.lib.utils import make_db_config
        return make_db_config(clear_session=False, repo=self)

    def permissions(self, with_admins=True, with_owner=True):
        q = UserRepoToPerm.query().filter(UserRepoToPerm.repository == self)
        q = q.options(joinedload(UserRepoToPerm.repository),
                      joinedload(UserRepoToPerm.user),
                      joinedload(UserRepoToPerm.permission),)

        # get owners, admins and their permissions. We do a trick of re-writing
        # objects from sqlalchemy to named-tuples because the sqlalchemy session
        # holds a global reference and changing one object would propagate to
        # all others. This means that if an admin is also an owner, a change to
        # admin_row would propagate to both objects
        perm_rows = []
        for _usr in q.all():
            usr = AttributeDict(_usr.user.get_dict())
            usr.permission = _usr.permission.permission_name
            perm_rows.append(usr)

        # filter the perm rows by 'default' first and then sort them by
        # admin, write, read, none permissions, sorted again alphabetically
        # within each group
        perm_rows = sorted(perm_rows, key=display_user_sort)

        _admin_perm = 'repository.admin'
        owner_row = []
        if with_owner:
            usr = AttributeDict(self.user.get_dict())
            usr.owner_row = True
            usr.permission = _admin_perm
            owner_row.append(usr)

        super_admin_rows = []
        if with_admins:
            for usr in User.get_all_super_admins():
                # if this admin is also owner, don't double the record
                if usr.user_id == owner_row[0].user_id:
                    owner_row[0].admin_row = True
                else:
                    usr = AttributeDict(usr.get_dict())
                    usr.admin_row = True
                    usr.permission = _admin_perm
                    super_admin_rows.append(usr)

        return super_admin_rows + owner_row + perm_rows

    def permission_user_groups(self):
        q = UserGroupRepoToPerm.query().filter(
            UserGroupRepoToPerm.repository == self)
        q = q.options(joinedload(UserGroupRepoToPerm.repository),
                      joinedload(UserGroupRepoToPerm.users_group),
                      joinedload(UserGroupRepoToPerm.permission),)

        perm_rows = []
        for _user_group in q.all():
            usr = AttributeDict(_user_group.users_group.get_dict())
            usr.permission = _user_group.permission.permission_name
            perm_rows.append(usr)

        perm_rows = sorted(perm_rows, key=display_user_group_sort)
        return perm_rows

    def get_api_data(self, include_secrets=False):
        """
        Common function for generating repo api data

        :param include_secrets: See :meth:`User.get_api_data`.

        """
        # TODO: mikhail: Here there is an anti-pattern, we probably need to
        # move these methods to the models level.
        from rhodecode.model.settings import SettingsModel
        from rhodecode.model.repo import RepoModel

        repo = self
        _user_id, _time, _reason = self.locked

        data = {
            'repo_id': repo.repo_id,
            'repo_name': repo.repo_name,
            'repo_type': repo.repo_type,
            'clone_uri': repo.clone_uri or '',
            'push_uri': repo.push_uri or '',
            'url': RepoModel().get_url(self),
            'private': repo.private,
            'created_on': repo.created_on,
            'description': repo.description_safe,
            'landing_rev': repo.landing_rev,
            'owner': repo.user.username,
            'fork_of': repo.fork.repo_name if repo.fork else None,
            'fork_of_id': repo.fork.repo_id if repo.fork else None,
            'enable_statistics': repo.enable_statistics,
            'enable_locking': repo.enable_locking,
            'enable_downloads': repo.enable_downloads,
            'last_changeset': repo.changeset_cache,
            'locked_by': User.get(_user_id).get_api_data(
                include_secrets=include_secrets) if _user_id else None,
            'locked_date': time_to_datetime(_time) if _time else None,
            'lock_reason': _reason if _reason else None,
        }

        # TODO: mikhail: should be per-repo settings here
        rc_config = SettingsModel().get_all_settings()
        repository_fields = str2bool(
            rc_config.get('rhodecode_repository_fields'))
        if repository_fields:
            for f in self.extra_fields:
                data[f.field_key_prefixed] = f.field_value

        return data
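
    # Illustrative sketch (editor's addition, not part of the original module):
    # the returned dict is what the JSON-RPC API exposes for a repository, e.g.
    # roughly:
    #
    #     data = repo.get_api_data()
    #     data['repo_name']    # u'group/my-repo'
    #     data['landing_rev']  # ['branch', 'default']
    #     data['locked_by']    # None unless the repository is currently locked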

    @classmethod
    def lock(cls, repo, user_id, lock_time=None, lock_reason=None):
        if not lock_time:
            lock_time = time.time()
        if not lock_reason:
            lock_reason = cls.LOCK_AUTOMATIC
        repo.locked = [user_id, lock_time, lock_reason]
        Session().add(repo)
        Session().commit()

    @classmethod
    def unlock(cls, repo):
        repo.locked = None
        Session().add(repo)
        Session().commit()

    @classmethod
    def getlock(cls, repo):
        return repo.locked

    def is_user_lock(self, user_id):
        if self.locked[0]:
            lock_user_id = safe_int(self.locked[0])
            user_id = safe_int(user_id)
            # both are ints, and they are equal
            return all([lock_user_id, user_id]) and lock_user_id == user_id

        return False

    def get_locking_state(self, action, user_id, only_when_enabled=True):
        """
        Checks locking on this repository. If locking is enabled and a lock is
        present, returns a tuple of make_lock, locked, locked_by.
        make_lock can have 3 states: None (do nothing), True (make a lock) and
        False (release the lock). This value is later propagated to hooks,
        which do the actual locking. Think of it as a signal passed to the
        hooks telling them what to do.

        """
        # TODO: johbo: This is part of the business logic and should be moved
        # into the RepositoryModel.

        if action not in ('push', 'pull'):
            raise ValueError("Invalid action value: %s" % repr(action))

        # defines if locked error should be thrown to user
        currently_locked = False
        # defines if new lock should be made, tri-state
        make_lock = None
        repo = self
        user = User.get(user_id)

        lock_info = repo.locked

        if repo and (repo.enable_locking or not only_when_enabled):
            if action == 'push':
                # check if it's already locked; if it is, compare users
                locked_by_user_id = lock_info[0]
                if user.user_id == locked_by_user_id:
                    log.debug(
                        'Got `push` action from user %s, now unlocking', user)
                    # unlock if we have push from user who locked
                    make_lock = False
                else:
                    # we're not the same user who locked it, reject with the
                    # code defined in settings (default is 423 HTTP Locked)
                    log.debug('Repo %s is currently locked by %s', repo, user)
                    currently_locked = True
            elif action == 'pull':
                # [0] user [1] date
                if lock_info[0] and lock_info[1]:
                    log.debug('Repo %s is currently locked by %s', repo, user)
                    currently_locked = True
                else:
                    log.debug('Setting lock on repo %s by %s', repo, user)
                    make_lock = True

        else:
            log.debug('Repository %s does not have locking enabled', repo)

        log.debug('FINAL locking values make_lock:%s,locked:%s,locked_by:%s',
                  make_lock, currently_locked, lock_info)

        from rhodecode.lib.auth import HasRepoPermissionAny
        perm_check = HasRepoPermissionAny('repository.write', 'repository.admin')
        if make_lock and not perm_check(repo_name=repo.repo_name, user=user):
            # if we don't have at least write permission we cannot make a lock
            log.debug('lock state reset back to FALSE due to lack '
                      'of at least write permission')
            make_lock = False

        return make_lock, currently_locked, lock_info
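
    # Illustrative sketch (editor's addition, not part of the original module)
    # of interpreting the tri-state result; the variable names are hypothetical:
    #
    #     make_lock, locked, locked_by = repo.get_locking_state('push', user_id)
    #     if make_lock is True:     # hooks should acquire the lock
    #         ...
    #     elif make_lock is False:  # hooks should release the lock
    #         ...
    #     # make_lock is None -> leave the lock untouched; `locked` tells the
    #     # caller whether the operation should be rejected (HTTP 423 by default).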

    @property
    def last_db_change(self):
        return self.updated_on

    @property
    def clone_uri_hidden(self):
        clone_uri = self.clone_uri
        if clone_uri:
            import urlobject
            url_obj = urlobject.URLObject(cleaned_uri(clone_uri))
            if url_obj.password:
                clone_uri = url_obj.with_password('*****')
        return clone_uri

    @property
    def push_uri_hidden(self):
        push_uri = self.push_uri
        if push_uri:
            import urlobject
            url_obj = urlobject.URLObject(cleaned_uri(push_uri))
            if url_obj.password:
                push_uri = url_obj.with_password('*****')
        return push_uri

    def clone_url(self, **override):
        from rhodecode.model.settings import SettingsModel

        uri_tmpl = None
        if 'with_id' in override:
            uri_tmpl = self.DEFAULT_CLONE_URI_ID
            del override['with_id']

        if 'uri_tmpl' in override:
            uri_tmpl = override['uri_tmpl']
            del override['uri_tmpl']

        ssh = False
        if 'ssh' in override:
            ssh = True
            del override['ssh']

        # we didn't override our tmpl from **override
        if not uri_tmpl:
            rc_config = SettingsModel().get_all_settings(cache=True)
            if ssh:
                uri_tmpl = rc_config.get(
                    'rhodecode_clone_uri_ssh_tmpl') or self.DEFAULT_CLONE_URI_SSH
            else:
                uri_tmpl = rc_config.get(
                    'rhodecode_clone_uri_tmpl') or self.DEFAULT_CLONE_URI

        request = get_current_request()
        return get_clone_url(request=request,
                             uri_tmpl=uri_tmpl,
                             repo_name=self.repo_name,
                             repo_id=self.repo_id, **override)
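
    # Illustrative usage sketch (editor's addition, not part of the original
    # module): the keyword overrides recognised above can be combined, e.g.:
    #
    #     repo.clone_url()              # uses the rhodecode_clone_uri_tmpl setting
    #     repo.clone_url(ssh=True)      # uses the SSH clone template instead
    #     repo.clone_url(with_id=True)  # uses the id-based DEFAULT_CLONE_URI_ID template
    #
    # Any remaining keyword arguments are passed straight through to
    # get_clone_url().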

    def set_state(self, state):
        self.repo_state = state
        Session().add(self)
    #==========================================================================
    # SCM PROPERTIES
    #==========================================================================

    def get_commit(self, commit_id=None, commit_idx=None, pre_load=None):
        return get_commit_safe(
            self.scm_instance(), commit_id, commit_idx, pre_load=pre_load)

    def get_changeset(self, rev=None, pre_load=None):
        warnings.warn("Use get_commit", DeprecationWarning)
        commit_id = None
        commit_idx = None
        if isinstance(rev, basestring):
            commit_id = rev
        else:
            commit_idx = rev
        return self.get_commit(commit_id=commit_id, commit_idx=commit_idx,
                               pre_load=pre_load)

    def get_landing_commit(self):
        """
        Returns landing commit, or if that doesn't exist returns the tip
        """
        _rev_type, _rev = self.landing_rev
        commit = self.get_commit(_rev)
        if isinstance(commit, EmptyCommit):
            return self.get_commit()
        return commit

    def update_commit_cache(self, cs_cache=None, config=None):
        """
        Update cache of last changeset for repository, keys should be::

            short_id
            raw_id
            revision
            parents
            message
            date
            author

        :param cs_cache:
        """
        from rhodecode.lib.vcs.backends.base import BaseChangeset
        if cs_cache is None:
            # use no-cache version here
            scm_repo = self.scm_instance(cache=False, config=config)
            if scm_repo:
                cs_cache = scm_repo.get_commit(
                    pre_load=["author", "date", "message", "parents"])
            else:
                cs_cache = EmptyCommit()

        if isinstance(cs_cache, BaseChangeset):
            cs_cache = cs_cache.__json__()

        def is_outdated(new_cs_cache):
            if (new_cs_cache['raw_id'] != self.changeset_cache['raw_id'] or
                    new_cs_cache['revision'] != self.changeset_cache['revision']):
                return True
            return False

        # check if we have maybe already latest cached revision
        if is_outdated(cs_cache) or not self.changeset_cache:
            _default = datetime.datetime.utcnow()
            last_change = cs_cache.get('date') or _default
            if self.updated_on and self.updated_on > last_change:
                # we check if last update is newer than the new value
                # if yes, we use the current timestamp instead. Imagine you get
                # an old commit pushed 1y ago, we'd set last update 1y ago.
                last_change = _default
            log.debug('updated repo %s with new cs cache %s',
                      self.repo_name, cs_cache)
            self.updated_on = last_change
            self.changeset_cache = cs_cache
            Session().add(self)
            Session().commit()
        else:
            log.debug('Skipping update_commit_cache for repo:`%s` '
                      'commit already with latest changes', self.repo_name)
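
    # Illustrative sketch (editor's addition, not part of the original module):
    # when cs_cache is given explicitly it is a plain dict shaped like
    # BaseChangeset.__json__(), roughly:
    #
    #     repo.update_commit_cache(cs_cache={
    #         'raw_id': '<40-char sha>', 'short_id': '<short sha>',
    #         'revision': 42, 'parents': [], 'message': 'commit message',
    #         'date': datetime.datetime.utcnow(), 'author': 'Jane <jane@example.com>',
    #     })
    #
    # Calling it with no arguments re-reads the tip from an uncached
    # scm_instance() and stores that instead.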

    @property
    def tip(self):
        return self.get_commit('tip')

    @property
    def author(self):
        return self.tip.author

    @property
    def last_change(self):
        return self.scm_instance().last_change

    def get_comments(self, revisions=None):
        """
        Returns comments for this repository grouped by revisions

        :param revisions: filter query by revisions only
        """
        cmts = ChangesetComment.query()\
            .filter(ChangesetComment.repo == self)
        if revisions:
            cmts = cmts.filter(ChangesetComment.revision.in_(revisions))
        grouped = collections.defaultdict(list)
        for cmt in cmts.all():
            grouped[cmt.revision].append(cmt)
        return grouped

    def statuses(self, revisions=None):
        """
        Returns statuses for this repository

        :param revisions: list of revisions to get statuses for
        """
        statuses = ChangesetStatus.query()\
            .filter(ChangesetStatus.repo == self)\
            .filter(ChangesetStatus.version == 0)

        if revisions:
            # Try doing the filtering in chunks to avoid hitting limits
            size = 500
            status_results = []
            for chunk in xrange(0, len(revisions), size):
                status_results += statuses.filter(
                    ChangesetStatus.revision.in_(
                        revisions[chunk: chunk+size])
                ).all()
        else:
            status_results = statuses.all()

        grouped = {}

        # maybe we have an open new pull request without a status?
        stat = ChangesetStatus.STATUS_UNDER_REVIEW
        status_lbl = ChangesetStatus.get_status_lbl(stat)
        for pr in PullRequest.query().filter(PullRequest.source_repo == self).all():
            for rev in pr.revisions:
                pr_id = pr.pull_request_id
                pr_repo = pr.target_repo.repo_name
                grouped[rev] = [stat, status_lbl, pr_id, pr_repo]

        for stat in status_results:
            pr_id = pr_repo = None
            if stat.pull_request:
                pr_id = stat.pull_request.pull_request_id
                pr_repo = stat.pull_request.target_repo.repo_name
            grouped[stat.revision] = [str(stat.status), stat.status_lbl,
                                      pr_id, pr_repo]
        return grouped

    # ==========================================================================
    # SCM CACHE INSTANCE
    # ==========================================================================

    def scm_instance(self, **kwargs):
        import rhodecode

        # Passing a config will not hit the cache; currently only used
        # for repo2dbmapper
        config = kwargs.pop('config', None)
        cache = kwargs.pop('cache', None)
        full_cache = str2bool(rhodecode.CONFIG.get('vcs_full_cache'))
        # if cache is NOT defined use the global default, else we have full
        # control over cache behaviour
        if cache is None and full_cache and not config:
            return self._get_instance_cached()
        return self._get_instance(cache=bool(cache), config=config)

    def _get_instance_cached(self):
        @cache_region('long_term')
        def _get_repo(cache_key):
            return self._get_instance()

        invalidator_context = CacheKey.repo_context_cache(
            _get_repo, self.repo_name, None, thread_scoped=True)

        with invalidator_context as context:
            context.invalidate()
            repo = context.compute()

        return repo
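
    # Editor's note (illustrative, not part of the original module): the vcs
    # instance returned above is memoized in the 'long_term' cache region via
    # @cache_region, while the surrounding CacheKey.repo_context_cache context
    # invalidates and recomputes the entry whenever the repository's cache key
    # has been marked dirty, e.g. after a push.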

    def _get_instance(self, cache=True, config=None):
        config = config or self._config
        custom_wire = {
            'cache': cache  # controls the vcs.remote cache
        }
        repo = get_vcs_instance(
            repo_path=safe_str(self.repo_full_path),
            config=config,
            with_wire=custom_wire,
            create=False,
            _vcs_alias=self.repo_type)

        return repo

    def __json__(self):
        return {'landing_rev': self.landing_rev}

    def get_dict(self):

        # Since we transformed `repo_name` to a hybrid property, we need to
        # keep compatibility with the code which uses `repo_name` field.

        result = super(Repository, self).get_dict()
        result['repo_name'] = result.pop('_repo_name', None)
        return result


class RepoGroup(Base, BaseModel):
    __tablename__ = 'groups'
    __table_args__ = (
        UniqueConstraint('group_name', 'group_parent_id'),
        CheckConstraint('group_id != group_parent_id'),
        base_table_args,
    )
    __mapper_args__ = {'order_by': 'group_name'}

    CHOICES_SEPARATOR = '/'  # used to generate select2 choices for nested groups

    group_id = Column("group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    group_name = Column("group_name", String(255), nullable=False, unique=True, default=None)
    group_parent_id = Column("group_parent_id", Integer(), ForeignKey('groups.group_id'), nullable=True, unique=None, default=None)
    group_description = Column("group_description", String(10000), nullable=True, unique=None, default=None)
    enable_locking = Column("enable_locking", Boolean(), nullable=False, unique=None, default=False)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None)
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    updated_on = Column('updated_on', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now)
    personal = Column('personal', Boolean(), nullable=True, unique=None, default=None)

    repo_group_to_perm = relationship('UserRepoGroupToPerm', cascade='all', order_by='UserRepoGroupToPerm.group_to_perm_id')
    users_group_to_perm = relationship('UserGroupRepoGroupToPerm', cascade='all')
    parent_group = relationship('RepoGroup', remote_side=group_id)
    user = relationship('User')
    integrations = relationship('Integration',
                                cascade="all, delete, delete-orphan")

    def __init__(self, group_name='', parent_group=None):
        self.group_name = group_name
        self.parent_group = parent_group

    def __unicode__(self):
        return u"<%s('id:%s:%s')>" % (
            self.__class__.__name__, self.group_id, self.group_name)

    @hybrid_property
    def description_safe(self):
        from rhodecode.lib import helpers as h
        return h.escape(self.group_description)

    @classmethod
    def _generate_choice(cls, repo_group):
        from webhelpers.html import literal as _literal
        _name = lambda k: _literal(cls.CHOICES_SEPARATOR.join(k))
        return repo_group.group_id, _name(repo_group.full_path_splitted)

    @classmethod
    def groups_choices(cls, groups=None, show_empty_group=True):
        if not groups:
            groups = cls.query().all()

        repo_groups = []
        if show_empty_group:
            repo_groups = [(-1, u'-- %s --' % _('No parent'))]

        repo_groups.extend([cls._generate_choice(x) for x in groups])

        repo_groups = sorted(
            repo_groups, key=lambda t: t[1].split(cls.CHOICES_SEPARATOR)[0])
        return repo_groups

    @classmethod
    def url_sep(cls):
        return URL_SEP

    @classmethod
    def get_by_group_name(cls, group_name, cache=False, case_insensitive=False):
        if case_insensitive:
            gr = cls.query().filter(func.lower(cls.group_name)
                                    == func.lower(group_name))
        else:
            gr = cls.query().filter(cls.group_name == group_name)
        if cache:
            name_key = _hash_key(group_name)
            gr = gr.options(
                FromCache("sql_cache_short", "get_group_%s" % name_key))
        return gr.scalar()
2454
2454
2455 @classmethod
2455 @classmethod
2456 def get_user_personal_repo_group(cls, user_id):
2456 def get_user_personal_repo_group(cls, user_id):
2457 user = User.get(user_id)
2457 user = User.get(user_id)
2458 if user.username == User.DEFAULT_USER:
2458 if user.username == User.DEFAULT_USER:
2459 return None
2459 return None
2460
2460
2461 return cls.query()\
2461 return cls.query()\
2462 .filter(cls.personal == true()) \
2462 .filter(cls.personal == true()) \
2463 .filter(cls.user == user).scalar()
2463 .filter(cls.user == user).scalar()
2464
2464
2465 @classmethod
2465 @classmethod
2466 def get_all_repo_groups(cls, user_id=Optional(None), group_id=Optional(None),
2466 def get_all_repo_groups(cls, user_id=Optional(None), group_id=Optional(None),
2467 case_insensitive=True):
2467 case_insensitive=True):
2468 q = RepoGroup.query()
2468 q = RepoGroup.query()
2469
2469
2470 if not isinstance(user_id, Optional):
2470 if not isinstance(user_id, Optional):
2471 q = q.filter(RepoGroup.user_id == user_id)
2471 q = q.filter(RepoGroup.user_id == user_id)
2472
2472
2473 if not isinstance(group_id, Optional):
2473 if not isinstance(group_id, Optional):
2474 q = q.filter(RepoGroup.group_parent_id == group_id)
2474 q = q.filter(RepoGroup.group_parent_id == group_id)
2475
2475
2476 if case_insensitive:
2476 if case_insensitive:
2477 q = q.order_by(func.lower(RepoGroup.group_name))
2477 q = q.order_by(func.lower(RepoGroup.group_name))
2478 else:
2478 else:
2479 q = q.order_by(RepoGroup.group_name)
2479 q = q.order_by(RepoGroup.group_name)
2480 return q.all()
2480 return q.all()
2481
2481
2482 @property
2482 @property
2483 def parents(self):
2483 def parents(self):
2484 parents_recursion_limit = 10
2484 parents_recursion_limit = 10
2485 groups = []
2485 groups = []
2486 if self.parent_group is None:
2486 if self.parent_group is None:
2487 return groups
2487 return groups
2488 cur_gr = self.parent_group
2488 cur_gr = self.parent_group
2489 groups.insert(0, cur_gr)
2489 groups.insert(0, cur_gr)
2490 cnt = 0
2490 cnt = 0
2491 while 1:
2491 while 1:
2492 cnt += 1
2492 cnt += 1
2493 gr = getattr(cur_gr, 'parent_group', None)
2493 gr = getattr(cur_gr, 'parent_group', None)
2494 cur_gr = cur_gr.parent_group
2494 cur_gr = cur_gr.parent_group
2495 if gr is None:
2495 if gr is None:
2496 break
2496 break
2497 if cnt == parents_recursion_limit:
2497 if cnt == parents_recursion_limit:
2498 # this will prevent accidental infinit loops
2498 # this will prevent accidental infinit loops
2499 log.error(('more than %s parents found for group %s, stopping '
2499 log.error(('more than %s parents found for group %s, stopping '
2500 'recursive parent fetching' % (parents_recursion_limit, self)))
2500 'recursive parent fetching' % (parents_recursion_limit, self)))
2501 break
2501 break
2502
2502
2503 groups.insert(0, gr)
2503 groups.insert(0, gr)
2504 return groups
2504 return groups
2505
2505
2506 @property
2506 @property
2507 def last_db_change(self):
2507 def last_db_change(self):
2508 return self.updated_on
2508 return self.updated_on
2509
2509
2510 @property
2510 @property
2511 def children(self):
2511 def children(self):
2512 return RepoGroup.query().filter(RepoGroup.parent_group == self)
2512 return RepoGroup.query().filter(RepoGroup.parent_group == self)
2513
2513
2514 @property
2514 @property
2515 def name(self):
2515 def name(self):
2516 return self.group_name.split(RepoGroup.url_sep())[-1]
2516 return self.group_name.split(RepoGroup.url_sep())[-1]
2517
2517
2518 @property
2518 @property
2519 def full_path(self):
2519 def full_path(self):
2520 return self.group_name
2520 return self.group_name
2521
2521
2522 @property
2522 @property
2523 def full_path_splitted(self):
2523 def full_path_splitted(self):
2524 return self.group_name.split(RepoGroup.url_sep())
2524 return self.group_name.split(RepoGroup.url_sep())
2525
2525
2526 @property
2526 @property
2527 def repositories(self):
2527 def repositories(self):
2528 return Repository.query()\
2528 return Repository.query()\
2529 .filter(Repository.group == self)\
2529 .filter(Repository.group == self)\
2530 .order_by(Repository.repo_name)
2530 .order_by(Repository.repo_name)
2531
2531
2532 @property
2532 @property
2533 def repositories_recursive_count(self):
2533 def repositories_recursive_count(self):
2534 cnt = self.repositories.count()
2534 cnt = self.repositories.count()
2535
2535
2536 def children_count(group):
2536 def children_count(group):
2537 cnt = 0
2537 cnt = 0
2538 for child in group.children:
2538 for child in group.children:
2539 cnt += child.repositories.count()
2539 cnt += child.repositories.count()
2540 cnt += children_count(child)
2540 cnt += children_count(child)
2541 return cnt
2541 return cnt
2542
2542
2543 return cnt + children_count(self)
2543 return cnt + children_count(self)
2544
2544
    def _recursive_objects(self, include_repos=True):
        all_ = []

        def _get_members(root_gr):
            if include_repos:
                for r in root_gr.repositories:
                    all_.append(r)
            childs = root_gr.children.all()
            if childs:
                for gr in childs:
                    all_.append(gr)
                    _get_members(gr)

        _get_members(self)
        return [self] + all_

    def recursive_groups_and_repos(self):
        """
        Recursively returns all groups, together with the repositories in
        those groups
        """
        return self._recursive_objects()

    def recursive_groups(self):
        """
        Returns all children groups for this group including children of children
        """
        return self._recursive_objects(include_repos=False)

    def get_new_name(self, group_name):
        """
        Returns the new full group name based on the parent and the new name

        :param group_name:
        """
        path_prefix = (self.parent_group.full_path_splitted if
                       self.parent_group else [])
        return RepoGroup.url_sep().join(path_prefix + [group_name])

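    # Illustrative sketch (an assumption, not part of the original module):
    # ``get_new_name`` only joins path segments for a rename of this group; it
    # does not move anything on disk. For a hypothetical group at
    # 'company/projects' (parent 'company'):
    #
    #     gr.get_new_name('tools')  # -> 'company/tools'
    #     # a top-level group (no parent) would simply return 'tools'
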
    def permissions(self, with_admins=True, with_owner=True):
        q = UserRepoGroupToPerm.query().filter(UserRepoGroupToPerm.group == self)
        q = q.options(joinedload(UserRepoGroupToPerm.group),
                      joinedload(UserRepoGroupToPerm.user),
                      joinedload(UserRepoGroupToPerm.permission),)

        # get owners, admins and their permissions. We re-write the sqlalchemy
        # objects into plain AttributeDict records because the sqlalchemy
        # session keeps a single shared instance per row, so mutating one
        # object would propagate to every other reference. E.g. if an admin is
        # also the owner, setting admin_row on one record would leak into both.
        perm_rows = []
        for _usr in q.all():
            usr = AttributeDict(_usr.user.get_dict())
            usr.permission = _usr.permission.permission_name
            perm_rows.append(usr)

        # filter the perm rows by 'default' first and then sort them by
        # admin, write, read, none permissions, sorted again alphabetically
        # within each group
        perm_rows = sorted(perm_rows, key=display_user_sort)

        _admin_perm = 'group.admin'
        owner_row = []
        if with_owner:
            usr = AttributeDict(self.user.get_dict())
            usr.owner_row = True
            usr.permission = _admin_perm
            owner_row.append(usr)

        super_admin_rows = []
        if with_admins:
            for usr in User.get_all_super_admins():
                # if this admin is also the owner, don't duplicate the record
                if usr.user_id == owner_row[0].user_id:
                    owner_row[0].admin_row = True
                else:
                    usr = AttributeDict(usr.get_dict())
                    usr.admin_row = True
                    usr.permission = _admin_perm
                    super_admin_rows.append(usr)

        return super_admin_rows + owner_row + perm_rows

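    # Illustrative sketch (an assumption, not part of the original module):
    # the combined rows keep a stable ordering: super-admins first, then the
    # owner, then explicit grants. A hypothetical caller might render them as:
    #
    #     for member in repo_group.permissions():
    #         print(member.username, member.permission,
    #               getattr(member, 'owner_row', False))
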
    def permission_user_groups(self):
        q = UserGroupRepoGroupToPerm.query().filter(UserGroupRepoGroupToPerm.group == self)
        q = q.options(joinedload(UserGroupRepoGroupToPerm.group),
                      joinedload(UserGroupRepoGroupToPerm.users_group),
                      joinedload(UserGroupRepoGroupToPerm.permission),)

        perm_rows = []
        for _user_group in q.all():
            usr = AttributeDict(_user_group.users_group.get_dict())
            usr.permission = _user_group.permission.permission_name
            perm_rows.append(usr)

        perm_rows = sorted(perm_rows, key=display_user_group_sort)
        return perm_rows

    def get_api_data(self):
        """
        Common function for generating api data

        """
        group = self
        data = {
            'group_id': group.group_id,
            'group_name': group.group_name,
            'group_description': group.description_safe,
            'parent_group': group.parent_group.group_name if group.parent_group else None,
            'repositories': [x.repo_name for x in group.repositories],
            'owner': group.user.username,
        }
        return data


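# Illustrative sketch (an assumption, not part of the original module):
# ``RepoGroup.get_api_data()`` returns a plain dict that the JSON-RPC layer can
# serialize directly; with hypothetical values it looks like:
#
#     {'group_id': 4, 'group_name': 'company/projects',
#      'group_description': 'internal projects', 'parent_group': 'company',
#      'repositories': ['company/projects/tools'], 'owner': 'admin'}
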
class Permission(Base, BaseModel):
    __tablename__ = 'permissions'
    __table_args__ = (
        Index('p_perm_name_idx', 'permission_name'),
        base_table_args,
    )

    PERMS = [
        ('hg.admin', _('RhodeCode Super Administrator')),

        ('repository.none', _('Repository no access')),
        ('repository.read', _('Repository read access')),
        ('repository.write', _('Repository write access')),
        ('repository.admin', _('Repository admin access')),

        ('group.none', _('Repository group no access')),
        ('group.read', _('Repository group read access')),
        ('group.write', _('Repository group write access')),
        ('group.admin', _('Repository group admin access')),

        ('usergroup.none', _('User group no access')),
        ('usergroup.read', _('User group read access')),
        ('usergroup.write', _('User group write access')),
        ('usergroup.admin', _('User group admin access')),

        ('hg.repogroup.create.false', _('Repository Group creation disabled')),
        ('hg.repogroup.create.true', _('Repository Group creation enabled')),

        ('hg.usergroup.create.false', _('User Group creation disabled')),
        ('hg.usergroup.create.true', _('User Group creation enabled')),

        ('hg.create.none', _('Repository creation disabled')),
        ('hg.create.repository', _('Repository creation enabled')),
        ('hg.create.write_on_repogroup.true', _('Repository creation enabled with write permission to a repository group')),
        ('hg.create.write_on_repogroup.false', _('Repository creation disabled with write permission to a repository group')),

        ('hg.fork.none', _('Repository forking disabled')),
        ('hg.fork.repository', _('Repository forking enabled')),

        ('hg.register.none', _('Registration disabled')),
        ('hg.register.manual_activate', _('User Registration with manual account activation')),
        ('hg.register.auto_activate', _('User Registration with automatic account activation')),

        ('hg.password_reset.enabled', _('Password reset enabled')),
        ('hg.password_reset.hidden', _('Password reset hidden')),
        ('hg.password_reset.disabled', _('Password reset disabled')),

        ('hg.extern_activate.manual', _('Manual activation of external account')),
        ('hg.extern_activate.auto', _('Automatic activation of external account')),

        ('hg.inherit_default_perms.false', _('Inherit object permissions from default user disabled')),
        ('hg.inherit_default_perms.true', _('Inherit object permissions from default user enabled')),
    ]

    # definition of system default permissions for DEFAULT user
    DEFAULT_USER_PERMISSIONS = [
        'repository.read',
        'group.read',
        'usergroup.read',
        'hg.create.repository',
        'hg.repogroup.create.false',
        'hg.usergroup.create.false',
        'hg.create.write_on_repogroup.true',
        'hg.fork.repository',
        'hg.register.manual_activate',
        'hg.password_reset.enabled',
        'hg.extern_activate.auto',
        'hg.inherit_default_perms.true',
    ]

    # Weight defines which permissions are more important:
    # the higher the number, the more important the permission.
    PERM_WEIGHTS = {
        'repository.none': 0,
        'repository.read': 1,
        'repository.write': 3,
        'repository.admin': 4,

        'group.none': 0,
        'group.read': 1,
        'group.write': 3,
        'group.admin': 4,

        'usergroup.none': 0,
        'usergroup.read': 1,
        'usergroup.write': 3,
        'usergroup.admin': 4,

        'hg.repogroup.create.false': 0,
        'hg.repogroup.create.true': 1,

        'hg.usergroup.create.false': 0,
        'hg.usergroup.create.true': 1,

        'hg.fork.none': 0,
        'hg.fork.repository': 1,
        'hg.create.none': 0,
        'hg.create.repository': 1
    }

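    # Illustrative sketch (an assumption, not part of the original module):
    # when the same kind of permission is granted through several sources
    # (direct grant, user group, default user), the effective permission can be
    # resolved by weight, e.g.:
    #
    #     candidates = ['repository.read', 'repository.write']
    #     max(candidates, key=Permission.PERM_WEIGHTS.get)  # -> 'repository.write'
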
    permission_id = Column("permission_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    permission_name = Column("permission_name", String(255), nullable=True, unique=None, default=None)
    permission_longname = Column("permission_longname", String(255), nullable=True, unique=None, default=None)

    def __unicode__(self):
        return u"<%s('%s:%s')>" % (
            self.__class__.__name__, self.permission_id, self.permission_name
        )

    @classmethod
    def get_by_key(cls, key):
        return cls.query().filter(cls.permission_name == key).scalar()

    @classmethod
    def get_default_repo_perms(cls, user_id, repo_id=None):
        q = Session().query(UserRepoToPerm, Repository, Permission)\
            .join((Permission, UserRepoToPerm.permission_id == Permission.permission_id))\
            .join((Repository, UserRepoToPerm.repository_id == Repository.repo_id))\
            .filter(UserRepoToPerm.user_id == user_id)
        if repo_id:
            q = q.filter(UserRepoToPerm.repository_id == repo_id)
        return q.all()

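    # Illustrative sketch (an assumption, not part of the original module):
    # the ``get_default_*`` classmethods return 3-tuples of ORM objects, one per
    # matching grant, which a hypothetical caller would unpack like:
    #
    #     for perm_row, repo, perm in Permission.get_default_repo_perms(user_id):
    #         print(repo.repo_name, perm.permission_name)
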
    @classmethod
    def get_default_repo_perms_from_user_group(cls, user_id, repo_id=None):
        q = Session().query(UserGroupRepoToPerm, Repository, Permission)\
            .join(
                Permission,
                UserGroupRepoToPerm.permission_id == Permission.permission_id)\
            .join(
                Repository,
                UserGroupRepoToPerm.repository_id == Repository.repo_id)\
            .join(
                UserGroup,
                UserGroupRepoToPerm.users_group_id ==
                UserGroup.users_group_id)\
            .join(
                UserGroupMember,
                UserGroupRepoToPerm.users_group_id ==
                UserGroupMember.users_group_id)\
            .filter(
                UserGroupMember.user_id == user_id,
                UserGroup.users_group_active == true())
        if repo_id:
            q = q.filter(UserGroupRepoToPerm.repository_id == repo_id)
        return q.all()

    @classmethod
    def get_default_group_perms(cls, user_id, repo_group_id=None):
        q = Session().query(UserRepoGroupToPerm, RepoGroup, Permission)\
            .join((Permission, UserRepoGroupToPerm.permission_id == Permission.permission_id))\
            .join((RepoGroup, UserRepoGroupToPerm.group_id == RepoGroup.group_id))\
            .filter(UserRepoGroupToPerm.user_id == user_id)
        if repo_group_id:
            q = q.filter(UserRepoGroupToPerm.group_id == repo_group_id)
        return q.all()

    @classmethod
    def get_default_group_perms_from_user_group(
            cls, user_id, repo_group_id=None):
        q = Session().query(UserGroupRepoGroupToPerm, RepoGroup, Permission)\
            .join(
                Permission,
                UserGroupRepoGroupToPerm.permission_id ==
                Permission.permission_id)\
            .join(
                RepoGroup,
                UserGroupRepoGroupToPerm.group_id == RepoGroup.group_id)\
            .join(
                UserGroup,
                UserGroupRepoGroupToPerm.users_group_id ==
                UserGroup.users_group_id)\
            .join(
                UserGroupMember,
                UserGroupRepoGroupToPerm.users_group_id ==
                UserGroupMember.users_group_id)\
            .filter(
                UserGroupMember.user_id == user_id,
                UserGroup.users_group_active == true())
        if repo_group_id:
            q = q.filter(UserGroupRepoGroupToPerm.group_id == repo_group_id)
        return q.all()

    @classmethod
    def get_default_user_group_perms(cls, user_id, user_group_id=None):
        q = Session().query(UserUserGroupToPerm, UserGroup, Permission)\
            .join((Permission, UserUserGroupToPerm.permission_id == Permission.permission_id))\
            .join((UserGroup, UserUserGroupToPerm.user_group_id == UserGroup.users_group_id))\
            .filter(UserUserGroupToPerm.user_id == user_id)
        if user_group_id:
            q = q.filter(UserUserGroupToPerm.user_group_id == user_group_id)
        return q.all()

    @classmethod
    def get_default_user_group_perms_from_user_group(
            cls, user_id, user_group_id=None):
        TargetUserGroup = aliased(UserGroup, name='target_user_group')
        q = Session().query(UserGroupUserGroupToPerm, UserGroup, Permission)\
            .join(
                Permission,
                UserGroupUserGroupToPerm.permission_id ==
                Permission.permission_id)\
            .join(
                TargetUserGroup,
                UserGroupUserGroupToPerm.target_user_group_id ==
                TargetUserGroup.users_group_id)\
            .join(
                UserGroup,
                UserGroupUserGroupToPerm.user_group_id ==
                UserGroup.users_group_id)\
            .join(
                UserGroupMember,
                UserGroupUserGroupToPerm.user_group_id ==
                UserGroupMember.users_group_id)\
            .filter(
                UserGroupMember.user_id == user_id,
                UserGroup.users_group_active == true())
        if user_group_id:
            q = q.filter(
                UserGroupUserGroupToPerm.user_group_id == user_group_id)

        return q.all()


class UserRepoToPerm(Base, BaseModel):
    __tablename__ = 'repo_to_perm'
    __table_args__ = (
        UniqueConstraint('user_id', 'repository_id', 'permission_id'),
        base_table_args
    )

    repo_to_perm_id = Column("repo_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)

    user = relationship('User')
    repository = relationship('Repository')
    permission = relationship('Permission')

    @classmethod
    def create(cls, user, repository, permission):
        n = cls()
        n.user = user
        n.repository = repository
        n.permission = permission
        Session().add(n)
        return n

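    # Illustrative sketch (an assumption, not part of the original module):
    # granting repository access is just creating the association row and
    # committing in the caller, e.g. with already-loaded ``user`` and ``repo``:
    #
    #     perm = Permission.get_by_key('repository.write')
    #     UserRepoToPerm.create(user, repo, perm)
    #     Session().commit()
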
    def __unicode__(self):
        return u'<%s => %s >' % (self.user, self.repository)


class UserUserGroupToPerm(Base, BaseModel):
    __tablename__ = 'user_user_group_to_perm'
    __table_args__ = (
        UniqueConstraint('user_id', 'user_group_id', 'permission_id'),
        base_table_args
    )

    user_user_group_to_perm_id = Column("user_user_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
    user_group_id = Column("user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)

    user = relationship('User')
    user_group = relationship('UserGroup')
    permission = relationship('Permission')

    @classmethod
    def create(cls, user, user_group, permission):
        n = cls()
        n.user = user
        n.user_group = user_group
        n.permission = permission
        Session().add(n)
        return n

    def __unicode__(self):
        return u'<%s => %s >' % (self.user, self.user_group)


class UserToPerm(Base, BaseModel):
    __tablename__ = 'user_to_perm'
    __table_args__ = (
        UniqueConstraint('user_id', 'permission_id'),
        base_table_args
    )

    user_to_perm_id = Column("user_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)

    user = relationship('User')
    permission = relationship('Permission', lazy='joined')

    def __unicode__(self):
        return u'<%s => %s >' % (self.user, self.permission)


class UserGroupRepoToPerm(Base, BaseModel):
    __tablename__ = 'users_group_repo_to_perm'
    __table_args__ = (
        UniqueConstraint('repository_id', 'users_group_id', 'permission_id'),
        base_table_args
    )

    users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)

    users_group = relationship('UserGroup')
    permission = relationship('Permission')
    repository = relationship('Repository')

    @classmethod
    def create(cls, users_group, repository, permission):
        n = cls()
        n.users_group = users_group
        n.repository = repository
        n.permission = permission
        Session().add(n)
        return n

    def __unicode__(self):
        return u'<UserGroupRepoToPerm:%s => %s >' % (self.users_group, self.repository)


class UserGroupUserGroupToPerm(Base, BaseModel):
    __tablename__ = 'user_group_user_group_to_perm'
    __table_args__ = (
        UniqueConstraint('target_user_group_id', 'user_group_id', 'permission_id'),
        CheckConstraint('target_user_group_id != user_group_id'),
        base_table_args
    )

    user_group_user_group_to_perm_id = Column("user_group_user_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    target_user_group_id = Column("target_user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
    user_group_id = Column("user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)

    target_user_group = relationship('UserGroup', primaryjoin='UserGroupUserGroupToPerm.target_user_group_id==UserGroup.users_group_id')
    user_group = relationship('UserGroup', primaryjoin='UserGroupUserGroupToPerm.user_group_id==UserGroup.users_group_id')
    permission = relationship('Permission')

    @classmethod
    def create(cls, target_user_group, user_group, permission):
        n = cls()
        n.target_user_group = target_user_group
        n.user_group = user_group
        n.permission = permission
        Session().add(n)
        return n

    def __unicode__(self):
        return u'<UserGroupUserGroup:%s => %s >' % (self.target_user_group, self.user_group)


class UserGroupToPerm(Base, BaseModel):
    __tablename__ = 'users_group_to_perm'
    __table_args__ = (
        UniqueConstraint('users_group_id', 'permission_id',),
        base_table_args
    )

    users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)

    users_group = relationship('UserGroup')
    permission = relationship('Permission')


class UserRepoGroupToPerm(Base, BaseModel):
    __tablename__ = 'user_repo_group_to_perm'
    __table_args__ = (
        UniqueConstraint('user_id', 'group_id', 'permission_id'),
        base_table_args
    )

    group_to_perm_id = Column("group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
    group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)

    user = relationship('User')
    group = relationship('RepoGroup')
    permission = relationship('Permission')

    @classmethod
    def create(cls, user, repository_group, permission):
        n = cls()
        n.user = user
        n.group = repository_group
        n.permission = permission
        Session().add(n)
        return n


class UserGroupRepoGroupToPerm(Base, BaseModel):
    __tablename__ = 'users_group_repo_group_to_perm'
    __table_args__ = (
        UniqueConstraint('users_group_id', 'group_id'),
        base_table_args
    )

    users_group_repo_group_to_perm_id = Column("users_group_repo_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
    group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)

    users_group = relationship('UserGroup')
    permission = relationship('Permission')
    group = relationship('RepoGroup')

    @classmethod
    def create(cls, user_group, repository_group, permission):
        n = cls()
        n.users_group = user_group
        n.group = repository_group
        n.permission = permission
        Session().add(n)
        return n

    def __unicode__(self):
        return u'<UserGroupRepoGroupToPerm:%s => %s >' % (self.users_group, self.group)


class Statistics(Base, BaseModel):
    __tablename__ = 'statistics'
    __table_args__ = (
        base_table_args
    )

    stat_id = Column("stat_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=True, default=None)
    stat_on_revision = Column("stat_on_revision", Integer(), nullable=False)
    commit_activity = Column("commit_activity", LargeBinary(1000000), nullable=False)  # JSON data
    commit_activity_combined = Column("commit_activity_combined", LargeBinary(), nullable=False)  # JSON data
    languages = Column("languages", LargeBinary(1000000), nullable=False)  # JSON data

    repository = relationship('Repository', single_parent=True)


class UserFollowing(Base, BaseModel):
    __tablename__ = 'user_followings'
    __table_args__ = (
        UniqueConstraint('user_id', 'follows_repository_id'),
        UniqueConstraint('user_id', 'follows_user_id'),
        base_table_args
    )

    user_following_id = Column("user_following_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
    follows_repo_id = Column("follows_repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=True, unique=None, default=None)
    follows_user_id = Column("follows_user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
    follows_from = Column('follows_from', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now)

    user = relationship('User', primaryjoin='User.user_id==UserFollowing.user_id')

    follows_user = relationship('User', primaryjoin='User.user_id==UserFollowing.follows_user_id')
    follows_repository = relationship('Repository', order_by='Repository.repo_name')

    @classmethod
    def get_repo_followers(cls, repo_id):
        return cls.query().filter(cls.follows_repo_id == repo_id)


class CacheKey(Base, BaseModel):
    __tablename__ = 'cache_invalidation'
    __table_args__ = (
        UniqueConstraint('cache_key'),
        Index('key_idx', 'cache_key'),
        base_table_args,
    )

    CACHE_TYPE_ATOM = 'ATOM'
    CACHE_TYPE_RSS = 'RSS'
    CACHE_TYPE_README = 'README'

    cache_id = Column("cache_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    cache_key = Column("cache_key", String(255), nullable=True, unique=None, default=None)
    cache_args = Column("cache_args", String(255), nullable=True, unique=None, default=None)
    cache_active = Column("cache_active", Boolean(), nullable=True, unique=None, default=False)

    def __init__(self, cache_key, cache_args=''):
        self.cache_key = cache_key
        self.cache_args = cache_args
        self.cache_active = False

    def __unicode__(self):
        return u"<%s('%s:%s[%s]')>" % (
            self.__class__.__name__,
            self.cache_id, self.cache_key, self.cache_active)

    def _cache_key_partition(self):
        prefix, repo_name, suffix = self.cache_key.partition(self.cache_args)
        return prefix, repo_name, suffix

    def get_prefix(self):
        """
        Try to extract the prefix from an existing cache key. The key can
        consist of prefix, repo_name and suffix.
        """
        # this returns prefix, repo_name, suffix
        return self._cache_key_partition()[0]

    def get_suffix(self):
        """
        Get the suffix that might have been used in _get_cache_key to
        generate self.cache_key. Only used for informational purposes
        in repo_edit.mako.
        """
        # prefix, repo_name, suffix
        return self._cache_key_partition()[2]

    @classmethod
    def delete_all_cache(cls):
        """
        Delete all cache keys from the database.
        Should only be run when all instances are down and all entries
        are thus stale.
        """
        cls.query().delete()
        Session().commit()

    @classmethod
    def get_cache_key(cls, repo_name, cache_type):
        """
        Generate a cache key for this process of a RhodeCode instance.
        The prefix will most likely be the process id, or an explicitly set
        instance_id from the .ini file.
        """
        import rhodecode
        prefix = safe_unicode(rhodecode.CONFIG.get('instance_id') or '')

        repo_as_unicode = safe_unicode(repo_name)
        key = u'{}_{}'.format(repo_as_unicode, cache_type) \
            if cache_type else repo_as_unicode

        return u'{}{}'.format(prefix, key)

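    # Illustrative sketch (an assumption, not part of the original module):
    # with a hypothetical ``instance_id = rc-1`` set in the .ini file, the key
    # for a repository ATOM feed comes out as the plain concatenation:
    #
    #     CacheKey.get_cache_key('company/tools', CacheKey.CACHE_TYPE_ATOM)
    #     # -> u'rc-1company/tools_ATOM'
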
    @classmethod
    def set_invalidate(cls, repo_name, delete=False):
        """
        Mark all caches of a repo as invalid in the database.
        """

        try:
            qry = Session().query(cls).filter(cls.cache_args == repo_name)
            if delete:
                log.debug('cache objects deleted for repo %s',
                          safe_str(repo_name))
                qry.delete()
            else:
                log.debug('cache objects marked as invalid for repo %s',
                          safe_str(repo_name))
                qry.update({"cache_active": False})

            Session().commit()
        except Exception:
            log.exception(
                'Cache key invalidation failed for repository %s',
                safe_str(repo_name))
            Session().rollback()

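    # Illustrative sketch (an assumption, not part of the original module):
    # a typical invalidation flow after a push, assuming hooks know the repo
    # name:
    #
    #     CacheKey.set_invalidate('company/tools')               # mark stale
    #     CacheKey.set_invalidate('company/tools', delete=True)  # or drop rows
    #
    # readers then see ``cache_active == False`` and recompute their data.
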
    @classmethod
    def get_active_cache(cls, cache_key):
        inv_obj = cls.query().filter(cls.cache_key == cache_key).scalar()
        if inv_obj:
            return inv_obj
        return None

    @classmethod
    def repo_context_cache(cls, compute_func, repo_name, cache_type,
                           thread_scoped=False):
        """
        Returns an invalidation context for caching per-repository data.
        Usage::

            @cache_region('long_term')
            def _heavy_calculation(cache_key):
                return 'result'

            cache_context = CacheKey.repo_context_cache(
                _heavy_calculation, repo_name, cache_type)

            with cache_context as context:
                context.invalidate()
                computed = context.compute()

            assert computed == 'result'
        """
        from rhodecode.lib import caches
        return caches.InvalidationContext(
            compute_func, repo_name, cache_type, thread_scoped=thread_scoped)


3257 class ChangesetComment(Base, BaseModel):
3257 class ChangesetComment(Base, BaseModel):
3258 __tablename__ = 'changeset_comments'
3258 __tablename__ = 'changeset_comments'
3259 __table_args__ = (
3259 __table_args__ = (
3260 Index('cc_revision_idx', 'revision'),
3260 Index('cc_revision_idx', 'revision'),
3261 base_table_args,
3261 base_table_args,
3262 )
3262 )
3263
3263
3264 COMMENT_OUTDATED = u'comment_outdated'
3264 COMMENT_OUTDATED = u'comment_outdated'
3265 COMMENT_TYPE_NOTE = u'note'
3265 COMMENT_TYPE_NOTE = u'note'
3266 COMMENT_TYPE_TODO = u'todo'
3266 COMMENT_TYPE_TODO = u'todo'
3267 COMMENT_TYPES = [COMMENT_TYPE_NOTE, COMMENT_TYPE_TODO]
3267 COMMENT_TYPES = [COMMENT_TYPE_NOTE, COMMENT_TYPE_TODO]
3268
3268
3269 comment_id = Column('comment_id', Integer(), nullable=False, primary_key=True)
3269 comment_id = Column('comment_id', Integer(), nullable=False, primary_key=True)
3270 repo_id = Column('repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
3270 repo_id = Column('repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
3271 revision = Column('revision', String(40), nullable=True)
3271 revision = Column('revision', String(40), nullable=True)
3272 pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=True)
3272 pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=True)
3273 pull_request_version_id = Column("pull_request_version_id", Integer(), ForeignKey('pull_request_versions.pull_request_version_id'), nullable=True)
3273 pull_request_version_id = Column("pull_request_version_id", Integer(), ForeignKey('pull_request_versions.pull_request_version_id'), nullable=True)
3274 line_no = Column('line_no', Unicode(10), nullable=True)
3274 line_no = Column('line_no', Unicode(10), nullable=True)
3275 hl_lines = Column('hl_lines', Unicode(512), nullable=True)
3275 hl_lines = Column('hl_lines', Unicode(512), nullable=True)
3276 f_path = Column('f_path', Unicode(1000), nullable=True)
3276 f_path = Column('f_path', Unicode(1000), nullable=True)
3277 user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=False)
3277 user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=False)
3278 text = Column('text', UnicodeText().with_variant(UnicodeText(25000), 'mysql'), nullable=False)
3278 text = Column('text', UnicodeText().with_variant(UnicodeText(25000), 'mysql'), nullable=False)
3279 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
3279 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
3280 modified_at = Column('modified_at', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
3280 modified_at = Column('modified_at', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
3281 renderer = Column('renderer', Unicode(64), nullable=True)
3281 renderer = Column('renderer', Unicode(64), nullable=True)
3282 display_state = Column('display_state', Unicode(128), nullable=True)
3282 display_state = Column('display_state', Unicode(128), nullable=True)
3283
3283
3284 comment_type = Column('comment_type', Unicode(128), nullable=True, default=COMMENT_TYPE_NOTE)
3284 comment_type = Column('comment_type', Unicode(128), nullable=True, default=COMMENT_TYPE_NOTE)
3285 resolved_comment_id = Column('resolved_comment_id', Integer(), ForeignKey('changeset_comments.comment_id'), nullable=True)
3285 resolved_comment_id = Column('resolved_comment_id', Integer(), ForeignKey('changeset_comments.comment_id'), nullable=True)
3286 resolved_comment = relationship('ChangesetComment', remote_side=comment_id, backref='resolved_by')
3286 resolved_comment = relationship('ChangesetComment', remote_side=comment_id, backref='resolved_by')
3287 author = relationship('User', lazy='joined')
3287 author = relationship('User', lazy='joined')
3288 repo = relationship('Repository')
3288 repo = relationship('Repository')
3289 status_change = relationship('ChangesetStatus', cascade="all, delete, delete-orphan", lazy='joined')
3289 status_change = relationship('ChangesetStatus', cascade="all, delete, delete-orphan", lazy='joined')
3290 pull_request = relationship('PullRequest', lazy='joined')
3290 pull_request = relationship('PullRequest', lazy='joined')
3291 pull_request_version = relationship('PullRequestVersion')
3291 pull_request_version = relationship('PullRequestVersion')
3292
3292
3293 @classmethod
3293 @classmethod
3294 def get_users(cls, revision=None, pull_request_id=None):
3294 def get_users(cls, revision=None, pull_request_id=None):
3295 """
3295 """
3296 Returns users associated with this ChangesetComment, i.e. those
3296 Returns users associated with this ChangesetComment, i.e. those
3297 who actually commented.
3297 who actually commented.
3298
3298
3299 :param revision:
3299 :param revision:
3300 :param pull_request_id:
3300 :param pull_request_id:
3301 """
3301 """
3302 q = Session().query(User)\
3302 q = Session().query(User)\
3303 .join(ChangesetComment.author)
3303 .join(ChangesetComment.author)
3304 if revision:
3304 if revision:
3305 q = q.filter(cls.revision == revision)
3305 q = q.filter(cls.revision == revision)
3306 elif pull_request_id:
3306 elif pull_request_id:
3307 q = q.filter(cls.pull_request_id == pull_request_id)
3307 q = q.filter(cls.pull_request_id == pull_request_id)
3308 return q.all()
3308 return q.all()
3309
3309
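A hedged usage sketch of get_users(), assuming a configured database session, an existing pull request, and that the User model exposes a username attribute (not shown in this excerpt):

from rhodecode.model.db import ChangesetComment

# users who commented on pull request #1 (hypothetical id)
for user in ChangesetComment.get_users(pull_request_id=1):
    print(user.username)

# for commit comments, filter by the 40-character revision hash instead
commenters = ChangesetComment.get_users(revision='a' * 40)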
3310 @classmethod
3310 @classmethod
3311 def get_index_from_version(cls, pr_version, versions):
3311 def get_index_from_version(cls, pr_version, versions):
3312 num_versions = [x.pull_request_version_id for x in versions]
3312 num_versions = [x.pull_request_version_id for x in versions]
3313 try:
3313 try:
3314 return num_versions.index(pr_version) + 1
3314 return num_versions.index(pr_version) + 1
3315 except (IndexError, ValueError):
3315 except (IndexError, ValueError):
3316 return
3316 return
3317
3317
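get_index_from_version() resolves a pull request version id to its 1-based position in the supplied version list and returns None when the id is missing. A standalone sketch of the same logic, using stand-in objects instead of real PullRequestVersion rows:

from collections import namedtuple

Version = namedtuple('Version', 'pull_request_version_id')

def index_from_version(pr_version, versions):
    # mirrors ChangesetComment.get_index_from_version above
    ids = [v.pull_request_version_id for v in versions]
    try:
        return ids.index(pr_version) + 1
    except ValueError:
        return None

versions = [Version(10), Version(11), Version(12)]
assert index_from_version(11, versions) == 2   # second version of the PR
assert index_from_version(99, versions) is None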
3318 @property
3318 @property
3319 def outdated(self):
3319 def outdated(self):
3320 return self.display_state == self.COMMENT_OUTDATED
3320 return self.display_state == self.COMMENT_OUTDATED
3321
3321
3322 def outdated_at_version(self, version):
3322 def outdated_at_version(self, version):
3323 """
3323 """
3324 Checks if comment is outdated for given pull request version
3324 Checks if comment is outdated for given pull request version
3325 """
3325 """
3326 return self.outdated and self.pull_request_version_id != version
3326 return self.outdated and self.pull_request_version_id != version
3327
3327
3328 def older_than_version(self, version):
3328 def older_than_version(self, version):
3329 """
3329 """
3330 Checks if the comment was made against an earlier version than the given one
3330 Checks if the comment was made against an earlier version than the given one
3331 """
3331 """
3332 if version is None:
3332 if version is None:
3333 return self.pull_request_version_id is not None
3333 return self.pull_request_version_id is not None
3334
3334
3335 return self.pull_request_version_id < version
3335 return self.pull_request_version_id < version
3336
3336
3337 @property
3337 @property
3338 def resolved(self):
3338 def resolved(self):
3339 return self.resolved_by[0] if self.resolved_by else None
3339 return self.resolved_by[0] if self.resolved_by else None
3340
3340
3341 @property
3341 @property
3342 def is_todo(self):
3342 def is_todo(self):
3343 return self.comment_type == self.COMMENT_TYPE_TODO
3343 return self.comment_type == self.COMMENT_TYPE_TODO
3344
3344
3345 @property
3345 @property
3346 def is_inline(self):
3346 def is_inline(self):
3347 return self.line_no and self.f_path
3347 return self.line_no and self.f_path
3348
3348
3349 def get_index_version(self, versions):
3349 def get_index_version(self, versions):
3350 return self.get_index_from_version(
3350 return self.get_index_from_version(
3351 self.pull_request_version_id, versions)
3351 self.pull_request_version_id, versions)
3352
3352
3353 def __repr__(self):
3353 def __repr__(self):
3354 if self.comment_id:
3354 if self.comment_id:
3355 return '<DB:Comment #%s>' % self.comment_id
3355 return '<DB:Comment #%s>' % self.comment_id
3356 else:
3356 else:
3357 return '<DB:Comment at %#x>' % id(self)
3357 return '<DB:Comment at %#x>' % id(self)
3358
3358
3359 def get_api_data(self):
3359 def get_api_data(self):
3360 comment = self
3360 comment = self
3361 data = {
3361 data = {
3362 'comment_id': comment.comment_id,
3362 'comment_id': comment.comment_id,
3363 'comment_type': comment.comment_type,
3363 'comment_type': comment.comment_type,
3364 'comment_text': comment.text,
3364 'comment_text': comment.text,
3365 'comment_status': comment.status_change,
3365 'comment_status': comment.status_change,
3366 'comment_f_path': comment.f_path,
3366 'comment_f_path': comment.f_path,
3367 'comment_lineno': comment.line_no,
3367 'comment_lineno': comment.line_no,
3368 'comment_author': comment.author,
3368 'comment_author': comment.author,
3369 'comment_created_on': comment.created_on
3369 'comment_created_on': comment.created_on
3370 }
3370 }
3371 return data
3371 return data
3372
3372
3373 def __json__(self):
3373 def __json__(self):
3374 data = dict()
3374 data = dict()
3375 data.update(self.get_api_data())
3375 data.update(self.get_api_data())
3376 return data
3376 return data
3377
3377
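ChangesetComment, like the models that follow, pairs get_api_data() with a __json__() hook so instances can be handed to a JSON encoder directly. A minimal sketch of an encoder that honours that hook (an assumption about how the hook is consumed, not code from this file); datetime values are converted because created_on/modified_at would otherwise fail to serialize:

import datetime
import json

class ModelJSONEncoder(json.JSONEncoder):
    # fall back to an object's __json__() hook, then to ISO-formatted dates
    def default(self, obj):
        if hasattr(obj, '__json__'):
            return obj.__json__()
        if isinstance(obj, datetime.datetime):
            return obj.isoformat()
        return json.JSONEncoder.default(self, obj)

# hypothetical usage with a loaded comment instance:
# print(json.dumps(comment, cls=ModelJSONEncoder))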
3378
3378
3379 class ChangesetStatus(Base, BaseModel):
3379 class ChangesetStatus(Base, BaseModel):
3380 __tablename__ = 'changeset_statuses'
3380 __tablename__ = 'changeset_statuses'
3381 __table_args__ = (
3381 __table_args__ = (
3382 Index('cs_revision_idx', 'revision'),
3382 Index('cs_revision_idx', 'revision'),
3383 Index('cs_version_idx', 'version'),
3383 Index('cs_version_idx', 'version'),
3384 UniqueConstraint('repo_id', 'revision', 'version'),
3384 UniqueConstraint('repo_id', 'revision', 'version'),
3385 base_table_args
3385 base_table_args
3386 )
3386 )
3387
3387
3388 STATUS_NOT_REVIEWED = DEFAULT = 'not_reviewed'
3388 STATUS_NOT_REVIEWED = DEFAULT = 'not_reviewed'
3389 STATUS_APPROVED = 'approved'
3389 STATUS_APPROVED = 'approved'
3390 STATUS_REJECTED = 'rejected'
3390 STATUS_REJECTED = 'rejected'
3391 STATUS_UNDER_REVIEW = 'under_review'
3391 STATUS_UNDER_REVIEW = 'under_review'
3392
3392
3393 STATUSES = [
3393 STATUSES = [
3394 (STATUS_NOT_REVIEWED, _("Not Reviewed")), # (no icon) and default
3394 (STATUS_NOT_REVIEWED, _("Not Reviewed")), # (no icon) and default
3395 (STATUS_APPROVED, _("Approved")),
3395 (STATUS_APPROVED, _("Approved")),
3396 (STATUS_REJECTED, _("Rejected")),
3396 (STATUS_REJECTED, _("Rejected")),
3397 (STATUS_UNDER_REVIEW, _("Under Review")),
3397 (STATUS_UNDER_REVIEW, _("Under Review")),
3398 ]
3398 ]
3399
3399
3400 changeset_status_id = Column('changeset_status_id', Integer(), nullable=False, primary_key=True)
3400 changeset_status_id = Column('changeset_status_id', Integer(), nullable=False, primary_key=True)
3401 repo_id = Column('repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
3401 repo_id = Column('repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
3402 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None)
3402 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None)
3403 revision = Column('revision', String(40), nullable=False)
3403 revision = Column('revision', String(40), nullable=False)
3404 status = Column('status', String(128), nullable=False, default=DEFAULT)
3404 status = Column('status', String(128), nullable=False, default=DEFAULT)
3405 changeset_comment_id = Column('changeset_comment_id', Integer(), ForeignKey('changeset_comments.comment_id'))
3405 changeset_comment_id = Column('changeset_comment_id', Integer(), ForeignKey('changeset_comments.comment_id'))
3406 modified_at = Column('modified_at', DateTime(), nullable=False, default=datetime.datetime.now)
3406 modified_at = Column('modified_at', DateTime(), nullable=False, default=datetime.datetime.now)
3407 version = Column('version', Integer(), nullable=False, default=0)
3407 version = Column('version', Integer(), nullable=False, default=0)
3408 pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=True)
3408 pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=True)
3409
3409
3410 author = relationship('User', lazy='joined')
3410 author = relationship('User', lazy='joined')
3411 repo = relationship('Repository')
3411 repo = relationship('Repository')
3412 comment = relationship('ChangesetComment', lazy='joined')
3412 comment = relationship('ChangesetComment', lazy='joined')
3413 pull_request = relationship('PullRequest', lazy='joined')
3413 pull_request = relationship('PullRequest', lazy='joined')
3414
3414
3415 def __unicode__(self):
3415 def __unicode__(self):
3416 return u"<%s('%s[v%s]:%s')>" % (
3416 return u"<%s('%s[v%s]:%s')>" % (
3417 self.__class__.__name__,
3417 self.__class__.__name__,
3418 self.status, self.version, self.author
3418 self.status, self.version, self.author
3419 )
3419 )
3420
3420
3421 @classmethod
3421 @classmethod
3422 def get_status_lbl(cls, value):
3422 def get_status_lbl(cls, value):
3423 return dict(cls.STATUSES).get(value)
3423 return dict(cls.STATUSES).get(value)
3424
3424
3425 @property
3425 @property
3426 def status_lbl(self):
3426 def status_lbl(self):
3427 return ChangesetStatus.get_status_lbl(self.status)
3427 return ChangesetStatus.get_status_lbl(self.status)
3428
3428
3429 def get_api_data(self):
3429 def get_api_data(self):
3430 status = self
3430 status = self
3431 data = {
3431 data = {
3432 'status_id': status.changeset_status_id,
3432 'status_id': status.changeset_status_id,
3433 'status': status.status,
3433 'status': status.status,
3434 }
3434 }
3435 return data
3435 return data
3436
3436
3437 def __json__(self):
3437 def __json__(self):
3438 data = dict()
3438 data = dict()
3439 data.update(self.get_api_data())
3439 data.update(self.get_api_data())
3440 return data
3440 return data
3441
3441
3442
3442
3443 class _PullRequestBase(BaseModel):
3443 class _PullRequestBase(BaseModel):
3444 """
3444 """
3445 Common attributes of pull request and version entries.
3445 Common attributes of pull request and version entries.
3446 """
3446 """
3447
3447
3448 # .status values
3448 # .status values
3449 STATUS_NEW = u'new'
3449 STATUS_NEW = u'new'
3450 STATUS_OPEN = u'open'
3450 STATUS_OPEN = u'open'
3451 STATUS_CLOSED = u'closed'
3451 STATUS_CLOSED = u'closed'
3452
3452
3453 title = Column('title', Unicode(255), nullable=True)
3453 title = Column('title', Unicode(255), nullable=True)
3454 description = Column(
3454 description = Column(
3455 'description', UnicodeText().with_variant(UnicodeText(10240), 'mysql'),
3455 'description', UnicodeText().with_variant(UnicodeText(10240), 'mysql'),
3456 nullable=True)
3456 nullable=True)
3457 # new/open/closed status of pull request (not approve/reject/etc)
3457 # new/open/closed status of pull request (not approve/reject/etc)
3458 status = Column('status', Unicode(255), nullable=False, default=STATUS_NEW)
3458 status = Column('status', Unicode(255), nullable=False, default=STATUS_NEW)
3459 created_on = Column(
3459 created_on = Column(
3460 'created_on', DateTime(timezone=False), nullable=False,
3460 'created_on', DateTime(timezone=False), nullable=False,
3461 default=datetime.datetime.now)
3461 default=datetime.datetime.now)
3462 updated_on = Column(
3462 updated_on = Column(
3463 'updated_on', DateTime(timezone=False), nullable=False,
3463 'updated_on', DateTime(timezone=False), nullable=False,
3464 default=datetime.datetime.now)
3464 default=datetime.datetime.now)
3465
3465
3466 @declared_attr
3466 @declared_attr
3467 def user_id(cls):
3467 def user_id(cls):
3468 return Column(
3468 return Column(
3469 "user_id", Integer(), ForeignKey('users.user_id'), nullable=False,
3469 "user_id", Integer(), ForeignKey('users.user_id'), nullable=False,
3470 unique=None)
3470 unique=None)
3471
3471
3472 # 500 revisions max
3472 # 500 revisions max
3473 _revisions = Column(
3473 _revisions = Column(
3474 'revisions', UnicodeText().with_variant(UnicodeText(20500), 'mysql'))
3474 'revisions', UnicodeText().with_variant(UnicodeText(20500), 'mysql'))
3475
3475
3476 @declared_attr
3476 @declared_attr
3477 def source_repo_id(cls):
3477 def source_repo_id(cls):
3478 # TODO: dan: rename column to source_repo_id
3478 # TODO: dan: rename column to source_repo_id
3479 return Column(
3479 return Column(
3480 'org_repo_id', Integer(), ForeignKey('repositories.repo_id'),
3480 'org_repo_id', Integer(), ForeignKey('repositories.repo_id'),
3481 nullable=False)
3481 nullable=False)
3482
3482
3483 source_ref = Column('org_ref', Unicode(255), nullable=False)
3483 source_ref = Column('org_ref', Unicode(255), nullable=False)
3484
3484
3485 @declared_attr
3485 @declared_attr
3486 def target_repo_id(cls):
3486 def target_repo_id(cls):
3487 # TODO: dan: rename column to target_repo_id
3487 # TODO: dan: rename column to target_repo_id
3488 return Column(
3488 return Column(
3489 'other_repo_id', Integer(), ForeignKey('repositories.repo_id'),
3489 'other_repo_id', Integer(), ForeignKey('repositories.repo_id'),
3490 nullable=False)
3490 nullable=False)
3491
3491
3492 target_ref = Column('other_ref', Unicode(255), nullable=False)
3492 target_ref = Column('other_ref', Unicode(255), nullable=False)
3493 _shadow_merge_ref = Column('shadow_merge_ref', Unicode(255), nullable=True)
3493 _shadow_merge_ref = Column('shadow_merge_ref', Unicode(255), nullable=True)
3494
3494
3495 # TODO: dan: rename column to last_merge_source_rev
3495 # TODO: dan: rename column to last_merge_source_rev
3496 _last_merge_source_rev = Column(
3496 _last_merge_source_rev = Column(
3497 'last_merge_org_rev', String(40), nullable=True)
3497 'last_merge_org_rev', String(40), nullable=True)
3498 # TODO: dan: rename column to last_merge_target_rev
3498 # TODO: dan: rename column to last_merge_target_rev
3499 _last_merge_target_rev = Column(
3499 _last_merge_target_rev = Column(
3500 'last_merge_other_rev', String(40), nullable=True)
3500 'last_merge_other_rev', String(40), nullable=True)
3501 _last_merge_status = Column('merge_status', Integer(), nullable=True)
3501 _last_merge_status = Column('merge_status', Integer(), nullable=True)
3502 merge_rev = Column('merge_rev', String(40), nullable=True)
3502 merge_rev = Column('merge_rev', String(40), nullable=True)
3503
3503
3504 reviewer_data = Column(
3504 reviewer_data = Column(
3505 'reviewer_data_json', MutationObj.as_mutable(
3505 'reviewer_data_json', MutationObj.as_mutable(
3506 JsonType(dialect_map=dict(mysql=UnicodeText(16384)))))
3506 JsonType(dialect_map=dict(mysql=UnicodeText(16384)))))
3507
3507
3508 @property
3508 @property
3509 def reviewer_data_json(self):
3509 def reviewer_data_json(self):
3510 return json.dumps(self.reviewer_data)
3510 return json.dumps(self.reviewer_data)
3511
3511
3512 @hybrid_property
3512 @hybrid_property
3513 def description_safe(self):
3513 def description_safe(self):
3514 from rhodecode.lib import helpers as h
3514 from rhodecode.lib import helpers as h
3515 return h.escape(self.description)
3515 return h.escape(self.description)
3516
3516
3517 @hybrid_property
3517 @hybrid_property
3518 def revisions(self):
3518 def revisions(self):
3519 return self._revisions.split(':') if self._revisions else []
3519 return self._revisions.split(':') if self._revisions else []
3520
3520
3521 @revisions.setter
3521 @revisions.setter
3522 def revisions(self, val):
3522 def revisions(self, val):
3523 self._revisions = ':'.join(val)
3523 self._revisions = ':'.join(val)
3524
3524
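The revisions hybrid property above persists the commit list as a single colon-joined text column (sized for roughly 500 revisions, per the comment). A standalone sketch of that encoding with hypothetical commit hashes:

# encode: list of commit ids -> colon-joined text column
revisions = ['deadbeef' * 5, 'cafebabe' * 5]   # hypothetical 40-char hashes
stored = ':'.join(revisions)

# decode: text column -> list of commit ids (empty list when unset)
decoded = stored.split(':') if stored else []
assert decoded == revisions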
3525 @hybrid_property
3525 @hybrid_property
3526 def last_merge_status(self):
3526 def last_merge_status(self):
3527 return safe_int(self._last_merge_status)
3527 return safe_int(self._last_merge_status)
3528
3528
3529 @last_merge_status.setter
3529 @last_merge_status.setter
3530 def last_merge_status(self, val):
3530 def last_merge_status(self, val):
3531 self._last_merge_status = val
3531 self._last_merge_status = val
3532
3532
3533 @declared_attr
3533 @declared_attr
3534 def author(cls):
3534 def author(cls):
3535 return relationship('User', lazy='joined')
3535 return relationship('User', lazy='joined')
3536
3536
3537 @declared_attr
3537 @declared_attr
3538 def source_repo(cls):
3538 def source_repo(cls):
3539 return relationship(
3539 return relationship(
3540 'Repository',
3540 'Repository',
3541 primaryjoin='%s.source_repo_id==Repository.repo_id' % cls.__name__)
3541 primaryjoin='%s.source_repo_id==Repository.repo_id' % cls.__name__)
3542
3542
3543 @property
3543 @property
3544 def source_ref_parts(self):
3544 def source_ref_parts(self):
3545 return self.unicode_to_reference(self.source_ref)
3545 return self.unicode_to_reference(self.source_ref)
3546
3546
3547 @declared_attr
3547 @declared_attr
3548 def target_repo(cls):
3548 def target_repo(cls):
3549 return relationship(
3549 return relationship(
3550 'Repository',
3550 'Repository',
3551 primaryjoin='%s.target_repo_id==Repository.repo_id' % cls.__name__)
3551 primaryjoin='%s.target_repo_id==Repository.repo_id' % cls.__name__)
3552
3552
3553 @property
3553 @property
3554 def target_ref_parts(self):
3554 def target_ref_parts(self):
3555 return self.unicode_to_reference(self.target_ref)
3555 return self.unicode_to_reference(self.target_ref)
3556
3556
3557 @property
3557 @property
3558 def shadow_merge_ref(self):
3558 def shadow_merge_ref(self):
3559 return self.unicode_to_reference(self._shadow_merge_ref)
3559 return self.unicode_to_reference(self._shadow_merge_ref)
3560
3560
3561 @shadow_merge_ref.setter
3561 @shadow_merge_ref.setter
3562 def shadow_merge_ref(self, ref):
3562 def shadow_merge_ref(self, ref):
3563 self._shadow_merge_ref = self.reference_to_unicode(ref)
3563 self._shadow_merge_ref = self.reference_to_unicode(ref)
3564
3564
3565 def unicode_to_reference(self, raw):
3565 def unicode_to_reference(self, raw):
3566 """
3566 """
3567 Convert a unicode (or string) to a reference object.
3567 Convert a unicode (or string) to a reference object.
3568 If unicode evaluates to False it returns None.
3568 If unicode evaluates to False it returns None.
3569 """
3569 """
3570 if raw:
3570 if raw:
3571 refs = raw.split(':')
3571 refs = raw.split(':')
3572 return Reference(*refs)
3572 return Reference(*refs)
3573 else:
3573 else:
3574 return None
3574 return None
3575
3575
3576 def reference_to_unicode(self, ref):
3576 def reference_to_unicode(self, ref):
3577 """
3577 """
3578 Convert a reference object to unicode.
3578 Convert a reference object to unicode.
3579 If reference is None it returns None.
3579 If reference is None it returns None.
3580 """
3580 """
3581 if ref:
3581 if ref:
3582 return u':'.join(ref)
3582 return u':'.join(ref)
3583 else:
3583 else:
3584 return None
3584 return None
3585
3585
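The two helpers above round-trip between the colon-separated text stored in the ref columns and a Reference value (used elsewhere in this module with type, name and commit_id fields and an _asdict() method, i.e. a namedtuple). A minimal sketch of that round trip, assuming the usual type:name:commit_id layout of the stored string:

from collections import namedtuple

# stand-in for the Reference value used by the ref columns
Reference = namedtuple('Reference', 'type name commit_id')

raw = u'branch:default:' + u'0' * 40      # hypothetical stored value

# unicode_to_reference equivalent
ref = Reference(*raw.split(':')) if raw else None

# reference_to_unicode equivalent
assert u':'.join(ref) == raw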
3586 def get_api_data(self, with_merge_state=True):
3586 def get_api_data(self, with_merge_state=True):
3587 from rhodecode.model.pull_request import PullRequestModel
3587 from rhodecode.model.pull_request import PullRequestModel
3588
3588
3589 pull_request = self
3589 pull_request = self
3590 if with_merge_state:
3590 if with_merge_state:
3591 merge_status = PullRequestModel().merge_status(pull_request)
3591 merge_status = PullRequestModel().merge_status(pull_request)
3592 merge_state = {
3592 merge_state = {
3593 'status': merge_status[0],
3593 'status': merge_status[0],
3594 'message': safe_unicode(merge_status[1]),
3594 'message': safe_unicode(merge_status[1]),
3595 }
3595 }
3596 else:
3596 else:
3597 merge_state = {'status': 'not_available',
3597 merge_state = {'status': 'not_available',
3598 'message': 'not_available'}
3598 'message': 'not_available'}
3599
3599
3600 merge_data = {
3600 merge_data = {
3601 'clone_url': PullRequestModel().get_shadow_clone_url(pull_request),
3601 'clone_url': PullRequestModel().get_shadow_clone_url(pull_request),
3602 'reference': (
3602 'reference': (
3603 pull_request.shadow_merge_ref._asdict()
3603 pull_request.shadow_merge_ref._asdict()
3604 if pull_request.shadow_merge_ref else None),
3604 if pull_request.shadow_merge_ref else None),
3605 }
3605 }
3606
3606
3607 data = {
3607 data = {
3608 'pull_request_id': pull_request.pull_request_id,
3608 'pull_request_id': pull_request.pull_request_id,
3609 'url': PullRequestModel().get_url(pull_request),
3609 'url': PullRequestModel().get_url(pull_request),
3610 'title': pull_request.title,
3610 'title': pull_request.title,
3611 'description': pull_request.description,
3611 'description': pull_request.description,
3612 'status': pull_request.status,
3612 'status': pull_request.status,
3613 'created_on': pull_request.created_on,
3613 'created_on': pull_request.created_on,
3614 'updated_on': pull_request.updated_on,
3614 'updated_on': pull_request.updated_on,
3615 'commit_ids': pull_request.revisions,
3615 'commit_ids': pull_request.revisions,
3616 'review_status': pull_request.calculated_review_status(),
3616 'review_status': pull_request.calculated_review_status(),
3617 'mergeable': merge_state,
3617 'mergeable': merge_state,
3618 'source': {
3618 'source': {
3619 'clone_url': pull_request.source_repo.clone_url(),
3619 'clone_url': pull_request.source_repo.clone_url(),
3620 'repository': pull_request.source_repo.repo_name,
3620 'repository': pull_request.source_repo.repo_name,
3621 'reference': {
3621 'reference': {
3622 'name': pull_request.source_ref_parts.name,
3622 'name': pull_request.source_ref_parts.name,
3623 'type': pull_request.source_ref_parts.type,
3623 'type': pull_request.source_ref_parts.type,
3624 'commit_id': pull_request.source_ref_parts.commit_id,
3624 'commit_id': pull_request.source_ref_parts.commit_id,
3625 },
3625 },
3626 },
3626 },
3627 'target': {
3627 'target': {
3628 'clone_url': pull_request.target_repo.clone_url(),
3628 'clone_url': pull_request.target_repo.clone_url(),
3629 'repository': pull_request.target_repo.repo_name,
3629 'repository': pull_request.target_repo.repo_name,
3630 'reference': {
3630 'reference': {
3631 'name': pull_request.target_ref_parts.name,
3631 'name': pull_request.target_ref_parts.name,
3632 'type': pull_request.target_ref_parts.type,
3632 'type': pull_request.target_ref_parts.type,
3633 'commit_id': pull_request.target_ref_parts.commit_id,
3633 'commit_id': pull_request.target_ref_parts.commit_id,
3634 },
3634 },
3635 },
3635 },
3636 'merge': merge_data,
3636 'merge': merge_data,
3637 'author': pull_request.author.get_api_data(include_secrets=False,
3637 'author': pull_request.author.get_api_data(include_secrets=False,
3638 details='basic'),
3638 details='basic'),
3639 'reviewers': [
3639 'reviewers': [
3640 {
3640 {
3641 'user': reviewer.get_api_data(include_secrets=False,
3641 'user': reviewer.get_api_data(include_secrets=False,
3642 details='basic'),
3642 details='basic'),
3643 'reasons': reasons,
3643 'reasons': reasons,
3644 'review_status': st[0][1].status if st else 'not_reviewed',
3644 'review_status': st[0][1].status if st else 'not_reviewed',
3645 }
3645 }
3646 for obj, reviewer, reasons, mandatory, st in
3646 for obj, reviewer, reasons, mandatory, st in
3647 pull_request.reviewers_statuses()
3647 pull_request.reviewers_statuses()
3648 ]
3648 ]
3649 }
3649 }
3650
3650
3651 return data
3651 return data
3652
3652
3653
3653
3654 class PullRequest(Base, _PullRequestBase):
3654 class PullRequest(Base, _PullRequestBase):
3655 __tablename__ = 'pull_requests'
3655 __tablename__ = 'pull_requests'
3656 __table_args__ = (
3656 __table_args__ = (
3657 base_table_args,
3657 base_table_args,
3658 )
3658 )
3659
3659
3660 pull_request_id = Column(
3660 pull_request_id = Column(
3661 'pull_request_id', Integer(), nullable=False, primary_key=True)
3661 'pull_request_id', Integer(), nullable=False, primary_key=True)
3662
3662
3663 def __repr__(self):
3663 def __repr__(self):
3664 if self.pull_request_id:
3664 if self.pull_request_id:
3665 return '<DB:PullRequest #%s>' % self.pull_request_id
3665 return '<DB:PullRequest #%s>' % self.pull_request_id
3666 else:
3666 else:
3667 return '<DB:PullRequest at %#x>' % id(self)
3667 return '<DB:PullRequest at %#x>' % id(self)
3668
3668
3669 reviewers = relationship('PullRequestReviewers',
3669 reviewers = relationship('PullRequestReviewers',
3670 cascade="all, delete, delete-orphan")
3670 cascade="all, delete, delete-orphan")
3671 statuses = relationship('ChangesetStatus',
3671 statuses = relationship('ChangesetStatus',
3672 cascade="all, delete, delete-orphan")
3672 cascade="all, delete, delete-orphan")
3673 comments = relationship('ChangesetComment',
3673 comments = relationship('ChangesetComment',
3674 cascade="all, delete, delete-orphan")
3674 cascade="all, delete, delete-orphan")
3675 versions = relationship('PullRequestVersion',
3675 versions = relationship('PullRequestVersion',
3676 cascade="all, delete, delete-orphan",
3676 cascade="all, delete, delete-orphan",
3677 lazy='dynamic')
3677 lazy='dynamic')
3678
3678
3679 @classmethod
3679 @classmethod
3680 def get_pr_display_object(cls, pull_request_obj, org_pull_request_obj,
3680 def get_pr_display_object(cls, pull_request_obj, org_pull_request_obj,
3681 internal_methods=None):
3681 internal_methods=None):
3682
3682
3683 class PullRequestDisplay(object):
3683 class PullRequestDisplay(object):
3684 """
3684 """
3685 Special object wrapper for showing PullRequest data via Versions.
3685 Special object wrapper for showing PullRequest data via Versions.
3686 It mimics the PR object as closely as possible. This is a read-only
3686 It mimics the PR object as closely as possible. This is a read-only
3687 object, just for display.
3687 object, just for display.
3688 """
3688 """
3689
3689
3690 def __init__(self, attrs, internal=None):
3690 def __init__(self, attrs, internal=None):
3691 self.attrs = attrs
3691 self.attrs = attrs
3692 # internal attributes have priority over the ones given via attrs
3692 # internal attributes have priority over the ones given via attrs
3693 self.internal = internal or ['versions']
3693 self.internal = internal or ['versions']
3694
3694
3695 def __getattr__(self, item):
3695 def __getattr__(self, item):
3696 if item in self.internal:
3696 if item in self.internal:
3697 return getattr(self, item)
3697 return getattr(self, item)
3698 try:
3698 try:
3699 return self.attrs[item]
3699 return self.attrs[item]
3700 except KeyError:
3700 except KeyError:
3701 raise AttributeError(
3701 raise AttributeError(
3702 '%s object has no attribute %s' % (self, item))
3702 '%s object has no attribute %s' % (self, item))
3703
3703
3704 def __repr__(self):
3704 def __repr__(self):
3705 return '<DB:PullRequestDisplay #%s>' % self.attrs.get('pull_request_id')
3705 return '<DB:PullRequestDisplay #%s>' % self.attrs.get('pull_request_id')
3706
3706
3707 def versions(self):
3707 def versions(self):
3708 return pull_request_obj.versions.order_by(
3708 return pull_request_obj.versions.order_by(
3709 PullRequestVersion.pull_request_version_id).all()
3709 PullRequestVersion.pull_request_version_id).all()
3710
3710
3711 def is_closed(self):
3711 def is_closed(self):
3712 return pull_request_obj.is_closed()
3712 return pull_request_obj.is_closed()
3713
3713
3714 @property
3714 @property
3715 def pull_request_version_id(self):
3715 def pull_request_version_id(self):
3716 return getattr(pull_request_obj, 'pull_request_version_id', None)
3716 return getattr(pull_request_obj, 'pull_request_version_id', None)
3717
3717
3718 attrs = StrictAttributeDict(pull_request_obj.get_api_data())
3718 attrs = StrictAttributeDict(pull_request_obj.get_api_data())
3719
3719
3720 attrs.author = StrictAttributeDict(
3720 attrs.author = StrictAttributeDict(
3721 pull_request_obj.author.get_api_data())
3721 pull_request_obj.author.get_api_data())
3722 if pull_request_obj.target_repo:
3722 if pull_request_obj.target_repo:
3723 attrs.target_repo = StrictAttributeDict(
3723 attrs.target_repo = StrictAttributeDict(
3724 pull_request_obj.target_repo.get_api_data())
3724 pull_request_obj.target_repo.get_api_data())
3725 attrs.target_repo.clone_url = pull_request_obj.target_repo.clone_url
3725 attrs.target_repo.clone_url = pull_request_obj.target_repo.clone_url
3726
3726
3727 if pull_request_obj.source_repo:
3727 if pull_request_obj.source_repo:
3728 attrs.source_repo = StrictAttributeDict(
3728 attrs.source_repo = StrictAttributeDict(
3729 pull_request_obj.source_repo.get_api_data())
3729 pull_request_obj.source_repo.get_api_data())
3730 attrs.source_repo.clone_url = pull_request_obj.source_repo.clone_url
3730 attrs.source_repo.clone_url = pull_request_obj.source_repo.clone_url
3731
3731
3732 attrs.source_ref_parts = pull_request_obj.source_ref_parts
3732 attrs.source_ref_parts = pull_request_obj.source_ref_parts
3733 attrs.target_ref_parts = pull_request_obj.target_ref_parts
3733 attrs.target_ref_parts = pull_request_obj.target_ref_parts
3734 attrs.revisions = pull_request_obj.revisions
3734 attrs.revisions = pull_request_obj.revisions
3735
3735
3736 attrs.shadow_merge_ref = org_pull_request_obj.shadow_merge_ref
3736 attrs.shadow_merge_ref = org_pull_request_obj.shadow_merge_ref
3737 attrs.reviewer_data = org_pull_request_obj.reviewer_data
3737 attrs.reviewer_data = org_pull_request_obj.reviewer_data
3738 attrs.reviewer_data_json = org_pull_request_obj.reviewer_data_json
3738 attrs.reviewer_data_json = org_pull_request_obj.reviewer_data_json
3739
3739
3740 return PullRequestDisplay(attrs, internal=internal_methods)
3740 return PullRequestDisplay(attrs, internal=internal_methods)
3741
3741
3742 def is_closed(self):
3742 def is_closed(self):
3743 return self.status == self.STATUS_CLOSED
3743 return self.status == self.STATUS_CLOSED
3744
3744
3745 def __json__(self):
3745 def __json__(self):
3746 return {
3746 return {
3747 'revisions': self.revisions,
3747 'revisions': self.revisions,
3748 }
3748 }
3749
3749
3750 def calculated_review_status(self):
3750 def calculated_review_status(self):
3751 from rhodecode.model.changeset_status import ChangesetStatusModel
3751 from rhodecode.model.changeset_status import ChangesetStatusModel
3752 return ChangesetStatusModel().calculated_review_status(self)
3752 return ChangesetStatusModel().calculated_review_status(self)
3753
3753
3754 def reviewers_statuses(self):
3754 def reviewers_statuses(self):
3755 from rhodecode.model.changeset_status import ChangesetStatusModel
3755 from rhodecode.model.changeset_status import ChangesetStatusModel
3756 return ChangesetStatusModel().reviewers_statuses(self)
3756 return ChangesetStatusModel().reviewers_statuses(self)
3757
3757
3758 @property
3758 @property
3759 def workspace_id(self):
3759 def workspace_id(self):
3760 from rhodecode.model.pull_request import PullRequestModel
3760 from rhodecode.model.pull_request import PullRequestModel
3761 return PullRequestModel()._workspace_id(self)
3761 return PullRequestModel()._workspace_id(self)
3762
3762
3763 def get_shadow_repo(self):
3763 def get_shadow_repo(self):
3764 workspace_id = self.workspace_id
3764 workspace_id = self.workspace_id
3765 vcs_obj = self.target_repo.scm_instance()
3765 vcs_obj = self.target_repo.scm_instance()
3766 shadow_repository_path = vcs_obj._get_shadow_repository_path(
3766 shadow_repository_path = vcs_obj._get_shadow_repository_path(
3767 self.target_repo.repo_id, workspace_id)
3767 self.target_repo.repo_id, workspace_id)
3768 if os.path.isdir(shadow_repository_path):
3768 if os.path.isdir(shadow_repository_path):
3769 return vcs_obj._get_shadow_instance(shadow_repository_path)
3769 return vcs_obj._get_shadow_instance(shadow_repository_path)
3770
3770
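get_shadow_repo() above only returns a shadow repository instance when the merge workspace already exists on disk; otherwise it falls through and implicitly returns None. A sketch that makes that behaviour explicit, assuming a vcs_obj exposing the same private helpers used above:

import os

def get_shadow_repo_or_none(vcs_obj, repo_id, workspace_id):
    # mirrors PullRequest.get_shadow_repo, with the None return spelled out
    path = vcs_obj._get_shadow_repository_path(repo_id, workspace_id)
    if os.path.isdir(path):
        return vcs_obj._get_shadow_instance(path)
    return None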
3771
3771
3772 class PullRequestVersion(Base, _PullRequestBase):
3772 class PullRequestVersion(Base, _PullRequestBase):
3773 __tablename__ = 'pull_request_versions'
3773 __tablename__ = 'pull_request_versions'
3774 __table_args__ = (
3774 __table_args__ = (
3775 base_table_args,
3775 base_table_args,
3776 )
3776 )
3777
3777
3778 pull_request_version_id = Column(
3778 pull_request_version_id = Column(
3779 'pull_request_version_id', Integer(), nullable=False, primary_key=True)
3779 'pull_request_version_id', Integer(), nullable=False, primary_key=True)
3780 pull_request_id = Column(
3780 pull_request_id = Column(
3781 'pull_request_id', Integer(),
3781 'pull_request_id', Integer(),
3782 ForeignKey('pull_requests.pull_request_id'), nullable=False)
3782 ForeignKey('pull_requests.pull_request_id'), nullable=False)
3783 pull_request = relationship('PullRequest')
3783 pull_request = relationship('PullRequest')
3784
3784
3785 def __repr__(self):
3785 def __repr__(self):
3786 if self.pull_request_version_id:
3786 if self.pull_request_version_id:
3787 return '<DB:PullRequestVersion #%s>' % self.pull_request_version_id
3787 return '<DB:PullRequestVersion #%s>' % self.pull_request_version_id
3788 else:
3788 else:
3789 return '<DB:PullRequestVersion at %#x>' % id(self)
3789 return '<DB:PullRequestVersion at %#x>' % id(self)
3790
3790
3791 @property
3791 @property
3792 def reviewers(self):
3792 def reviewers(self):
3793 return self.pull_request.reviewers
3793 return self.pull_request.reviewers
3794
3794
3795 @property
3795 @property
3796 def versions(self):
3796 def versions(self):
3797 return self.pull_request.versions
3797 return self.pull_request.versions
3798
3798
3799 def is_closed(self):
3799 def is_closed(self):
3800 # calculate from original
3800 # calculate from original
3801 return self.pull_request.status == self.STATUS_CLOSED
3801 return self.pull_request.status == self.STATUS_CLOSED
3802
3802
3803 def calculated_review_status(self):
3803 def calculated_review_status(self):
3804 return self.pull_request.calculated_review_status()
3804 return self.pull_request.calculated_review_status()
3805
3805
3806 def reviewers_statuses(self):
3806 def reviewers_statuses(self):
3807 return self.pull_request.reviewers_statuses()
3807 return self.pull_request.reviewers_statuses()
3808
3808
3809
3809
3810 class PullRequestReviewers(Base, BaseModel):
3810 class PullRequestReviewers(Base, BaseModel):
3811 __tablename__ = 'pull_request_reviewers'
3811 __tablename__ = 'pull_request_reviewers'
3812 __table_args__ = (
3812 __table_args__ = (
3813 base_table_args,
3813 base_table_args,
3814 )
3814 )
3815
3815
3816 @hybrid_property
3816 @hybrid_property
3817 def reasons(self):
3817 def reasons(self):
3818 if not self._reasons:
3818 if not self._reasons:
3819 return []
3819 return []
3820 return self._reasons
3820 return self._reasons
3821
3821
3822 @reasons.setter
3822 @reasons.setter
3823 def reasons(self, val):
3823 def reasons(self, val):
3824 val = val or []
3824 val = val or []
3825 if any(not isinstance(x, basestring) for x in val):
3825 if any(not isinstance(x, basestring) for x in val):
3826 raise Exception('invalid reasons type, must be list of strings')
3826 raise Exception('invalid reasons type, must be list of strings')
3827 self._reasons = val
3827 self._reasons = val
3828
3828
3829 pull_requests_reviewers_id = Column(
3829 pull_requests_reviewers_id = Column(
3830 'pull_requests_reviewers_id', Integer(), nullable=False,
3830 'pull_requests_reviewers_id', Integer(), nullable=False,
3831 primary_key=True)
3831 primary_key=True)
3832 pull_request_id = Column(
3832 pull_request_id = Column(
3833 "pull_request_id", Integer(),
3833 "pull_request_id", Integer(),
3834 ForeignKey('pull_requests.pull_request_id'), nullable=False)
3834 ForeignKey('pull_requests.pull_request_id'), nullable=False)
3835 user_id = Column(
3835 user_id = Column(
3836 "user_id", Integer(), ForeignKey('users.user_id'), nullable=True)
3836 "user_id", Integer(), ForeignKey('users.user_id'), nullable=True)
3837 _reasons = Column(
3837 _reasons = Column(
3838 'reason', MutationList.as_mutable(
3838 'reason', MutationList.as_mutable(
3839 JsonType('list', dialect_map=dict(mysql=UnicodeText(16384)))))
3839 JsonType('list', dialect_map=dict(mysql=UnicodeText(16384)))))
3840
3840
3841 mandatory = Column("mandatory", Boolean(), nullable=False, default=False)
3841 mandatory = Column("mandatory", Boolean(), nullable=False, default=False)
3842 user = relationship('User')
3842 user = relationship('User')
3843 pull_request = relationship('PullRequest')
3843 pull_request = relationship('PullRequest')
3844
3844
3845 rule_data = Column(
3845 rule_data = Column(
3846 'rule_data_json',
3846 'rule_data_json',
3847 JsonType(dialect_map=dict(mysql=UnicodeText(16384))))
3847 JsonType(dialect_map=dict(mysql=UnicodeText(16384))))
3848
3848
3849 def rule_user_group_data(self):
3849 def rule_user_group_data(self):
3850 """
3850 """
3851 Returns the voting user group rule data for this reviewer
3851 Returns the voting user group rule data for this reviewer
3852 """
3852 """
3853
3853
3854 if self.rule_data and 'vote_rule' in self.rule_data:
3854 if self.rule_data and 'vote_rule' in self.rule_data:
3855 user_group_data = {}
3855 user_group_data = {}
3856 if 'rule_user_group_entry_id' in self.rule_data:
3856 if 'rule_user_group_entry_id' in self.rule_data:
3857 # means a group with voting rules!
3857 # means a group with voting rules!
3858 user_group_data['id'] = self.rule_data['rule_user_group_entry_id']
3858 user_group_data['id'] = self.rule_data['rule_user_group_entry_id']
3859 user_group_data['name'] = self.rule_data['rule_name']
3859 user_group_data['name'] = self.rule_data['rule_name']
3860 user_group_data['vote_rule'] = self.rule_data['vote_rule']
3860 user_group_data['vote_rule'] = self.rule_data['vote_rule']
3861
3861
3862 return user_group_data
3862 return user_group_data
3863
3863
3864 def __unicode__(self):
3864 def __unicode__(self):
3865 return u"<%s('id:%s')>" % (self.__class__.__name__,
3865 return u"<%s('id:%s')>" % (self.__class__.__name__,
3866 self.pull_requests_reviewers_id)
3866 self.pull_requests_reviewers_id)
3867
3867
3868
3868
3869 class Notification(Base, BaseModel):
3869 class Notification(Base, BaseModel):
3870 __tablename__ = 'notifications'
3870 __tablename__ = 'notifications'
3871 __table_args__ = (
3871 __table_args__ = (
3872 Index('notification_type_idx', 'type'),
3872 Index('notification_type_idx', 'type'),
3873 base_table_args,
3873 base_table_args,
3874 )
3874 )
3875
3875
3876 TYPE_CHANGESET_COMMENT = u'cs_comment'
3876 TYPE_CHANGESET_COMMENT = u'cs_comment'
3877 TYPE_MESSAGE = u'message'
3877 TYPE_MESSAGE = u'message'
3878 TYPE_MENTION = u'mention'
3878 TYPE_MENTION = u'mention'
3879 TYPE_REGISTRATION = u'registration'
3879 TYPE_REGISTRATION = u'registration'
3880 TYPE_PULL_REQUEST = u'pull_request'
3880 TYPE_PULL_REQUEST = u'pull_request'
3881 TYPE_PULL_REQUEST_COMMENT = u'pull_request_comment'
3881 TYPE_PULL_REQUEST_COMMENT = u'pull_request_comment'
3882
3882
3883 notification_id = Column('notification_id', Integer(), nullable=False, primary_key=True)
3883 notification_id = Column('notification_id', Integer(), nullable=False, primary_key=True)
3884 subject = Column('subject', Unicode(512), nullable=True)
3884 subject = Column('subject', Unicode(512), nullable=True)
3885 body = Column('body', UnicodeText().with_variant(UnicodeText(50000), 'mysql'), nullable=True)
3885 body = Column('body', UnicodeText().with_variant(UnicodeText(50000), 'mysql'), nullable=True)
3886 created_by = Column("created_by", Integer(), ForeignKey('users.user_id'), nullable=True)
3886 created_by = Column("created_by", Integer(), ForeignKey('users.user_id'), nullable=True)
3887 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
3887 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
3888 type_ = Column('type', Unicode(255))
3888 type_ = Column('type', Unicode(255))
3889
3889
3890 created_by_user = relationship('User')
3890 created_by_user = relationship('User')
3891 notifications_to_users = relationship('UserNotification', lazy='joined',
3891 notifications_to_users = relationship('UserNotification', lazy='joined',
3892 cascade="all, delete, delete-orphan")
3892 cascade="all, delete, delete-orphan")
3893
3893
3894 @property
3894 @property
3895 def recipients(self):
3895 def recipients(self):
3896 return [x.user for x in UserNotification.query()\
3896 return [x.user for x in UserNotification.query()\
3897 .filter(UserNotification.notification == self)\
3897 .filter(UserNotification.notification == self)\
3898 .order_by(UserNotification.user_id.asc()).all()]
3898 .order_by(UserNotification.user_id.asc()).all()]
3899
3899
3900 @classmethod
3900 @classmethod
3901 def create(cls, created_by, subject, body, recipients, type_=None):
3901 def create(cls, created_by, subject, body, recipients, type_=None):
3902 if type_ is None:
3902 if type_ is None:
3903 type_ = Notification.TYPE_MESSAGE
3903 type_ = Notification.TYPE_MESSAGE
3904
3904
3905 notification = cls()
3905 notification = cls()
3906 notification.created_by_user = created_by
3906 notification.created_by_user = created_by
3907 notification.subject = subject
3907 notification.subject = subject
3908 notification.body = body
3908 notification.body = body
3909 notification.type_ = type_
3909 notification.type_ = type_
3910 notification.created_on = datetime.datetime.now()
3910 notification.created_on = datetime.datetime.now()
3911
3911
3912 for u in recipients:
3912 for u in recipients:
3913 assoc = UserNotification()
3913 assoc = UserNotification()
3914 assoc.notification = notification
3914 assoc.notification = notification
3915
3915
3916 # if created_by is among the recipients, mark their notification
3916 # if created_by is among the recipients, mark their notification
3917 # as read
3917 # as read
3918 if u.user_id == created_by.user_id:
3918 if u.user_id == created_by.user_id:
3919 assoc.read = True
3919 assoc.read = True
3920
3920
3921 u.notifications.append(assoc)
3921 u.notifications.append(assoc)
3922 Session().add(notification)
3922 Session().add(notification)
3923
3923
3924 return notification
3924 return notification
3925
3925
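A hedged usage sketch of the Notification.create() factory above; it assumes an open database session and existing User rows (the user lookup helpers are assumptions, not shown in this excerpt), and the notification is only added to the session, so the caller still has to commit:

from rhodecode.model.db import Notification, Session, User

sender = User.get_by_username('admin')     # assumed lookup helper
recipients = User.query().limit(2).all()   # hypothetical recipients

notification = Notification.create(
    created_by=sender,
    subject=u'Build finished',
    body=u'The nightly build completed successfully.',
    recipients=recipients,
    type_=Notification.TYPE_MESSAGE)

Session().commit()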
3926
3926
3927 class UserNotification(Base, BaseModel):
3927 class UserNotification(Base, BaseModel):
3928 __tablename__ = 'user_to_notification'
3928 __tablename__ = 'user_to_notification'
3929 __table_args__ = (
3929 __table_args__ = (
3930 UniqueConstraint('user_id', 'notification_id'),
3930 UniqueConstraint('user_id', 'notification_id'),
3931 base_table_args
3931 base_table_args
3932 )
3932 )
3933
3933
3934 user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), primary_key=True)
3934 user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), primary_key=True)
3935 notification_id = Column("notification_id", Integer(), ForeignKey('notifications.notification_id'), primary_key=True)
3935 notification_id = Column("notification_id", Integer(), ForeignKey('notifications.notification_id'), primary_key=True)
3936 read = Column('read', Boolean, default=False)
3936 read = Column('read', Boolean, default=False)
3937 sent_on = Column('sent_on', DateTime(timezone=False), nullable=True, unique=None)
3937 sent_on = Column('sent_on', DateTime(timezone=False), nullable=True, unique=None)
3938
3938
3939 user = relationship('User', lazy="joined")
3939 user = relationship('User', lazy="joined")
3940 notification = relationship('Notification', lazy="joined",
3940 notification = relationship('Notification', lazy="joined",
3941 order_by=lambda: Notification.created_on.desc(),)
3941 order_by=lambda: Notification.created_on.desc(),)
3942
3942
3943 def mark_as_read(self):
3943 def mark_as_read(self):
3944 self.read = True
3944 self.read = True
3945 Session().add(self)
3945 Session().add(self)
3946
3946
3947
3947
3948 class Gist(Base, BaseModel):
3948 class Gist(Base, BaseModel):
3949 __tablename__ = 'gists'
3949 __tablename__ = 'gists'
3950 __table_args__ = (
3950 __table_args__ = (
3951 Index('g_gist_access_id_idx', 'gist_access_id'),
3951 Index('g_gist_access_id_idx', 'gist_access_id'),
3952 Index('g_created_on_idx', 'created_on'),
3952 Index('g_created_on_idx', 'created_on'),
3953 base_table_args
3953 base_table_args
3954 )
3954 )
3955
3955
3956 GIST_PUBLIC = u'public'
3956 GIST_PUBLIC = u'public'
3957 GIST_PRIVATE = u'private'
3957 GIST_PRIVATE = u'private'
3958 DEFAULT_FILENAME = u'gistfile1.txt'
3958 DEFAULT_FILENAME = u'gistfile1.txt'
3959
3959
3960 ACL_LEVEL_PUBLIC = u'acl_public'
3960 ACL_LEVEL_PUBLIC = u'acl_public'
3961 ACL_LEVEL_PRIVATE = u'acl_private'
3961 ACL_LEVEL_PRIVATE = u'acl_private'
3962
3962
3963 gist_id = Column('gist_id', Integer(), primary_key=True)
3963 gist_id = Column('gist_id', Integer(), primary_key=True)
3964 gist_access_id = Column('gist_access_id', Unicode(250))
3964 gist_access_id = Column('gist_access_id', Unicode(250))
3965 gist_description = Column('gist_description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))
3965 gist_description = Column('gist_description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))
3966 gist_owner = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=True)
3966 gist_owner = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=True)
3967 gist_expires = Column('gist_expires', Float(53), nullable=False)
3967 gist_expires = Column('gist_expires', Float(53), nullable=False)
3968 gist_type = Column('gist_type', Unicode(128), nullable=False)
3968 gist_type = Column('gist_type', Unicode(128), nullable=False)
3969 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
3969 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
3970 modified_at = Column('modified_at', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
3970 modified_at = Column('modified_at', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
3971 acl_level = Column('acl_level', Unicode(128), nullable=True)
3971 acl_level = Column('acl_level', Unicode(128), nullable=True)
3972
3972
3973 owner = relationship('User')
3973 owner = relationship('User')
3974
3974
3975 def __repr__(self):
3975 def __repr__(self):
3976 return '<Gist:[%s]%s>' % (self.gist_type, self.gist_access_id)
3976 return '<Gist:[%s]%s>' % (self.gist_type, self.gist_access_id)
3977
3977
3978 @hybrid_property
3978 @hybrid_property
3979 def description_safe(self):
3979 def description_safe(self):
3980 from rhodecode.lib import helpers as h
3980 from rhodecode.lib import helpers as h
3981 return h.escape(self.gist_description)
3981 return h.escape(self.gist_description)
3982
3982
3983 @classmethod
3983 @classmethod
3984 def get_or_404(cls, id_):
3984 def get_or_404(cls, id_):
3985 from pyramid.httpexceptions import HTTPNotFound
3985 from pyramid.httpexceptions import HTTPNotFound
3986
3986
3987 res = cls.query().filter(cls.gist_access_id == id_).scalar()
3987 res = cls.query().filter(cls.gist_access_id == id_).scalar()
3988 if not res:
3988 if not res:
3989 raise HTTPNotFound()
3989 raise HTTPNotFound()
3990 return res
3990 return res
3991
3991
3992 @classmethod
3992 @classmethod
3993 def get_by_access_id(cls, gist_access_id):
3993 def get_by_access_id(cls, gist_access_id):
3994 return cls.query().filter(cls.gist_access_id == gist_access_id).scalar()
3994 return cls.query().filter(cls.gist_access_id == gist_access_id).scalar()
3995
3995
3996 def gist_url(self):
3996 def gist_url(self):
3997 from rhodecode.model.gist import GistModel
3997 from rhodecode.model.gist import GistModel
3998 return GistModel().get_url(self)
3998 return GistModel().get_url(self)
3999
3999
4000 @classmethod
4000 @classmethod
4001 def base_path(cls):
4001 def base_path(cls):
4002 """
4002 """
4003 Returns the base path where all gists are stored
4003 Returns the base path where all gists are stored
4004
4004
4005 :param cls:
4005 :param cls:
4006 """
4006 """
4007 from rhodecode.model.gist import GIST_STORE_LOC
4007 from rhodecode.model.gist import GIST_STORE_LOC
4008 q = Session().query(RhodeCodeUi)\
4008 q = Session().query(RhodeCodeUi)\
4009 .filter(RhodeCodeUi.ui_key == URL_SEP)
4009 .filter(RhodeCodeUi.ui_key == URL_SEP)
4010 q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
4010 q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
4011 return os.path.join(q.one().ui_value, GIST_STORE_LOC)
4011 return os.path.join(q.one().ui_value, GIST_STORE_LOC)
4012
4012
4013 def get_api_data(self):
4013 def get_api_data(self):
4014 """
4014 """
4015 Common function for generating gist related data for API
4015 Common function for generating gist related data for API
4016 """
4016 """
4017 gist = self
4017 gist = self
4018 data = {
4018 data = {
4019 'gist_id': gist.gist_id,
4019 'gist_id': gist.gist_id,
4020 'type': gist.gist_type,
4020 'type': gist.gist_type,
4021 'access_id': gist.gist_access_id,
4021 'access_id': gist.gist_access_id,
4022 'description': gist.gist_description,
4022 'description': gist.gist_description,
4023 'url': gist.gist_url(),
4023 'url': gist.gist_url(),
4024 'expires': gist.gist_expires,
4024 'expires': gist.gist_expires,
4025 'created_on': gist.created_on,
4025 'created_on': gist.created_on,
4026 'modified_at': gist.modified_at,
4026 'modified_at': gist.modified_at,
4027 'content': None,
4027 'content': None,
4028 'acl_level': gist.acl_level,
4028 'acl_level': gist.acl_level,
4029 }
4029 }
4030 return data
4030 return data
4031
4031
4032 def __json__(self):
4032 def __json__(self):
4033 data = dict(
4033 data = dict(
4034 )
4034 )
4035 data.update(self.get_api_data())
4035 data.update(self.get_api_data())
4036 return data
4036 return data
4037 # SCM functions
4037 # SCM functions
4038
4038
4039 def scm_instance(self, **kwargs):
4039 def scm_instance(self, **kwargs):
4040 full_repo_path = os.path.join(self.base_path(), self.gist_access_id)
4040 full_repo_path = os.path.join(self.base_path(), self.gist_access_id)
4041 return get_vcs_instance(
4041 return get_vcs_instance(
4042 repo_path=safe_str(full_repo_path), create=False)
4042 repo_path=safe_str(full_repo_path), create=False)
4043
4043
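The Gist model above resolves its backing repository by joining the configured base path with the gist access id, which is exactly what scm_instance() does internally. A hedged lookup sketch, assuming a configured RhodeCode instance and a hypothetical access id:

import os

from rhodecode.model.db import Gist

gist = Gist.get_by_access_id('p9x2kqtl')   # hypothetical access id
if gist is not None:
    # same path scm_instance() builds before opening the VCS repository
    repo_path = os.path.join(Gist.base_path(), gist.gist_access_id)
    vcs_repo = gist.scm_instance()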
4044
4044
4045 class ExternalIdentity(Base, BaseModel):
4045 class ExternalIdentity(Base, BaseModel):
4046 __tablename__ = 'external_identities'
4046 __tablename__ = 'external_identities'
4047 __table_args__ = (
4047 __table_args__ = (
4048 Index('local_user_id_idx', 'local_user_id'),
4048 Index('local_user_id_idx', 'local_user_id'),
4049 Index('external_id_idx', 'external_id'),
4049 Index('external_id_idx', 'external_id'),
4050 base_table_args
4050 base_table_args
4051 )
4051 )
4052
4052
4053 external_id = Column('external_id', Unicode(255), default=u'',
4053 external_id = Column('external_id', Unicode(255), default=u'',
4054 primary_key=True)
4054 primary_key=True)
4055 external_username = Column('external_username', Unicode(1024), default=u'')
4055 external_username = Column('external_username', Unicode(1024), default=u'')
4056 local_user_id = Column('local_user_id', Integer(),
4056 local_user_id = Column('local_user_id', Integer(),
4057 ForeignKey('users.user_id'), primary_key=True)
4057 ForeignKey('users.user_id'), primary_key=True)
4058 provider_name = Column('provider_name', Unicode(255), default=u'',
4058 provider_name = Column('provider_name', Unicode(255), default=u'',
4059 primary_key=True)
4059 primary_key=True)
4060 access_token = Column('access_token', String(1024), default=u'')
4060 access_token = Column('access_token', String(1024), default=u'')
4061 alt_token = Column('alt_token', String(1024), default=u'')
4061 alt_token = Column('alt_token', String(1024), default=u'')
4062 token_secret = Column('token_secret', String(1024), default=u'')
4062 token_secret = Column('token_secret', String(1024), default=u'')
4063
4063
4064 @classmethod
4064 @classmethod
4065 def by_external_id_and_provider(cls, external_id, provider_name,
4065 def by_external_id_and_provider(cls, external_id, provider_name,
4066 local_user_id=None):
4066 local_user_id=None):
4067 """
4067 """
4068 Returns ExternalIdentity instance based on search params
4068 Returns ExternalIdentity instance based on search params
4069
4069
4070 :param external_id:
4070 :param external_id:
4071 :param provider_name:
4071 :param provider_name:
4072 :return: ExternalIdentity
4072 :return: ExternalIdentity
4073 """
4073 """
4074 query = cls.query()
4074 query = cls.query()
4075 query = query.filter(cls.external_id == external_id)
4075 query = query.filter(cls.external_id == external_id)
4076 query = query.filter(cls.provider_name == provider_name)
4076 query = query.filter(cls.provider_name == provider_name)
4077 if local_user_id:
4077 if local_user_id:
4078 query = query.filter(cls.local_user_id == local_user_id)
4078 query = query.filter(cls.local_user_id == local_user_id)
4079 return query.first()
4079 return query.first()
4080
4080
4081 @classmethod
4081 @classmethod
4082 def user_by_external_id_and_provider(cls, external_id, provider_name):
4082 def user_by_external_id_and_provider(cls, external_id, provider_name):
4083 """
4083 """
4084 Returns User instance based on search params
4084 Returns User instance based on search params
4085
4085
4086 :param external_id:
4086 :param external_id:
4087 :param provider_name:
4087 :param provider_name:
4088 :return: User
4088 :return: User
4089 """
4089 """
4090 query = User.query()
4090 query = User.query()
4091 query = query.filter(cls.external_id == external_id)
4091 query = query.filter(cls.external_id == external_id)
4092 query = query.filter(cls.provider_name == provider_name)
4092 query = query.filter(cls.provider_name == provider_name)
4093 query = query.filter(User.user_id == cls.local_user_id)
4093 query = query.filter(User.user_id == cls.local_user_id)
4094 return query.first()
4094 return query.first()
4095
4095
4096 @classmethod
4096 @classmethod
4097 def by_local_user_id(cls, local_user_id):
4097 def by_local_user_id(cls, local_user_id):
4098 """
4098 """
4099 Returns all external identities (tokens) registered for the given user
4099 Returns all external identities (tokens) registered for the given user
4100
4100
4101 :param local_user_id:
4101 :param local_user_id:
4102 :return: ExternalIdentity
4102 :return: ExternalIdentity
4103 """
4103 """
4104 query = cls.query()
4104 query = cls.query()
4105 query = query.filter(cls.local_user_id == local_user_id)
4105 query = query.filter(cls.local_user_id == local_user_id)
4106 return query
4106 return query
4107
4107
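A hedged sketch of the ExternalIdentity lookup helpers above, assuming a configured session and hypothetical provider values; note that by_local_user_id() returns a query object rather than a list, so it can be narrowed further before materialising:

from rhodecode.model.db import ExternalIdentity

# resolve the local User bound to an external account
user = ExternalIdentity.user_by_external_id_and_provider(
    external_id=u'12345', provider_name=u'github')

# all identities for a local user id; still a query, so it can be refined
identities = ExternalIdentity.by_local_user_id(local_user_id=1)
github_only = identities.filter(
    ExternalIdentity.provider_name == u'github').all()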
4108
4108
4109 class Integration(Base, BaseModel):
4109 class Integration(Base, BaseModel):
4110 __tablename__ = 'integrations'
4110 __tablename__ = 'integrations'
4111 __table_args__ = (
4111 __table_args__ = (
4112 base_table_args
4112 base_table_args
4113 )
4113 )
4114
4114
4115 integration_id = Column('integration_id', Integer(), primary_key=True)
4115 integration_id = Column('integration_id', Integer(), primary_key=True)
4116 integration_type = Column('integration_type', String(255))
4116 integration_type = Column('integration_type', String(255))
4117 enabled = Column('enabled', Boolean(), nullable=False)
4117 enabled = Column('enabled', Boolean(), nullable=False)
4118 name = Column('name', String(255), nullable=False)
4118 name = Column('name', String(255), nullable=False)
4119 child_repos_only = Column('child_repos_only', Boolean(), nullable=False,
4119 child_repos_only = Column('child_repos_only', Boolean(), nullable=False,
4120 default=False)
4120 default=False)
4121
4121
4122 settings = Column(
4122 settings = Column(
4123 'settings_json', MutationObj.as_mutable(
4123 'settings_json', MutationObj.as_mutable(
4124 JsonType(dialect_map=dict(mysql=UnicodeText(16384)))))
4124 JsonType(dialect_map=dict(mysql=UnicodeText(16384)))))
4125 repo_id = Column(
4125 repo_id = Column(
4126 'repo_id', Integer(), ForeignKey('repositories.repo_id'),
4126 'repo_id', Integer(), ForeignKey('repositories.repo_id'),
4127 nullable=True, unique=None, default=None)
4127 nullable=True, unique=None, default=None)
4128 repo = relationship('Repository', lazy='joined')
4128 repo = relationship('Repository', lazy='joined')
4129
4129
4130 repo_group_id = Column(
4130 repo_group_id = Column(
4131 'repo_group_id', Integer(), ForeignKey('groups.group_id'),
4131 'repo_group_id', Integer(), ForeignKey('groups.group_id'),
4132 nullable=True, unique=None, default=None)
4132 nullable=True, unique=None, default=None)
4133 repo_group = relationship('RepoGroup', lazy='joined')
4133 repo_group = relationship('RepoGroup', lazy='joined')
4134
4134
4135 @property
4135 @property
4136 def scope(self):
4136 def scope(self):
4137 if self.repo:
4137 if self.repo:
4138 return repr(self.repo)
4138 return repr(self.repo)
4139 if self.repo_group:
4139 if self.repo_group:
4140 if self.child_repos_only:
4140 if self.child_repos_only:
4141 return repr(self.repo_group) + ' (child repos only)'
4141 return repr(self.repo_group) + ' (child repos only)'
4142 else:
4142 else:
4143 return repr(self.repo_group) + ' (recursive)'
4143 return repr(self.repo_group) + ' (recursive)'
4144 if self.child_repos_only:
4144 if self.child_repos_only:
4145 return 'root_repos'
4145 return 'root_repos'
4146 return 'global'
4146 return 'global'
4147
4147
4148 def __repr__(self):
4148 def __repr__(self):
4149 return '<Integration(%r, %r)>' % (self.integration_type, self.scope)
4149 return '<Integration(%r, %r)>' % (self.integration_type, self.scope)
4150
4150
4151
4151
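A minimal sketch of how the `scope` property above resolves for an unscoped integration; the transient object here is hypothetical and never persisted:

    integration = Integration()
    integration.child_repos_only = False
    assert integration.scope == 'global'      # no repo / repo_group attached
    integration.child_repos_only = True
    assert integration.scope == 'root_repos'  # still unscoped, limited to root repos

With a repo or repo group attached, scope instead falls through to the repr-based branches shown in the property.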
4152 class RepoReviewRuleUser(Base, BaseModel):
4152 class RepoReviewRuleUser(Base, BaseModel):
4153 __tablename__ = 'repo_review_rules_users'
4153 __tablename__ = 'repo_review_rules_users'
4154 __table_args__ = (
4154 __table_args__ = (
4155 base_table_args
4155 base_table_args
4156 )
4156 )
4157
4157
4158 repo_review_rule_user_id = Column('repo_review_rule_user_id', Integer(), primary_key=True)
4158 repo_review_rule_user_id = Column('repo_review_rule_user_id', Integer(), primary_key=True)
4159 repo_review_rule_id = Column("repo_review_rule_id", Integer(), ForeignKey('repo_review_rules.repo_review_rule_id'))
4159 repo_review_rule_id = Column("repo_review_rule_id", Integer(), ForeignKey('repo_review_rules.repo_review_rule_id'))
4160 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False)
4160 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False)
4161 mandatory = Column("mandatory", Boolean(), nullable=False, default=False)
4161 mandatory = Column("mandatory", Boolean(), nullable=False, default=False)
4162 user = relationship('User')
4162 user = relationship('User')
4163
4163
4164 def rule_data(self):
4164 def rule_data(self):
4165 return {
4165 return {
4166 'mandatory': self.mandatory
4166 'mandatory': self.mandatory
4167 }
4167 }
4168
4168
4169
4169
4170 class RepoReviewRuleUserGroup(Base, BaseModel):
4170 class RepoReviewRuleUserGroup(Base, BaseModel):
4171 __tablename__ = 'repo_review_rules_users_groups'
4171 __tablename__ = 'repo_review_rules_users_groups'
4172 __table_args__ = (
4172 __table_args__ = (
4173 base_table_args
4173 base_table_args
4174 )
4174 )
4175
4175
4176 VOTE_RULE_ALL = -1
4176 VOTE_RULE_ALL = -1
4177
4177
4178 repo_review_rule_users_group_id = Column('repo_review_rule_users_group_id', Integer(), primary_key=True)
4178 repo_review_rule_users_group_id = Column('repo_review_rule_users_group_id', Integer(), primary_key=True)
4179 repo_review_rule_id = Column("repo_review_rule_id", Integer(), ForeignKey('repo_review_rules.repo_review_rule_id'))
4179 repo_review_rule_id = Column("repo_review_rule_id", Integer(), ForeignKey('repo_review_rules.repo_review_rule_id'))
4180 users_group_id = Column("users_group_id", Integer(),ForeignKey('users_groups.users_group_id'), nullable=False)
4180 users_group_id = Column("users_group_id", Integer(),ForeignKey('users_groups.users_group_id'), nullable=False)
4181 mandatory = Column("mandatory", Boolean(), nullable=False, default=False)
4181 mandatory = Column("mandatory", Boolean(), nullable=False, default=False)
4182 vote_rule = Column("vote_rule", Integer(), nullable=True, default=VOTE_RULE_ALL)
4182 vote_rule = Column("vote_rule", Integer(), nullable=True, default=VOTE_RULE_ALL)
4183 users_group = relationship('UserGroup')
4183 users_group = relationship('UserGroup')
4184
4184
4185 def rule_data(self):
4185 def rule_data(self):
4186 return {
4186 return {
4187 'mandatory': self.mandatory,
4187 'mandatory': self.mandatory,
4188 'vote_rule': self.vote_rule
4188 'vote_rule': self.vote_rule
4189 }
4189 }
4190
4190
4191 @property
4191 @property
4192 def vote_rule_label(self):
4192 def vote_rule_label(self):
4193 if not self.vote_rule or self.vote_rule == self.VOTE_RULE_ALL:
4193 if not self.vote_rule or self.vote_rule == self.VOTE_RULE_ALL:
4194 return 'all must vote'
4194 return 'all must vote'
4195 else:
4195 else:
4196 return 'min. vote {}'.format(self.vote_rule)
4196 return 'min. vote {}'.format(self.vote_rule)
4197
4197
4198
4198
4199 class RepoReviewRule(Base, BaseModel):
4199 class RepoReviewRule(Base, BaseModel):
4200 __tablename__ = 'repo_review_rules'
4200 __tablename__ = 'repo_review_rules'
4201 __table_args__ = (
4201 __table_args__ = (
4202 base_table_args
4202 base_table_args
4203 )
4203 )
4204
4204
4205 repo_review_rule_id = Column(
4205 repo_review_rule_id = Column(
4206 'repo_review_rule_id', Integer(), primary_key=True)
4206 'repo_review_rule_id', Integer(), primary_key=True)
4207 repo_id = Column(
4207 repo_id = Column(
4208 "repo_id", Integer(), ForeignKey('repositories.repo_id'))
4208 "repo_id", Integer(), ForeignKey('repositories.repo_id'))
4209 repo = relationship('Repository', backref='review_rules')
4209 repo = relationship('Repository', backref='review_rules')
4210
4210
4211 review_rule_name = Column('review_rule_name', String(255))
4211 review_rule_name = Column('review_rule_name', String(255))
4212 _branch_pattern = Column("branch_pattern", UnicodeText().with_variant(UnicodeText(255), 'mysql'), default=u'*') # glob
4212 _branch_pattern = Column("branch_pattern", UnicodeText().with_variant(UnicodeText(255), 'mysql'), default=u'*') # glob
4213 _target_branch_pattern = Column("target_branch_pattern", UnicodeText().with_variant(UnicodeText(255), 'mysql'), default=u'*') # glob
4213 _target_branch_pattern = Column("target_branch_pattern", UnicodeText().with_variant(UnicodeText(255), 'mysql'), default=u'*') # glob
4214 _file_pattern = Column("file_pattern", UnicodeText().with_variant(UnicodeText(255), 'mysql'), default=u'*') # glob
4214 _file_pattern = Column("file_pattern", UnicodeText().with_variant(UnicodeText(255), 'mysql'), default=u'*') # glob
4215
4215
4216 use_authors_for_review = Column("use_authors_for_review", Boolean(), nullable=False, default=False)
4216 use_authors_for_review = Column("use_authors_for_review", Boolean(), nullable=False, default=False)
4217 forbid_author_to_review = Column("forbid_author_to_review", Boolean(), nullable=False, default=False)
4217 forbid_author_to_review = Column("forbid_author_to_review", Boolean(), nullable=False, default=False)
4218 forbid_commit_author_to_review = Column("forbid_commit_author_to_review", Boolean(), nullable=False, default=False)
4218 forbid_commit_author_to_review = Column("forbid_commit_author_to_review", Boolean(), nullable=False, default=False)
4219 forbid_adding_reviewers = Column("forbid_adding_reviewers", Boolean(), nullable=False, default=False)
4219 forbid_adding_reviewers = Column("forbid_adding_reviewers", Boolean(), nullable=False, default=False)
4220
4220
4221 rule_users = relationship('RepoReviewRuleUser')
4221 rule_users = relationship('RepoReviewRuleUser')
4222 rule_user_groups = relationship('RepoReviewRuleUserGroup')
4222 rule_user_groups = relationship('RepoReviewRuleUserGroup')
4223
4223
4224 def _validate_pattern(self, value):
4224 def _validate_pattern(self, value):
4225 re.compile('^' + glob2re(value) + '$')
4225 re.compile('^' + glob2re(value) + '$')
4226
4226
4227 @hybrid_property
4227 @hybrid_property
4228 def source_branch_pattern(self):
4228 def source_branch_pattern(self):
4229 return self._branch_pattern or '*'
4229 return self._branch_pattern or '*'
4230
4230
4231 @source_branch_pattern.setter
4231 @source_branch_pattern.setter
4232 def source_branch_pattern(self, value):
4232 def source_branch_pattern(self, value):
4233 self._validate_pattern(value)
4233 self._validate_pattern(value)
4234 self._branch_pattern = value or '*'
4234 self._branch_pattern = value or '*'
4235
4235
4236 @hybrid_property
4236 @hybrid_property
4237 def target_branch_pattern(self):
4237 def target_branch_pattern(self):
4238 return self._target_branch_pattern or '*'
4238 return self._target_branch_pattern or '*'
4239
4239
4240 @target_branch_pattern.setter
4240 @target_branch_pattern.setter
4241 def target_branch_pattern(self, value):
4241 def target_branch_pattern(self, value):
4242 self._validate_pattern(value)
4242 self._validate_pattern(value)
4243 self._target_branch_pattern = value or '*'
4243 self._target_branch_pattern = value or '*'
4244
4244
4245 @hybrid_property
4245 @hybrid_property
4246 def file_pattern(self):
4246 def file_pattern(self):
4247 return self._file_pattern or '*'
4247 return self._file_pattern or '*'
4248
4248
4249 @file_pattern.setter
4249 @file_pattern.setter
4250 def file_pattern(self, value):
4250 def file_pattern(self, value):
4251 self._validate_pattern(value)
4251 self._validate_pattern(value)
4252 self._file_pattern = value or '*'
4252 self._file_pattern = value or '*'
4253
4253
4254 def matches(self, source_branch, target_branch, files_changed):
4254 def matches(self, source_branch, target_branch, files_changed):
4255 """
4255 """
4256 Check if this review rule matches a branch/files in a pull request
4256 Check if this review rule matches a branch/files in a pull request
4257
4257
4258 :param source_branch: source branch name for the commit
4258 :param source_branch: source branch name for the commit
4259 :param target_branch: target branch name for the commit
4259 :param target_branch: target branch name for the commit
4260 :param files_changed: list of file paths changed in the pull request
4260 :param files_changed: list of file paths changed in the pull request
4261 """
4261 """
4262
4262
4263 source_branch = source_branch or ''
4263 source_branch = source_branch or ''
4264 target_branch = target_branch or ''
4264 target_branch = target_branch or ''
4265 files_changed = files_changed or []
4265 files_changed = files_changed or []
4266
4266
4267 branch_matches = True
4267 branch_matches = True
4268 if source_branch or target_branch:
4268 if source_branch or target_branch:
4269 if self.source_branch_pattern == '*':
4269 if self.source_branch_pattern == '*':
4270 source_branch_match = True
4270 source_branch_match = True
4271 else:
4271 else:
4272 if self.source_branch_pattern.startswith('re:'):
4272 if self.source_branch_pattern.startswith('re:'):
4273 source_pattern = self.source_branch_pattern[3:]
4273 source_pattern = self.source_branch_pattern[3:]
4274 else:
4274 else:
4275 source_pattern = '^' + glob2re(self.source_branch_pattern) + '$'
4275 source_pattern = '^' + glob2re(self.source_branch_pattern) + '$'
4276 source_branch_regex = re.compile(source_pattern)
4276 source_branch_regex = re.compile(source_pattern)
4277 source_branch_match = bool(source_branch_regex.search(source_branch))
4277 source_branch_match = bool(source_branch_regex.search(source_branch))
4278 if self.target_branch_pattern == '*':
4278 if self.target_branch_pattern == '*':
4279 target_branch_match = True
4279 target_branch_match = True
4280 else:
4280 else:
4281 if self.target_branch_pattern.startswith('re:'):
4281 if self.target_branch_pattern.startswith('re:'):
4282 target_pattern = self.target_branch_pattern[3:]
4282 target_pattern = self.target_branch_pattern[3:]
4283 else:
4283 else:
4284 target_pattern = '^' + glob2re(self.target_branch_pattern) + '$'
4284 target_pattern = '^' + glob2re(self.target_branch_pattern) + '$'
4285 target_branch_regex = re.compile(target_pattern)
4285 target_branch_regex = re.compile(target_pattern)
4286 target_branch_match = bool(target_branch_regex.search(target_branch))
4286 target_branch_match = bool(target_branch_regex.search(target_branch))
4287
4287
4288 branch_matches = source_branch_match and target_branch_match
4288 branch_matches = source_branch_match and target_branch_match
4289
4289
4290 files_matches = True
4290 files_matches = True
4291 if self.file_pattern != '*':
4291 if self.file_pattern != '*':
4292 files_matches = False
4292 files_matches = False
4293 if self.file_pattern.startswith('re:'):
4293 if self.file_pattern.startswith('re:'):
4294 file_pattern = self.file_pattern[3:]
4294 file_pattern = self.file_pattern[3:]
4295 else:
4295 else:
4296 file_pattern = glob2re(self.file_pattern)
4296 file_pattern = glob2re(self.file_pattern)
4297 file_regex = re.compile(file_pattern)
4297 file_regex = re.compile(file_pattern)
4298 for filename in files_changed:
4298 for filename in files_changed:
4299 if file_regex.search(filename):
4299 if file_regex.search(filename):
4300 files_matches = True
4300 files_matches = True
4301 break
4301 break
4302
4302
4303 return branch_matches and files_matches
4303 return branch_matches and files_matches
4304
4304
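A minimal sketch of how a configured rule might be evaluated with `matches()`; the rule instance, pattern values, and file paths are hypothetical, and the globs assume the usual glob2re translation used above:

    rule = RepoReviewRule()
    rule.source_branch_pattern = 'feature/*'              # glob, validated on assignment
    rule.target_branch_pattern = 're:^(master|stable)$'   # explicit regex via the re: prefix
    rule.file_pattern = '*.py'

    # True only when both branch patterns match and at least one changed file matches
    assert rule.matches('feature/cache', 'master', ['rhodecode/model/settings.py'])
    assert not rule.matches('feature/cache', 'develop', ['rhodecode/model/settings.py'])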
4305 @property
4305 @property
4306 def review_users(self):
4306 def review_users(self):
4307 """ Returns the users which this rule applies to """
4307 """ Returns the users which this rule applies to """
4308
4308
4309 users = collections.OrderedDict()
4309 users = collections.OrderedDict()
4310
4310
4311 for rule_user in self.rule_users:
4311 for rule_user in self.rule_users:
4312 if rule_user.user.active:
4312 if rule_user.user.active:
4313 if rule_user.user not in users:
4313 if rule_user.user not in users:
4314 users[rule_user.user.username] = {
4314 users[rule_user.user.username] = {
4315 'user': rule_user.user,
4315 'user': rule_user.user,
4316 'source': 'user',
4316 'source': 'user',
4317 'source_data': {},
4317 'source_data': {},
4318 'data': rule_user.rule_data()
4318 'data': rule_user.rule_data()
4319 }
4319 }
4320
4320
4321 for rule_user_group in self.rule_user_groups:
4321 for rule_user_group in self.rule_user_groups:
4322 source_data = {
4322 source_data = {
4323 'user_group_id': rule_user_group.users_group.users_group_id,
4323 'user_group_id': rule_user_group.users_group.users_group_id,
4324 'name': rule_user_group.users_group.users_group_name,
4324 'name': rule_user_group.users_group.users_group_name,
4325 'members': len(rule_user_group.users_group.members)
4325 'members': len(rule_user_group.users_group.members)
4326 }
4326 }
4327 for member in rule_user_group.users_group.members:
4327 for member in rule_user_group.users_group.members:
4328 if member.user.active:
4328 if member.user.active:
4329 key = member.user.username
4329 key = member.user.username
4330 if key in users:
4330 if key in users:
4331 # skip this member as we already have them
4331 # skip this member as we already have them
4332 # this prevents overriding the "first" matched
4332 # this prevents overriding the "first" matched
4333 # users with duplicates from multiple groups
4333 # users with duplicates from multiple groups
4334 continue
4334 continue
4335
4335
4336 users[key] = {
4336 users[key] = {
4337 'user': member.user,
4337 'user': member.user,
4338 'source': 'user_group',
4338 'source': 'user_group',
4339 'source_data': source_data,
4339 'source_data': source_data,
4340 'data': rule_user_group.rule_data()
4340 'data': rule_user_group.rule_data()
4341 }
4341 }
4342
4342
4343 return users
4343 return users
4344
4344
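A short sketch of consuming the mapping returned by `review_users` above; the `rule` object is hypothetical:

    for username, entry in rule.review_users.items():
        # entry['source'] is 'user' or 'user_group'; entry['data'] is the per-rule data
        label = 'mandatory' if entry['data'].get('mandatory') else 'optional'
        print('{}: {} reviewer via {}'.format(username, label, entry['source']))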
4345 def user_group_vote_rule(self):
4345 def user_group_vote_rule(self):
4346 rules = []
4346 rules = []
4347 if self.rule_user_groups:
4347 if self.rule_user_groups:
4348 for user_group in self.rule_user_groups:
4348 for user_group in self.rule_user_groups:
4349 rules.append(user_group)
4349 rules.append(user_group)
4350 return rules
4350 return rules
4351
4351
4352 def __repr__(self):
4352 def __repr__(self):
4353 return '<RepoReviewerRule(id=%r, repo=%r)>' % (
4353 return '<RepoReviewerRule(id=%r, repo=%r)>' % (
4354 self.repo_review_rule_id, self.repo)
4354 self.repo_review_rule_id, self.repo)
4355
4355
4356
4356
4357 class ScheduleEntry(Base, BaseModel):
4357 class ScheduleEntry(Base, BaseModel):
4358 __tablename__ = 'schedule_entries'
4358 __tablename__ = 'schedule_entries'
4359 __table_args__ = (
4359 __table_args__ = (
4360 UniqueConstraint('schedule_name', name='s_schedule_name_idx'),
4360 UniqueConstraint('schedule_name', name='s_schedule_name_idx'),
4361 UniqueConstraint('task_uid', name='s_task_uid_idx'),
4361 UniqueConstraint('task_uid', name='s_task_uid_idx'),
4362 base_table_args,
4362 base_table_args,
4363 )
4363 )
4364
4364
4365 schedule_types = ['crontab', 'timedelta', 'integer']
4365 schedule_types = ['crontab', 'timedelta', 'integer']
4366 schedule_entry_id = Column('schedule_entry_id', Integer(), primary_key=True)
4366 schedule_entry_id = Column('schedule_entry_id', Integer(), primary_key=True)
4367
4367
4368 schedule_name = Column("schedule_name", String(255), nullable=False, unique=None, default=None)
4368 schedule_name = Column("schedule_name", String(255), nullable=False, unique=None, default=None)
4369 schedule_description = Column("schedule_description", String(10000), nullable=True, unique=None, default=None)
4369 schedule_description = Column("schedule_description", String(10000), nullable=True, unique=None, default=None)
4370 schedule_enabled = Column("schedule_enabled", Boolean(), nullable=False, unique=None, default=True)
4370 schedule_enabled = Column("schedule_enabled", Boolean(), nullable=False, unique=None, default=True)
4371
4371
4372 _schedule_type = Column("schedule_type", String(255), nullable=False, unique=None, default=None)
4372 _schedule_type = Column("schedule_type", String(255), nullable=False, unique=None, default=None)
4373 schedule_definition = Column('schedule_definition_json', MutationObj.as_mutable(JsonType(default=lambda: "", dialect_map=dict(mysql=LONGTEXT()))))
4373 schedule_definition = Column('schedule_definition_json', MutationObj.as_mutable(JsonType(default=lambda: "", dialect_map=dict(mysql=LONGTEXT()))))
4374
4374
4375 schedule_last_run = Column('schedule_last_run', DateTime(timezone=False), nullable=True, unique=None, default=None)
4375 schedule_last_run = Column('schedule_last_run', DateTime(timezone=False), nullable=True, unique=None, default=None)
4376 schedule_total_run_count = Column('schedule_total_run_count', Integer(), nullable=True, unique=None, default=0)
4376 schedule_total_run_count = Column('schedule_total_run_count', Integer(), nullable=True, unique=None, default=0)
4377
4377
4378 # task
4378 # task
4379 task_uid = Column("task_uid", String(255), nullable=False, unique=None, default=None)
4379 task_uid = Column("task_uid", String(255), nullable=False, unique=None, default=None)
4380 task_dot_notation = Column("task_dot_notation", String(4096), nullable=False, unique=None, default=None)
4380 task_dot_notation = Column("task_dot_notation", String(4096), nullable=False, unique=None, default=None)
4381 task_args = Column('task_args_json', MutationObj.as_mutable(JsonType(default=list, dialect_map=dict(mysql=LONGTEXT()))))
4381 task_args = Column('task_args_json', MutationObj.as_mutable(JsonType(default=list, dialect_map=dict(mysql=LONGTEXT()))))
4382 task_kwargs = Column('task_kwargs_json', MutationObj.as_mutable(JsonType(default=dict, dialect_map=dict(mysql=LONGTEXT()))))
4382 task_kwargs = Column('task_kwargs_json', MutationObj.as_mutable(JsonType(default=dict, dialect_map=dict(mysql=LONGTEXT()))))
4383
4383
4384 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
4384 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
4385 updated_on = Column('updated_on', DateTime(timezone=False), nullable=True, unique=None, default=None)
4385 updated_on = Column('updated_on', DateTime(timezone=False), nullable=True, unique=None, default=None)
4386
4386
4387 @hybrid_property
4387 @hybrid_property
4388 def schedule_type(self):
4388 def schedule_type(self):
4389 return self._schedule_type
4389 return self._schedule_type
4390
4390
4391 @schedule_type.setter
4391 @schedule_type.setter
4392 def schedule_type(self, val):
4392 def schedule_type(self, val):
4393 if val not in self.schedule_types:
4393 if val not in self.schedule_types:
4394 raise ValueError('Value must be one of `{}` and got `{}`'.format(
4394 raise ValueError('Value must be one of `{}` and got `{}`'.format(
4395 self.schedule_types, val))
4395 self.schedule_types, val))
4396
4396
4397 self._schedule_type = val
4397 self._schedule_type = val
4398
4398
4399 @classmethod
4399 @classmethod
4400 def get_uid(cls, obj):
4400 def get_uid(cls, obj):
4401 args = obj.task_args
4401 args = obj.task_args
4402 kwargs = obj.task_kwargs
4402 kwargs = obj.task_kwargs
4403 if isinstance(args, JsonRaw):
4403 if isinstance(args, JsonRaw):
4404 try:
4404 try:
4405 args = json.loads(args)
4405 args = json.loads(args)
4406 except ValueError:
4406 except ValueError:
4407 args = tuple()
4407 args = tuple()
4408
4408
4409 if isinstance(kwargs, JsonRaw):
4409 if isinstance(kwargs, JsonRaw):
4410 try:
4410 try:
4411 kwargs = json.loads(kwargs)
4411 kwargs = json.loads(kwargs)
4412 except ValueError:
4412 except ValueError:
4413 kwargs = dict()
4413 kwargs = dict()
4414
4414
4415 dot_notation = obj.task_dot_notation
4415 dot_notation = obj.task_dot_notation
4416 val = '.'.join(map(safe_str, [
4416 val = '.'.join(map(safe_str, [
4417 sorted(dot_notation), args, sorted(kwargs.items())]))
4417 sorted(dot_notation), args, sorted(kwargs.items())]))
4418 return hashlib.sha1(val).hexdigest()
4418 return hashlib.sha1(val).hexdigest()
4419
4419
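A minimal sketch of the uid derivation above; the entry and task path are hypothetical. `get_uid` hashes the task dot notation together with its args and kwargs, which is what keeps the unique `task_uid` constraint (and the before_insert/before_update listeners further down) stable for identical schedules:

    entry = ScheduleEntry()
    entry.task_dot_notation = 'rhodecode.lib.celerylib.tasks.send_email'
    entry.task_args = []
    entry.task_kwargs = {'recipients': ['admin@example.com']}

    # identical task + arguments always hash to the same uid
    assert ScheduleEntry.get_uid(entry) == ScheduleEntry.get_uid(entry)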
4420 @classmethod
4420 @classmethod
4421 def get_by_schedule_name(cls, schedule_name):
4421 def get_by_schedule_name(cls, schedule_name):
4422 return cls.query().filter(cls.schedule_name == schedule_name).scalar()
4422 return cls.query().filter(cls.schedule_name == schedule_name).scalar()
4423
4423
4424 @classmethod
4424 @classmethod
4425 def get_by_schedule_id(cls, schedule_id):
4425 def get_by_schedule_id(cls, schedule_id):
4426 return cls.query().filter(cls.schedule_entry_id == schedule_id).scalar()
4426 return cls.query().filter(cls.schedule_entry_id == schedule_id).scalar()
4427
4427
4428 @property
4428 @property
4429 def task(self):
4429 def task(self):
4430 return self.task_dot_notation
4430 return self.task_dot_notation
4431
4431
4432 @property
4432 @property
4433 def schedule(self):
4433 def schedule(self):
4434 from rhodecode.lib.celerylib.utils import raw_2_schedule
4434 from rhodecode.lib.celerylib.utils import raw_2_schedule
4435 schedule = raw_2_schedule(self.schedule_definition, self.schedule_type)
4435 schedule = raw_2_schedule(self.schedule_definition, self.schedule_type)
4436 return schedule
4436 return schedule
4437
4437
4438 @property
4438 @property
4439 def args(self):
4439 def args(self):
4440 try:
4440 try:
4441 return list(self.task_args or [])
4441 return list(self.task_args or [])
4442 except ValueError:
4442 except ValueError:
4443 return list()
4443 return list()
4444
4444
4445 @property
4445 @property
4446 def kwargs(self):
4446 def kwargs(self):
4447 try:
4447 try:
4448 return dict(self.task_kwargs or {})
4448 return dict(self.task_kwargs or {})
4449 except ValueError:
4449 except ValueError:
4450 return dict()
4450 return dict()
4451
4451
4452 def _as_raw(self, val):
4452 def _as_raw(self, val):
4453 if hasattr(val, 'de_coerce'):
4453 if hasattr(val, 'de_coerce'):
4454 val = val.de_coerce()
4454 val = val.de_coerce()
4455 if val:
4455 if val:
4456 val = json.dumps(val)
4456 val = json.dumps(val)
4457
4457
4458 return val
4458 return val
4459
4459
4460 @property
4460 @property
4461 def schedule_definition_raw(self):
4461 def schedule_definition_raw(self):
4462 return self._as_raw(self.schedule_definition)
4462 return self._as_raw(self.schedule_definition)
4463
4463
4464 @property
4464 @property
4465 def args_raw(self):
4465 def args_raw(self):
4466 return self._as_raw(self.task_args)
4466 return self._as_raw(self.task_args)
4467
4467
4468 @property
4468 @property
4469 def kwargs_raw(self):
4469 def kwargs_raw(self):
4470 return self._as_raw(self.task_kwargs)
4470 return self._as_raw(self.task_kwargs)
4471
4471
4472 def __repr__(self):
4472 def __repr__(self):
4473 return '<DB:ScheduleEntry({}:{})>'.format(
4473 return '<DB:ScheduleEntry({}:{})>'.format(
4474 self.schedule_entry_id, self.schedule_name)
4474 self.schedule_entry_id, self.schedule_name)
4475
4475
4476
4476
4477 @event.listens_for(ScheduleEntry, 'before_update')
4477 @event.listens_for(ScheduleEntry, 'before_update')
4478 def update_task_uid(mapper, connection, target):
4478 def update_task_uid(mapper, connection, target):
4479 target.task_uid = ScheduleEntry.get_uid(target)
4479 target.task_uid = ScheduleEntry.get_uid(target)
4480
4480
4481
4481
4482 @event.listens_for(ScheduleEntry, 'before_insert')
4482 @event.listens_for(ScheduleEntry, 'before_insert')
4483 def set_task_uid(mapper, connection, target):
4483 def set_task_uid(mapper, connection, target):
4484 target.task_uid = ScheduleEntry.get_uid(target)
4484 target.task_uid = ScheduleEntry.get_uid(target)
4485
4485
4486
4486
4487 class DbMigrateVersion(Base, BaseModel):
4487 class DbMigrateVersion(Base, BaseModel):
4488 __tablename__ = 'db_migrate_version'
4488 __tablename__ = 'db_migrate_version'
4489 __table_args__ = (
4489 __table_args__ = (
4490 base_table_args,
4490 base_table_args,
4491 )
4491 )
4492
4492
4493 repository_id = Column('repository_id', String(250), primary_key=True)
4493 repository_id = Column('repository_id', String(250), primary_key=True)
4494 repository_path = Column('repository_path', Text)
4494 repository_path = Column('repository_path', Text)
4495 version = Column('version', Integer)
4495 version = Column('version', Integer)
4496
4496
4497
4497
4498 class DbSession(Base, BaseModel):
4498 class DbSession(Base, BaseModel):
4499 __tablename__ = 'db_session'
4499 __tablename__ = 'db_session'
4500 __table_args__ = (
4500 __table_args__ = (
4501 base_table_args,
4501 base_table_args,
4502 )
4502 )
4503
4503
4504 def __repr__(self):
4504 def __repr__(self):
4505 return '<DB:DbSession({})>'.format(self.id)
4505 return '<DB:DbSession({})>'.format(self.id)
4506
4506
4507 id = Column('id', Integer())
4507 id = Column('id', Integer())
4508 namespace = Column('namespace', String(255), primary_key=True)
4508 namespace = Column('namespace', String(255), primary_key=True)
4509 accessed = Column('accessed', DateTime, nullable=False)
4509 accessed = Column('accessed', DateTime, nullable=False)
4510 created = Column('created', DateTime, nullable=False)
4510 created = Column('created', DateTime, nullable=False)
4511 data = Column('data', PickleType, nullable=False)
4511 data = Column('data', PickleType, nullable=False)
4512
4512
4513
4513
4514 class BeakerCache(Base, BaseModel):
4514 class BeakerCache(Base, BaseModel):
4515 __tablename__ = 'beaker_cache'
4515 __tablename__ = 'beaker_cache'
4516 __table_args__ = (
4516 __table_args__ = (
4517 base_table_args,
4517 base_table_args,
4518 )
4518 )
4519
4519
4520 def __repr__(self):
4520 def __repr__(self):
4521 return '<DB:DbSession({})>'.format(self.id)
4521 return '<DB:DbSession({})>'.format(self.id)
4522
4522
4523 id = Column('id', Integer())
4523 id = Column('id', Integer())
4524 namespace = Column('namespace', String(255), primary_key=True)
4524 namespace = Column('namespace', String(255), primary_key=True)
4525 accessed = Column('accessed', DateTime, nullable=False)
4525 accessed = Column('accessed', DateTime, nullable=False)
4526 created = Column('created', DateTime, nullable=False)
4526 created = Column('created', DateTime, nullable=False)
4527 data = Column('data', PickleType, nullable=False)
4527 data = Column('data', PickleType, nullable=False)
@@ -1,50 +1,45 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2018 RhodeCode GmbH
3 # Copyright (C) 2010-2018 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 SQLAlchemy Metadata and Session object
22 SQLAlchemy Metadata and Session object
23 """
23 """
24
24
25 from sqlalchemy.ext.declarative import declarative_base
25 from sqlalchemy.ext.declarative import declarative_base
26 from sqlalchemy.orm import scoped_session, sessionmaker
26 from sqlalchemy.orm import scoped_session, sessionmaker
27 from beaker import cache
28
27
29 from rhodecode.lib import caching_query
28 from rhodecode.lib import caching_query
30
29
31
30 __all__ = ['Base', 'Session']
32 # Beaker CacheManager. A home base for cache configurations.
33 cache_manager = cache.CacheManager()
34
31
35 __all__ = ['Base', 'Session']
32 # scoped_session. Apply our custom CachingQuery class to it,
36 #
33 # using a callable that will associate the dictionary
37 # SQLAlchemy session manager. Updated by model.init_model()
34 # of regions with the Query.
38 #
35 # to use cache use this in query
36 # .options(FromCache("sqlalchemy_cache_type", "cachekey"))
39 Session = scoped_session(
37 Session = scoped_session(
40 sessionmaker(
38 sessionmaker(
41 query_cls=caching_query.query_callable(cache_manager),
39 query_cls=caching_query.query_callable(),
42 expire_on_commit=True,
40 expire_on_commit=True,
43 )
41 )
44 )
42 )
45
43
46 # The declarative Base
44 # The declarative Base
47 Base = declarative_base()
45 Base = declarative_base()
48
49 #to use cache use this in query
50 #.options(FromCache("sqlalchemy_cache_type", "cachekey"))
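The comment above documents the query-level entry point. A minimal sketch of that usage against the dogpile-backed setup, assuming the FromCache option exported by rhodecode.lib.caching_query; the cache key is hypothetical:

    from rhodecode.lib.caching_query import FromCache
    from rhodecode.model.db import RhodeCodeSetting
    from rhodecode.model.meta import Session

    settings = Session().query(RhodeCodeSetting)\
        .options(FromCache('sql_cache_short', 'get_all_app_settings'))\
        .all()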
@@ -1,830 +1,829 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2018 RhodeCode GmbH
3 # Copyright (C) 2010-2018 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import os
21 import os
22 import hashlib
22 import hashlib
23 import logging
23 import logging
24 import time
24 from collections import namedtuple
25 from collections import namedtuple
25 from functools import wraps
26 from functools import wraps
26 import bleach
27 import bleach
27
28
28 from rhodecode.lib import caches
29 from rhodecode.lib import caches, rc_cache
29 from rhodecode.lib.utils2 import (
30 from rhodecode.lib.utils2 import (
30 Optional, AttributeDict, safe_str, remove_prefix, str2bool)
31 Optional, AttributeDict, safe_str, remove_prefix, str2bool)
31 from rhodecode.lib.vcs.backends import base
32 from rhodecode.lib.vcs.backends import base
32 from rhodecode.model import BaseModel
33 from rhodecode.model import BaseModel
33 from rhodecode.model.db import (
34 from rhodecode.model.db import (
34 RepoRhodeCodeUi, RepoRhodeCodeSetting, RhodeCodeUi, RhodeCodeSetting)
35 RepoRhodeCodeUi, RepoRhodeCodeSetting, RhodeCodeUi, RhodeCodeSetting)
35 from rhodecode.model.meta import Session
36 from rhodecode.model.meta import Session
36
37
37
38
38 log = logging.getLogger(__name__)
39 log = logging.getLogger(__name__)
39
40
40
41
41 UiSetting = namedtuple(
42 UiSetting = namedtuple(
42 'UiSetting', ['section', 'key', 'value', 'active'])
43 'UiSetting', ['section', 'key', 'value', 'active'])
43
44
44 SOCIAL_PLUGINS_LIST = ['github', 'bitbucket', 'twitter', 'google']
45 SOCIAL_PLUGINS_LIST = ['github', 'bitbucket', 'twitter', 'google']
45
46
46
47
47 class SettingNotFound(Exception):
48 class SettingNotFound(Exception):
48 def __init__(self, setting_id):
49 def __init__(self, setting_id):
49 msg = 'Setting `{}` is not found'.format(setting_id)
50 msg = 'Setting `{}` is not found'.format(setting_id)
50 super(SettingNotFound, self).__init__(msg)
51 super(SettingNotFound, self).__init__(msg)
51
52
52
53
53 class SettingsModel(BaseModel):
54 class SettingsModel(BaseModel):
54 BUILTIN_HOOKS = (
55 BUILTIN_HOOKS = (
55 RhodeCodeUi.HOOK_REPO_SIZE, RhodeCodeUi.HOOK_PUSH,
56 RhodeCodeUi.HOOK_REPO_SIZE, RhodeCodeUi.HOOK_PUSH,
56 RhodeCodeUi.HOOK_PRE_PUSH, RhodeCodeUi.HOOK_PRETX_PUSH,
57 RhodeCodeUi.HOOK_PRE_PUSH, RhodeCodeUi.HOOK_PRETX_PUSH,
57 RhodeCodeUi.HOOK_PULL, RhodeCodeUi.HOOK_PRE_PULL,
58 RhodeCodeUi.HOOK_PULL, RhodeCodeUi.HOOK_PRE_PULL,
58 RhodeCodeUi.HOOK_PUSH_KEY,)
59 RhodeCodeUi.HOOK_PUSH_KEY,)
59 HOOKS_SECTION = 'hooks'
60 HOOKS_SECTION = 'hooks'
60
61
61 def __init__(self, sa=None, repo=None):
62 def __init__(self, sa=None, repo=None):
62 self.repo = repo
63 self.repo = repo
63 self.UiDbModel = RepoRhodeCodeUi if repo else RhodeCodeUi
64 self.UiDbModel = RepoRhodeCodeUi if repo else RhodeCodeUi
64 self.SettingsDbModel = (
65 self.SettingsDbModel = (
65 RepoRhodeCodeSetting if repo else RhodeCodeSetting)
66 RepoRhodeCodeSetting if repo else RhodeCodeSetting)
66 super(SettingsModel, self).__init__(sa)
67 super(SettingsModel, self).__init__(sa)
67
68
68 def get_ui_by_key(self, key):
69 def get_ui_by_key(self, key):
69 q = self.UiDbModel.query()
70 q = self.UiDbModel.query()
70 q = q.filter(self.UiDbModel.ui_key == key)
71 q = q.filter(self.UiDbModel.ui_key == key)
71 q = self._filter_by_repo(RepoRhodeCodeUi, q)
72 q = self._filter_by_repo(RepoRhodeCodeUi, q)
72 return q.scalar()
73 return q.scalar()
73
74
74 def get_ui_by_section(self, section):
75 def get_ui_by_section(self, section):
75 q = self.UiDbModel.query()
76 q = self.UiDbModel.query()
76 q = q.filter(self.UiDbModel.ui_section == section)
77 q = q.filter(self.UiDbModel.ui_section == section)
77 q = self._filter_by_repo(RepoRhodeCodeUi, q)
78 q = self._filter_by_repo(RepoRhodeCodeUi, q)
78 return q.all()
79 return q.all()
79
80
80 def get_ui_by_section_and_key(self, section, key):
81 def get_ui_by_section_and_key(self, section, key):
81 q = self.UiDbModel.query()
82 q = self.UiDbModel.query()
82 q = q.filter(self.UiDbModel.ui_section == section)
83 q = q.filter(self.UiDbModel.ui_section == section)
83 q = q.filter(self.UiDbModel.ui_key == key)
84 q = q.filter(self.UiDbModel.ui_key == key)
84 q = self._filter_by_repo(RepoRhodeCodeUi, q)
85 q = self._filter_by_repo(RepoRhodeCodeUi, q)
85 return q.scalar()
86 return q.scalar()
86
87
87 def get_ui(self, section=None, key=None):
88 def get_ui(self, section=None, key=None):
88 q = self.UiDbModel.query()
89 q = self.UiDbModel.query()
89 q = self._filter_by_repo(RepoRhodeCodeUi, q)
90 q = self._filter_by_repo(RepoRhodeCodeUi, q)
90
91
91 if section:
92 if section:
92 q = q.filter(self.UiDbModel.ui_section == section)
93 q = q.filter(self.UiDbModel.ui_section == section)
93 if key:
94 if key:
94 q = q.filter(self.UiDbModel.ui_key == key)
95 q = q.filter(self.UiDbModel.ui_key == key)
95
96
96 # TODO: mikhail: add caching
97 # TODO: mikhail: add caching
97 result = [
98 result = [
98 UiSetting(
99 UiSetting(
99 section=safe_str(r.ui_section), key=safe_str(r.ui_key),
100 section=safe_str(r.ui_section), key=safe_str(r.ui_key),
100 value=safe_str(r.ui_value), active=r.ui_active
101 value=safe_str(r.ui_value), active=r.ui_active
101 )
102 )
102 for r in q.all()
103 for r in q.all()
103 ]
104 ]
104 return result
105 return result
105
106
106 def get_builtin_hooks(self):
107 def get_builtin_hooks(self):
107 q = self.UiDbModel.query()
108 q = self.UiDbModel.query()
108 q = q.filter(self.UiDbModel.ui_key.in_(self.BUILTIN_HOOKS))
109 q = q.filter(self.UiDbModel.ui_key.in_(self.BUILTIN_HOOKS))
109 return self._get_hooks(q)
110 return self._get_hooks(q)
110
111
111 def get_custom_hooks(self):
112 def get_custom_hooks(self):
112 q = self.UiDbModel.query()
113 q = self.UiDbModel.query()
113 q = q.filter(~self.UiDbModel.ui_key.in_(self.BUILTIN_HOOKS))
114 q = q.filter(~self.UiDbModel.ui_key.in_(self.BUILTIN_HOOKS))
114 return self._get_hooks(q)
115 return self._get_hooks(q)
115
116
116 def create_ui_section_value(self, section, val, key=None, active=True):
117 def create_ui_section_value(self, section, val, key=None, active=True):
117 new_ui = self.UiDbModel()
118 new_ui = self.UiDbModel()
118 new_ui.ui_section = section
119 new_ui.ui_section = section
119 new_ui.ui_value = val
120 new_ui.ui_value = val
120 new_ui.ui_active = active
121 new_ui.ui_active = active
121
122
122 if self.repo:
123 if self.repo:
123 repo = self._get_repo(self.repo)
124 repo = self._get_repo(self.repo)
124 repository_id = repo.repo_id
125 repository_id = repo.repo_id
125 new_ui.repository_id = repository_id
126 new_ui.repository_id = repository_id
126
127
127 if not key:
128 if not key:
128 # keys are unique so they need appended info
129 # keys are unique so they need appended info
129 if self.repo:
130 if self.repo:
130 key = hashlib.sha1(
131 key = hashlib.sha1(
131 '{}{}{}'.format(section, val, repository_id)).hexdigest()
132 '{}{}{}'.format(section, val, repository_id)).hexdigest()
132 else:
133 else:
133 key = hashlib.sha1('{}{}'.format(section, val)).hexdigest()
134 key = hashlib.sha1('{}{}'.format(section, val)).hexdigest()
134
135
135 new_ui.ui_key = key
136 new_ui.ui_key = key
136
137
137 Session().add(new_ui)
138 Session().add(new_ui)
138 return new_ui
139 return new_ui
139
140
140 def create_or_update_hook(self, key, value):
141 def create_or_update_hook(self, key, value):
141 ui = (
142 ui = (
142 self.get_ui_by_section_and_key(self.HOOKS_SECTION, key) or
143 self.get_ui_by_section_and_key(self.HOOKS_SECTION, key) or
143 self.UiDbModel())
144 self.UiDbModel())
144 ui.ui_section = self.HOOKS_SECTION
145 ui.ui_section = self.HOOKS_SECTION
145 ui.ui_active = True
146 ui.ui_active = True
146 ui.ui_key = key
147 ui.ui_key = key
147 ui.ui_value = value
148 ui.ui_value = value
148
149
149 if self.repo:
150 if self.repo:
150 repo = self._get_repo(self.repo)
151 repo = self._get_repo(self.repo)
151 repository_id = repo.repo_id
152 repository_id = repo.repo_id
152 ui.repository_id = repository_id
153 ui.repository_id = repository_id
153
154
154 Session().add(ui)
155 Session().add(ui)
155 return ui
156 return ui
156
157
157 def delete_ui(self, id_):
158 def delete_ui(self, id_):
158 ui = self.UiDbModel.get(id_)
159 ui = self.UiDbModel.get(id_)
159 if not ui:
160 if not ui:
160 raise SettingNotFound(id_)
161 raise SettingNotFound(id_)
161 Session().delete(ui)
162 Session().delete(ui)
162
163
163 def get_setting_by_name(self, name):
164 def get_setting_by_name(self, name):
164 q = self._get_settings_query()
165 q = self._get_settings_query()
165 q = q.filter(self.SettingsDbModel.app_settings_name == name)
166 q = q.filter(self.SettingsDbModel.app_settings_name == name)
166 return q.scalar()
167 return q.scalar()
167
168
168 def create_or_update_setting(
169 def create_or_update_setting(
169 self, name, val=Optional(''), type_=Optional('unicode')):
170 self, name, val=Optional(''), type_=Optional('unicode')):
170 """
171 """
171 Creates or updates a RhodeCode setting. If an update is triggered, it will
172 Creates or updates a RhodeCode setting. If an update is triggered, it will
172 only update parameters that are explicitly set; Optional instances will
173 only update parameters that are explicitly set; Optional instances will
173 be skipped.
174 be skipped.
174
175
175 :param name:
176 :param name:
176 :param val:
177 :param val:
177 :param type_:
178 :param type_:
178 :return:
179 :return:
179 """
180 """
180
181
181 res = self.get_setting_by_name(name)
182 res = self.get_setting_by_name(name)
182 repo = self._get_repo(self.repo) if self.repo else None
183 repo = self._get_repo(self.repo) if self.repo else None
183
184
184 if not res:
185 if not res:
185 val = Optional.extract(val)
186 val = Optional.extract(val)
186 type_ = Optional.extract(type_)
187 type_ = Optional.extract(type_)
187
188
188 args = (
189 args = (
189 (repo.repo_id, name, val, type_)
190 (repo.repo_id, name, val, type_)
190 if repo else (name, val, type_))
191 if repo else (name, val, type_))
191 res = self.SettingsDbModel(*args)
192 res = self.SettingsDbModel(*args)
192
193
193 else:
194 else:
194 if self.repo:
195 if self.repo:
195 res.repository_id = repo.repo_id
196 res.repository_id = repo.repo_id
196
197
197 res.app_settings_name = name
198 res.app_settings_name = name
198 if not isinstance(type_, Optional):
199 if not isinstance(type_, Optional):
199 # update if set
200 # update if set
200 res.app_settings_type = type_
201 res.app_settings_type = type_
201 if not isinstance(val, Optional):
202 if not isinstance(val, Optional):
202 # update if set
203 # update if set
203 res.app_settings_value = val
204 res.app_settings_value = val
204
205
205 Session().add(res)
206 Session().add(res)
206 return res
207 return res
207
208
208 def invalidate_settings_cache(self):
209 def invalidate_settings_cache(self):
209 namespace = 'rhodecode_settings'
210 # NOTE:(marcink) we flush the whole sql_cache_short region, because it
210 cache_manager = caches.get_cache_manager('sql_cache_short', namespace)
211 # reads different settings etc. It's a little too much, but those caches are
211 caches.clear_cache_manager(cache_manager)
212 # anyway very short-lived, and it's the safest way.
213 region = rc_cache.get_or_create_region('sql_cache_short')
214 region.invalidate()
212
215
213 def get_all_settings(self, cache=False):
216 def get_all_settings(self, cache=False):
217 region = rc_cache.get_or_create_region('sql_cache_short')
214
218
215 def _compute():
219 @region.cache_on_arguments(should_cache_fn=lambda v: cache)
220 def _get_all_settings(name, key):
216 q = self._get_settings_query()
221 q = self._get_settings_query()
217 if not q:
222 if not q:
218 raise Exception('Could not get application settings !')
223 raise Exception('Could not get application settings !')
219
224
220 settings = {
225 settings = {
221 'rhodecode_' + result.app_settings_name: result.app_settings_value
226 'rhodecode_' + result.app_settings_name: result.app_settings_value
222 for result in q
227 for result in q
223 }
228 }
224 return settings
229 return settings
225
230
226 if cache:
227 log.debug('Fetching app settings using cache')
228 repo = self._get_repo(self.repo) if self.repo else None
231 repo = self._get_repo(self.repo) if self.repo else None
229 namespace = 'rhodecode_settings'
232 key = "settings_repo.{}".format(repo.repo_id) if repo else "settings_app"
230 cache_manager = caches.get_cache_manager(
233 start = time.time()
231 'sql_cache_short', namespace)
234 result = _get_all_settings('rhodecode_settings', key)
232 _cache_key = (
235 total = time.time() - start
233 "get_repo_{}_settings".format(repo.repo_id)
236 log.debug('Fetching app settings for key: %s took: %.3fs', key, total)
234 if repo else "get_app_settings")
235
237
236 return cache_manager.get(_cache_key, createfunc=_compute)
238 return result
237
238 else:
239 return _compute()
240
239
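The replacement above is the standard dogpile.cache pattern: a named region, `cache_on_arguments` gated by `should_cache_fn`, and a wholesale `region.invalidate()` in `invalidate_settings_cache`. A self-contained sketch with an in-memory backend; the backend, expiration time, and function below are illustrative, not RhodeCode's actual region configuration:

    from dogpile.cache import make_region

    region = make_region().configure(
        'dogpile.cache.memory',   # in-memory backend, just for this sketch
        expiration_time=30,       # short-lived, in the spirit of sql_cache_short
    )

    use_cache = True

    @region.cache_on_arguments(should_cache_fn=lambda computed_value: use_cache)
    def expensive_settings_lookup(namespace, key):
        # stands in for the SQL query inside _get_all_settings() above
        print('computing settings for %s' % key)
        return {'rhodecode_title': 'demo'}

    expensive_settings_lookup('rhodecode_settings', 'settings_app')  # computes and caches
    expensive_settings_lookup('rhodecode_settings', 'settings_app')  # served from the region

    region.invalidate()  # flushes everything, as the new invalidate_settings_cache() does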
241 def get_auth_settings(self):
240 def get_auth_settings(self):
242 q = self._get_settings_query()
241 q = self._get_settings_query()
243 q = q.filter(
242 q = q.filter(
244 self.SettingsDbModel.app_settings_name.startswith('auth_'))
243 self.SettingsDbModel.app_settings_name.startswith('auth_'))
245 rows = q.all()
244 rows = q.all()
246 auth_settings = {
245 auth_settings = {
247 row.app_settings_name: row.app_settings_value for row in rows}
246 row.app_settings_name: row.app_settings_value for row in rows}
248 return auth_settings
247 return auth_settings
249
248
250 def get_auth_plugins(self):
249 def get_auth_plugins(self):
251 auth_plugins = self.get_setting_by_name("auth_plugins")
250 auth_plugins = self.get_setting_by_name("auth_plugins")
252 return auth_plugins.app_settings_value
251 return auth_plugins.app_settings_value
253
252
254 def get_default_repo_settings(self, strip_prefix=False):
253 def get_default_repo_settings(self, strip_prefix=False):
255 q = self._get_settings_query()
254 q = self._get_settings_query()
256 q = q.filter(
255 q = q.filter(
257 self.SettingsDbModel.app_settings_name.startswith('default_'))
256 self.SettingsDbModel.app_settings_name.startswith('default_'))
258 rows = q.all()
257 rows = q.all()
259
258
260 result = {}
259 result = {}
261 for row in rows:
260 for row in rows:
262 key = row.app_settings_name
261 key = row.app_settings_name
263 if strip_prefix:
262 if strip_prefix:
264 key = remove_prefix(key, prefix='default_')
263 key = remove_prefix(key, prefix='default_')
265 result.update({key: row.app_settings_value})
264 result.update({key: row.app_settings_value})
266 return result
265 return result
267
266
268 def get_repo(self):
267 def get_repo(self):
269 repo = self._get_repo(self.repo)
268 repo = self._get_repo(self.repo)
270 if not repo:
269 if not repo:
271 raise Exception(
270 raise Exception(
272 'Repository `{}` cannot be found inside the database'.format(
271 'Repository `{}` cannot be found inside the database'.format(
273 self.repo))
272 self.repo))
274 return repo
273 return repo
275
274
276 def _filter_by_repo(self, model, query):
275 def _filter_by_repo(self, model, query):
277 if self.repo:
276 if self.repo:
278 repo = self.get_repo()
277 repo = self.get_repo()
279 query = query.filter(model.repository_id == repo.repo_id)
278 query = query.filter(model.repository_id == repo.repo_id)
280 return query
279 return query
281
280
282 def _get_hooks(self, query):
281 def _get_hooks(self, query):
283 query = query.filter(self.UiDbModel.ui_section == self.HOOKS_SECTION)
282 query = query.filter(self.UiDbModel.ui_section == self.HOOKS_SECTION)
284 query = self._filter_by_repo(RepoRhodeCodeUi, query)
283 query = self._filter_by_repo(RepoRhodeCodeUi, query)
285 return query.all()
284 return query.all()
286
285
287 def _get_settings_query(self):
286 def _get_settings_query(self):
288 q = self.SettingsDbModel.query()
287 q = self.SettingsDbModel.query()
289 return self._filter_by_repo(RepoRhodeCodeSetting, q)
288 return self._filter_by_repo(RepoRhodeCodeSetting, q)
290
289
291 def list_enabled_social_plugins(self, settings):
290 def list_enabled_social_plugins(self, settings):
292 enabled = []
291 enabled = []
293 for plug in SOCIAL_PLUGINS_LIST:
292 for plug in SOCIAL_PLUGINS_LIST:
294 if str2bool(settings.get('rhodecode_auth_{}_enabled'.format(plug)
293 if str2bool(settings.get('rhodecode_auth_{}_enabled'.format(plug)
295 )):
294 )):
296 enabled.append(plug)
295 enabled.append(plug)
297 return enabled
296 return enabled
298
297
299
298
300 def assert_repo_settings(func):
299 def assert_repo_settings(func):
301 @wraps(func)
300 @wraps(func)
302 def _wrapper(self, *args, **kwargs):
301 def _wrapper(self, *args, **kwargs):
303 if not self.repo_settings:
302 if not self.repo_settings:
304 raise Exception('Repository is not specified')
303 raise Exception('Repository is not specified')
305 return func(self, *args, **kwargs)
304 return func(self, *args, **kwargs)
306 return _wrapper
305 return _wrapper
307
306
308
307
309 class IssueTrackerSettingsModel(object):
308 class IssueTrackerSettingsModel(object):
310 INHERIT_SETTINGS = 'inherit_issue_tracker_settings'
309 INHERIT_SETTINGS = 'inherit_issue_tracker_settings'
311 SETTINGS_PREFIX = 'issuetracker_'
310 SETTINGS_PREFIX = 'issuetracker_'
312
311
313 def __init__(self, sa=None, repo=None):
312 def __init__(self, sa=None, repo=None):
314 self.global_settings = SettingsModel(sa=sa)
313 self.global_settings = SettingsModel(sa=sa)
315 self.repo_settings = SettingsModel(sa=sa, repo=repo) if repo else None
314 self.repo_settings = SettingsModel(sa=sa, repo=repo) if repo else None
316
315
317 @property
316 @property
318 def inherit_global_settings(self):
317 def inherit_global_settings(self):
319 if not self.repo_settings:
318 if not self.repo_settings:
320 return True
319 return True
321 setting = self.repo_settings.get_setting_by_name(self.INHERIT_SETTINGS)
320 setting = self.repo_settings.get_setting_by_name(self.INHERIT_SETTINGS)
322 return setting.app_settings_value if setting else True
321 return setting.app_settings_value if setting else True
323
322
324 @inherit_global_settings.setter
323 @inherit_global_settings.setter
325 def inherit_global_settings(self, value):
324 def inherit_global_settings(self, value):
326 if self.repo_settings:
325 if self.repo_settings:
327 settings = self.repo_settings.create_or_update_setting(
326 settings = self.repo_settings.create_or_update_setting(
328 self.INHERIT_SETTINGS, value, type_='bool')
327 self.INHERIT_SETTINGS, value, type_='bool')
329 Session().add(settings)
328 Session().add(settings)
330
329
331 def _get_keyname(self, key, uid, prefix=''):
330 def _get_keyname(self, key, uid, prefix=''):
332 return '{0}{1}{2}_{3}'.format(
331 return '{0}{1}{2}_{3}'.format(
333 prefix, self.SETTINGS_PREFIX, key, uid)
332 prefix, self.SETTINGS_PREFIX, key, uid)
334
333
335 def _make_dict_for_settings(self, qs):
334 def _make_dict_for_settings(self, qs):
336 prefix_match = self._get_keyname('pat', '', 'rhodecode_')
335 prefix_match = self._get_keyname('pat', '', 'rhodecode_')
337
336
338 issuetracker_entries = {}
337 issuetracker_entries = {}
339 # create keys
338 # create keys
340 for k, v in qs.items():
339 for k, v in qs.items():
341 if k.startswith(prefix_match):
340 if k.startswith(prefix_match):
342 uid = k[len(prefix_match):]
341 uid = k[len(prefix_match):]
343 issuetracker_entries[uid] = None
342 issuetracker_entries[uid] = None
344
343
345 # populate
344 # populate
346 for uid in issuetracker_entries:
345 for uid in issuetracker_entries:
347 issuetracker_entries[uid] = AttributeDict({
346 issuetracker_entries[uid] = AttributeDict({
348 'pat': qs.get(
347 'pat': qs.get(
349 self._get_keyname('pat', uid, 'rhodecode_')),
348 self._get_keyname('pat', uid, 'rhodecode_')),
350 'url': bleach.clean(
349 'url': bleach.clean(
351 qs.get(self._get_keyname('url', uid, 'rhodecode_')) or ''),
350 qs.get(self._get_keyname('url', uid, 'rhodecode_')) or ''),
352 'pref': bleach.clean(
351 'pref': bleach.clean(
353 qs.get(self._get_keyname('pref', uid, 'rhodecode_')) or ''),
352 qs.get(self._get_keyname('pref', uid, 'rhodecode_')) or ''),
354 'desc': qs.get(
353 'desc': qs.get(
355 self._get_keyname('desc', uid, 'rhodecode_')),
354 self._get_keyname('desc', uid, 'rhodecode_')),
356 })
355 })
357 return issuetracker_entries
356 return issuetracker_entries
358
357
359 def get_global_settings(self, cache=False):
358 def get_global_settings(self, cache=False):
360 """
359 """
361 Returns list of global issue tracker settings
360 Returns list of global issue tracker settings
362 """
361 """
363 defaults = self.global_settings.get_all_settings(cache=cache)
362 defaults = self.global_settings.get_all_settings(cache=cache)
364 settings = self._make_dict_for_settings(defaults)
363 settings = self._make_dict_for_settings(defaults)
365 return settings
364 return settings
366
365
367 def get_repo_settings(self, cache=False):
366 def get_repo_settings(self, cache=False):
368 """
367 """
369 Returns list of issue tracker settings per repository
368 Returns list of issue tracker settings per repository
370 """
369 """
371 if not self.repo_settings:
370 if not self.repo_settings:
372 raise Exception('Repository is not specified')
371 raise Exception('Repository is not specified')
373 all_settings = self.repo_settings.get_all_settings(cache=cache)
372 all_settings = self.repo_settings.get_all_settings(cache=cache)
374 settings = self._make_dict_for_settings(all_settings)
373 settings = self._make_dict_for_settings(all_settings)
375 return settings
374 return settings
376
375
377 def get_settings(self, cache=False):
376 def get_settings(self, cache=False):
378 if self.inherit_global_settings:
377 if self.inherit_global_settings:
379 return self.get_global_settings(cache=cache)
378 return self.get_global_settings(cache=cache)
380 else:
379 else:
381 return self.get_repo_settings(cache=cache)
380 return self.get_repo_settings(cache=cache)
382
381
383 def delete_entries(self, uid):
382 def delete_entries(self, uid):
384 if self.repo_settings:
383 if self.repo_settings:
385 all_patterns = self.get_repo_settings()
384 all_patterns = self.get_repo_settings()
386 settings_model = self.repo_settings
385 settings_model = self.repo_settings
387 else:
386 else:
388 all_patterns = self.get_global_settings()
387 all_patterns = self.get_global_settings()
389 settings_model = self.global_settings
388 settings_model = self.global_settings
390 entries = all_patterns.get(uid, [])
389 entries = all_patterns.get(uid, [])
391
390
392 for del_key in entries:
391 for del_key in entries:
393 setting_name = self._get_keyname(del_key, uid)
392 setting_name = self._get_keyname(del_key, uid)
394 entry = settings_model.get_setting_by_name(setting_name)
393 entry = settings_model.get_setting_by_name(setting_name)
395 if entry:
394 if entry:
396 Session().delete(entry)
395 Session().delete(entry)
397
396
398 Session().commit()
397 Session().commit()
399
398
400 def create_or_update_setting(
399 def create_or_update_setting(
401 self, name, val=Optional(''), type_=Optional('unicode')):
400 self, name, val=Optional(''), type_=Optional('unicode')):
402 if self.repo_settings:
401 if self.repo_settings:
403 setting = self.repo_settings.create_or_update_setting(
402 setting = self.repo_settings.create_or_update_setting(
404 name, val, type_)
403 name, val, type_)
405 else:
404 else:
406 setting = self.global_settings.create_or_update_setting(
405 setting = self.global_settings.create_or_update_setting(
407 name, val, type_)
406 name, val, type_)
408 return setting
407 return setting
409
408
410
409
411 class VcsSettingsModel(object):
410 class VcsSettingsModel(object):
412
411
413 INHERIT_SETTINGS = 'inherit_vcs_settings'
412 INHERIT_SETTINGS = 'inherit_vcs_settings'
414 GENERAL_SETTINGS = (
413 GENERAL_SETTINGS = (
415 'use_outdated_comments',
414 'use_outdated_comments',
416 'pr_merge_enabled',
415 'pr_merge_enabled',
417 'hg_use_rebase_for_merging',
416 'hg_use_rebase_for_merging',
418 'hg_close_branch_before_merging',
417 'hg_close_branch_before_merging',
419 'git_use_rebase_for_merging',
418 'git_use_rebase_for_merging',
420 'git_close_branch_before_merging',
419 'git_close_branch_before_merging',
421 'diff_cache',
420 'diff_cache',
422 )
421 )
423
422
424 HOOKS_SETTINGS = (
423 HOOKS_SETTINGS = (
425 ('hooks', 'changegroup.repo_size'),
424 ('hooks', 'changegroup.repo_size'),
426 ('hooks', 'changegroup.push_logger'),
425 ('hooks', 'changegroup.push_logger'),
427 ('hooks', 'outgoing.pull_logger'),)
426 ('hooks', 'outgoing.pull_logger'),)
428 HG_SETTINGS = (
427 HG_SETTINGS = (
429 ('extensions', 'largefiles'),
428 ('extensions', 'largefiles'),
430 ('phases', 'publish'),
429 ('phases', 'publish'),
431 ('extensions', 'evolve'),)
430 ('extensions', 'evolve'),)
432 GIT_SETTINGS = (
431 GIT_SETTINGS = (
433 ('vcs_git_lfs', 'enabled'),)
432 ('vcs_git_lfs', 'enabled'),)
434 GLOBAL_HG_SETTINGS = (
433 GLOBAL_HG_SETTINGS = (
435 ('extensions', 'largefiles'),
434 ('extensions', 'largefiles'),
436 ('largefiles', 'usercache'),
435 ('largefiles', 'usercache'),
437 ('phases', 'publish'),
436 ('phases', 'publish'),
438 ('extensions', 'hgsubversion'),
437 ('extensions', 'hgsubversion'),
439 ('extensions', 'evolve'),)
438 ('extensions', 'evolve'),)
440 GLOBAL_GIT_SETTINGS = (
439 GLOBAL_GIT_SETTINGS = (
441 ('vcs_git_lfs', 'enabled'),
440 ('vcs_git_lfs', 'enabled'),
442 ('vcs_git_lfs', 'store_location'))
441 ('vcs_git_lfs', 'store_location'))
443
442
444 GLOBAL_SVN_SETTINGS = (
443 GLOBAL_SVN_SETTINGS = (
445 ('vcs_svn_proxy', 'http_requests_enabled'),
444 ('vcs_svn_proxy', 'http_requests_enabled'),
446 ('vcs_svn_proxy', 'http_server_url'))
445 ('vcs_svn_proxy', 'http_server_url'))
447
446
448 SVN_BRANCH_SECTION = 'vcs_svn_branch'
447 SVN_BRANCH_SECTION = 'vcs_svn_branch'
449 SVN_TAG_SECTION = 'vcs_svn_tag'
448 SVN_TAG_SECTION = 'vcs_svn_tag'
450 SSL_SETTING = ('web', 'push_ssl')
449 SSL_SETTING = ('web', 'push_ssl')
451 PATH_SETTING = ('paths', '/')
450 PATH_SETTING = ('paths', '/')
452
451
453 def __init__(self, sa=None, repo=None):
452 def __init__(self, sa=None, repo=None):
454 self.global_settings = SettingsModel(sa=sa)
453 self.global_settings = SettingsModel(sa=sa)
455 self.repo_settings = SettingsModel(sa=sa, repo=repo) if repo else None
454 self.repo_settings = SettingsModel(sa=sa, repo=repo) if repo else None
456 self._ui_settings = (
455 self._ui_settings = (
457 self.HG_SETTINGS + self.GIT_SETTINGS + self.HOOKS_SETTINGS)
456 self.HG_SETTINGS + self.GIT_SETTINGS + self.HOOKS_SETTINGS)
458 self._svn_sections = (self.SVN_BRANCH_SECTION, self.SVN_TAG_SECTION)
457 self._svn_sections = (self.SVN_BRANCH_SECTION, self.SVN_TAG_SECTION)
459
458
460 @property
459 @property
461 @assert_repo_settings
460 @assert_repo_settings
462 def inherit_global_settings(self):
461 def inherit_global_settings(self):
463 setting = self.repo_settings.get_setting_by_name(self.INHERIT_SETTINGS)
462 setting = self.repo_settings.get_setting_by_name(self.INHERIT_SETTINGS)
464 return setting.app_settings_value if setting else True
463 return setting.app_settings_value if setting else True
465
464
466 @inherit_global_settings.setter
465 @inherit_global_settings.setter
467 @assert_repo_settings
466 @assert_repo_settings
468 def inherit_global_settings(self, value):
467 def inherit_global_settings(self, value):
469 self.repo_settings.create_or_update_setting(
468 self.repo_settings.create_or_update_setting(
470 self.INHERIT_SETTINGS, value, type_='bool')
469 self.INHERIT_SETTINGS, value, type_='bool')
471
470
472 def get_global_svn_branch_patterns(self):
471 def get_global_svn_branch_patterns(self):
473 return self.global_settings.get_ui_by_section(self.SVN_BRANCH_SECTION)
472 return self.global_settings.get_ui_by_section(self.SVN_BRANCH_SECTION)
474
473
475 @assert_repo_settings
474 @assert_repo_settings
476 def get_repo_svn_branch_patterns(self):
475 def get_repo_svn_branch_patterns(self):
477 return self.repo_settings.get_ui_by_section(self.SVN_BRANCH_SECTION)
476 return self.repo_settings.get_ui_by_section(self.SVN_BRANCH_SECTION)
478
477
479 def get_global_svn_tag_patterns(self):
478 def get_global_svn_tag_patterns(self):
480 return self.global_settings.get_ui_by_section(self.SVN_TAG_SECTION)
479 return self.global_settings.get_ui_by_section(self.SVN_TAG_SECTION)
481
480
482 @assert_repo_settings
481 @assert_repo_settings
483 def get_repo_svn_tag_patterns(self):
482 def get_repo_svn_tag_patterns(self):
484 return self.repo_settings.get_ui_by_section(self.SVN_TAG_SECTION)
483 return self.repo_settings.get_ui_by_section(self.SVN_TAG_SECTION)
485
484
486 def get_global_settings(self):
485 def get_global_settings(self):
487 return self._collect_all_settings(global_=True)
486 return self._collect_all_settings(global_=True)
488
487
489 @assert_repo_settings
488 @assert_repo_settings
490 def get_repo_settings(self):
489 def get_repo_settings(self):
491 return self._collect_all_settings(global_=False)
490 return self._collect_all_settings(global_=False)
492
491
493 @assert_repo_settings
492 @assert_repo_settings
494 def create_or_update_repo_settings(
493 def create_or_update_repo_settings(
495 self, data, inherit_global_settings=False):
494 self, data, inherit_global_settings=False):
496 from rhodecode.model.scm import ScmModel
495 from rhodecode.model.scm import ScmModel
497
496
498 self.inherit_global_settings = inherit_global_settings
497 self.inherit_global_settings = inherit_global_settings
499
498
500 repo = self.repo_settings.get_repo()
499 repo = self.repo_settings.get_repo()
501 if not inherit_global_settings:
500 if not inherit_global_settings:
502 if repo.repo_type == 'svn':
501 if repo.repo_type == 'svn':
503 self.create_repo_svn_settings(data)
502 self.create_repo_svn_settings(data)
504 else:
503 else:
505 self.create_or_update_repo_hook_settings(data)
504 self.create_or_update_repo_hook_settings(data)
506 self.create_or_update_repo_pr_settings(data)
505 self.create_or_update_repo_pr_settings(data)
507
506
508 if repo.repo_type == 'hg':
507 if repo.repo_type == 'hg':
509 self.create_or_update_repo_hg_settings(data)
508 self.create_or_update_repo_hg_settings(data)
510
509
511 if repo.repo_type == 'git':
510 if repo.repo_type == 'git':
512 self.create_or_update_repo_git_settings(data)
511 self.create_or_update_repo_git_settings(data)
513
512
514 ScmModel().mark_for_invalidation(repo.repo_name, delete=True)
513 ScmModel().mark_for_invalidation(repo.repo_name, delete=True)
515
514
516 @assert_repo_settings
515 @assert_repo_settings
517 def create_or_update_repo_hook_settings(self, data):
516 def create_or_update_repo_hook_settings(self, data):
518 for section, key in self.HOOKS_SETTINGS:
517 for section, key in self.HOOKS_SETTINGS:
519 data_key = self._get_form_ui_key(section, key)
518 data_key = self._get_form_ui_key(section, key)
520 if data_key not in data:
519 if data_key not in data:
521 raise ValueError(
520 raise ValueError(
522 'The given data does not contain {} key'.format(data_key))
521 'The given data does not contain {} key'.format(data_key))
523
522
524 active = data.get(data_key)
523 active = data.get(data_key)
525 repo_setting = self.repo_settings.get_ui_by_section_and_key(
524 repo_setting = self.repo_settings.get_ui_by_section_and_key(
526 section, key)
525 section, key)
527 if not repo_setting:
526 if not repo_setting:
528 global_setting = self.global_settings.\
527 global_setting = self.global_settings.\
529 get_ui_by_section_and_key(section, key)
528 get_ui_by_section_and_key(section, key)
530 self.repo_settings.create_ui_section_value(
529 self.repo_settings.create_ui_section_value(
531 section, global_setting.ui_value, key=key, active=active)
530 section, global_setting.ui_value, key=key, active=active)
532 else:
531 else:
533 repo_setting.ui_active = active
532 repo_setting.ui_active = active
534 Session().add(repo_setting)
533 Session().add(repo_setting)
535
534
536 def update_global_hook_settings(self, data):
535 def update_global_hook_settings(self, data):
537 for section, key in self.HOOKS_SETTINGS:
536 for section, key in self.HOOKS_SETTINGS:
538 data_key = self._get_form_ui_key(section, key)
537 data_key = self._get_form_ui_key(section, key)
539 if data_key not in data:
538 if data_key not in data:
540 raise ValueError(
539 raise ValueError(
541 'The given data does not contain {} key'.format(data_key))
540 'The given data does not contain {} key'.format(data_key))
542 active = data.get(data_key)
541 active = data.get(data_key)
543 repo_setting = self.global_settings.get_ui_by_section_and_key(
542 repo_setting = self.global_settings.get_ui_by_section_and_key(
544 section, key)
543 section, key)
545 repo_setting.ui_active = active
544 repo_setting.ui_active = active
546 Session().add(repo_setting)
545 Session().add(repo_setting)
547
546
548 @assert_repo_settings
547 @assert_repo_settings
549 def create_or_update_repo_pr_settings(self, data):
548 def create_or_update_repo_pr_settings(self, data):
550 return self._create_or_update_general_settings(
549 return self._create_or_update_general_settings(
551 self.repo_settings, data)
550 self.repo_settings, data)
552
551
553 def create_or_update_global_pr_settings(self, data):
552 def create_or_update_global_pr_settings(self, data):
554 return self._create_or_update_general_settings(
553 return self._create_or_update_general_settings(
555 self.global_settings, data)
554 self.global_settings, data)
556
555
557 @assert_repo_settings
556 @assert_repo_settings
558 def create_repo_svn_settings(self, data):
557 def create_repo_svn_settings(self, data):
559 return self._create_svn_settings(self.repo_settings, data)
558 return self._create_svn_settings(self.repo_settings, data)
560
559
561 @assert_repo_settings
560 @assert_repo_settings
562 def create_or_update_repo_hg_settings(self, data):
561 def create_or_update_repo_hg_settings(self, data):
563 largefiles, phases, evolve = \
562 largefiles, phases, evolve = \
564 self.HG_SETTINGS
563 self.HG_SETTINGS
565 largefiles_key, phases_key, evolve_key = \
564 largefiles_key, phases_key, evolve_key = \
566 self._get_settings_keys(self.HG_SETTINGS, data)
565 self._get_settings_keys(self.HG_SETTINGS, data)
567
566
568 self._create_or_update_ui(
567 self._create_or_update_ui(
569 self.repo_settings, *largefiles, value='',
568 self.repo_settings, *largefiles, value='',
570 active=data[largefiles_key])
569 active=data[largefiles_key])
571 self._create_or_update_ui(
570 self._create_or_update_ui(
572 self.repo_settings, *evolve, value='',
571 self.repo_settings, *evolve, value='',
573 active=data[evolve_key])
572 active=data[evolve_key])
574 self._create_or_update_ui(
573 self._create_or_update_ui(
575 self.repo_settings, *phases, value=safe_str(data[phases_key]))
574 self.repo_settings, *phases, value=safe_str(data[phases_key]))
576
575
577
576
578 def create_or_update_global_hg_settings(self, data):
577 def create_or_update_global_hg_settings(self, data):
579 largefiles, largefiles_store, phases, hgsubversion, evolve \
578 largefiles, largefiles_store, phases, hgsubversion, evolve \
580 = self.GLOBAL_HG_SETTINGS
579 = self.GLOBAL_HG_SETTINGS
581 largefiles_key, largefiles_store_key, phases_key, subversion_key, evolve_key \
580 largefiles_key, largefiles_store_key, phases_key, subversion_key, evolve_key \
582 = self._get_settings_keys(self.GLOBAL_HG_SETTINGS, data)
581 = self._get_settings_keys(self.GLOBAL_HG_SETTINGS, data)
583
582
584 self._create_or_update_ui(
583 self._create_or_update_ui(
585 self.global_settings, *largefiles, value='',
584 self.global_settings, *largefiles, value='',
586 active=data[largefiles_key])
585 active=data[largefiles_key])
587 self._create_or_update_ui(
586 self._create_or_update_ui(
588 self.global_settings, *largefiles_store,
587 self.global_settings, *largefiles_store,
589 value=data[largefiles_store_key])
588 value=data[largefiles_store_key])
590 self._create_or_update_ui(
589 self._create_or_update_ui(
591 self.global_settings, *phases, value=safe_str(data[phases_key]))
590 self.global_settings, *phases, value=safe_str(data[phases_key]))
592 self._create_or_update_ui(
591 self._create_or_update_ui(
593 self.global_settings, *hgsubversion, active=data[subversion_key])
592 self.global_settings, *hgsubversion, active=data[subversion_key])
594 self._create_or_update_ui(
593 self._create_or_update_ui(
595 self.global_settings, *evolve, value='',
594 self.global_settings, *evolve, value='',
596 active=data[evolve_key])
595 active=data[evolve_key])
597
596
598 def create_or_update_repo_git_settings(self, data):
597 def create_or_update_repo_git_settings(self, data):
599 # NOTE(marcink): the trailing comma makes the unpack work properly
598 # NOTE(marcink): the trailing comma makes the unpack work properly
600 lfs_enabled, \
599 lfs_enabled, \
601 = self.GIT_SETTINGS
600 = self.GIT_SETTINGS
602
601
603 lfs_enabled_key, \
602 lfs_enabled_key, \
604 = self._get_settings_keys(self.GIT_SETTINGS, data)
603 = self._get_settings_keys(self.GIT_SETTINGS, data)
605
604
606 self._create_or_update_ui(
605 self._create_or_update_ui(
607 self.repo_settings, *lfs_enabled, value=data[lfs_enabled_key],
606 self.repo_settings, *lfs_enabled, value=data[lfs_enabled_key],
608 active=data[lfs_enabled_key])
607 active=data[lfs_enabled_key])
609
608
610 def create_or_update_global_git_settings(self, data):
609 def create_or_update_global_git_settings(self, data):
611 lfs_enabled, lfs_store_location \
610 lfs_enabled, lfs_store_location \
612 = self.GLOBAL_GIT_SETTINGS
611 = self.GLOBAL_GIT_SETTINGS
613 lfs_enabled_key, lfs_store_location_key \
612 lfs_enabled_key, lfs_store_location_key \
614 = self._get_settings_keys(self.GLOBAL_GIT_SETTINGS, data)
613 = self._get_settings_keys(self.GLOBAL_GIT_SETTINGS, data)
615
614
616 self._create_or_update_ui(
615 self._create_or_update_ui(
617 self.global_settings, *lfs_enabled, value=data[lfs_enabled_key],
616 self.global_settings, *lfs_enabled, value=data[lfs_enabled_key],
618 active=data[lfs_enabled_key])
617 active=data[lfs_enabled_key])
619 self._create_or_update_ui(
618 self._create_or_update_ui(
620 self.global_settings, *lfs_store_location,
619 self.global_settings, *lfs_store_location,
621 value=data[lfs_store_location_key])
620 value=data[lfs_store_location_key])
622
621
623 def create_or_update_global_svn_settings(self, data):
622 def create_or_update_global_svn_settings(self, data):
624 # branch/tags patterns
623 # branch/tags patterns
625 self._create_svn_settings(self.global_settings, data)
624 self._create_svn_settings(self.global_settings, data)
626
625
627 http_requests_enabled, http_server_url = self.GLOBAL_SVN_SETTINGS
626 http_requests_enabled, http_server_url = self.GLOBAL_SVN_SETTINGS
628 http_requests_enabled_key, http_server_url_key = self._get_settings_keys(
627 http_requests_enabled_key, http_server_url_key = self._get_settings_keys(
629 self.GLOBAL_SVN_SETTINGS, data)
628 self.GLOBAL_SVN_SETTINGS, data)
630
629
631 self._create_or_update_ui(
630 self._create_or_update_ui(
632 self.global_settings, *http_requests_enabled,
631 self.global_settings, *http_requests_enabled,
633 value=safe_str(data[http_requests_enabled_key]))
632 value=safe_str(data[http_requests_enabled_key]))
634 self._create_or_update_ui(
633 self._create_or_update_ui(
635 self.global_settings, *http_server_url,
634 self.global_settings, *http_server_url,
636 value=data[http_server_url_key])
635 value=data[http_server_url_key])
637
636
638 def update_global_ssl_setting(self, value):
637 def update_global_ssl_setting(self, value):
639 self._create_or_update_ui(
638 self._create_or_update_ui(
640 self.global_settings, *self.SSL_SETTING, value=value)
639 self.global_settings, *self.SSL_SETTING, value=value)
641
640
642 def update_global_path_setting(self, value):
641 def update_global_path_setting(self, value):
643 self._create_or_update_ui(
642 self._create_or_update_ui(
644 self.global_settings, *self.PATH_SETTING, value=value)
643 self.global_settings, *self.PATH_SETTING, value=value)
645
644
646 @assert_repo_settings
645 @assert_repo_settings
647 def delete_repo_svn_pattern(self, id_):
646 def delete_repo_svn_pattern(self, id_):
648 ui = self.repo_settings.UiDbModel.get(id_)
647 ui = self.repo_settings.UiDbModel.get(id_)
649 if ui and ui.repository.repo_name == self.repo_settings.repo:
648 if ui and ui.repository.repo_name == self.repo_settings.repo:
650 # only delete if it's the same repo as initialized settings
649 # only delete if it's the same repo as initialized settings
651 self.repo_settings.delete_ui(id_)
650 self.repo_settings.delete_ui(id_)
652 else:
651 else:
653 # raise an error as if this option could not be found
652 # raise an error as if this option could not be found
654 self.repo_settings.delete_ui(-1)
653 self.repo_settings.delete_ui(-1)
655
654
656 def delete_global_svn_pattern(self, id_):
655 def delete_global_svn_pattern(self, id_):
657 self.global_settings.delete_ui(id_)
656 self.global_settings.delete_ui(id_)
658
657
659 @assert_repo_settings
658 @assert_repo_settings
660 def get_repo_ui_settings(self, section=None, key=None):
659 def get_repo_ui_settings(self, section=None, key=None):
661 global_uis = self.global_settings.get_ui(section, key)
660 global_uis = self.global_settings.get_ui(section, key)
662 repo_uis = self.repo_settings.get_ui(section, key)
661 repo_uis = self.repo_settings.get_ui(section, key)
663 filtered_repo_uis = self._filter_ui_settings(repo_uis)
662 filtered_repo_uis = self._filter_ui_settings(repo_uis)
664 filtered_repo_uis_keys = [
663 filtered_repo_uis_keys = [
665 (s.section, s.key) for s in filtered_repo_uis]
664 (s.section, s.key) for s in filtered_repo_uis]
666
665
667 def _is_global_ui_filtered(ui):
666 def _is_global_ui_filtered(ui):
668 return (
667 return (
669 (ui.section, ui.key) in filtered_repo_uis_keys
668 (ui.section, ui.key) in filtered_repo_uis_keys
670 or ui.section in self._svn_sections)
669 or ui.section in self._svn_sections)
671
670
672 filtered_global_uis = [
671 filtered_global_uis = [
673 ui for ui in global_uis if not _is_global_ui_filtered(ui)]
672 ui for ui in global_uis if not _is_global_ui_filtered(ui)]
674
673
675 return filtered_global_uis + filtered_repo_uis
674 return filtered_global_uis + filtered_repo_uis
676
675
677 def get_global_ui_settings(self, section=None, key=None):
676 def get_global_ui_settings(self, section=None, key=None):
678 return self.global_settings.get_ui(section, key)
677 return self.global_settings.get_ui(section, key)
679
678
680 def get_ui_settings_as_config_obj(self, section=None, key=None):
679 def get_ui_settings_as_config_obj(self, section=None, key=None):
681 config = base.Config()
680 config = base.Config()
682
681
683 ui_settings = self.get_ui_settings(section=section, key=key)
682 ui_settings = self.get_ui_settings(section=section, key=key)
684
683
685 for entry in ui_settings:
684 for entry in ui_settings:
686 config.set(entry.section, entry.key, entry.value)
685 config.set(entry.section, entry.key, entry.value)
687
686
688 return config
687 return config
689
688
690 def get_ui_settings(self, section=None, key=None):
689 def get_ui_settings(self, section=None, key=None):
691 if not self.repo_settings or self.inherit_global_settings:
690 if not self.repo_settings or self.inherit_global_settings:
692 return self.get_global_ui_settings(section, key)
691 return self.get_global_ui_settings(section, key)
693 else:
692 else:
694 return self.get_repo_ui_settings(section, key)
693 return self.get_repo_ui_settings(section, key)
695
694
696 def get_svn_patterns(self, section=None):
695 def get_svn_patterns(self, section=None):
697 if not self.repo_settings:
696 if not self.repo_settings:
698 return self.get_global_ui_settings(section)
697 return self.get_global_ui_settings(section)
699 else:
698 else:
700 return self.get_repo_ui_settings(section)
699 return self.get_repo_ui_settings(section)
701
700
702 @assert_repo_settings
701 @assert_repo_settings
703 def get_repo_general_settings(self):
702 def get_repo_general_settings(self):
704 global_settings = self.global_settings.get_all_settings()
703 global_settings = self.global_settings.get_all_settings()
705 repo_settings = self.repo_settings.get_all_settings()
704 repo_settings = self.repo_settings.get_all_settings()
706 filtered_repo_settings = self._filter_general_settings(repo_settings)
705 filtered_repo_settings = self._filter_general_settings(repo_settings)
707 global_settings.update(filtered_repo_settings)
706 global_settings.update(filtered_repo_settings)
708 return global_settings
707 return global_settings
709
708
710 def get_global_general_settings(self):
709 def get_global_general_settings(self):
711 return self.global_settings.get_all_settings()
710 return self.global_settings.get_all_settings()
712
711
713 def get_general_settings(self):
712 def get_general_settings(self):
714 if not self.repo_settings or self.inherit_global_settings:
713 if not self.repo_settings or self.inherit_global_settings:
715 return self.get_global_general_settings()
714 return self.get_global_general_settings()
716 else:
715 else:
717 return self.get_repo_general_settings()
716 return self.get_repo_general_settings()
718
717
719 def get_repos_location(self):
718 def get_repos_location(self):
720 return self.global_settings.get_ui_by_key('/').ui_value
719 return self.global_settings.get_ui_by_key('/').ui_value
721
720
722 def _filter_ui_settings(self, settings):
721 def _filter_ui_settings(self, settings):
723 filtered_settings = [
722 filtered_settings = [
724 s for s in settings if self._should_keep_setting(s)]
723 s for s in settings if self._should_keep_setting(s)]
725 return filtered_settings
724 return filtered_settings
726
725
727 def _should_keep_setting(self, setting):
726 def _should_keep_setting(self, setting):
728 keep = (
727 keep = (
729 (setting.section, setting.key) in self._ui_settings or
728 (setting.section, setting.key) in self._ui_settings or
730 setting.section in self._svn_sections)
729 setting.section in self._svn_sections)
731 return keep
730 return keep
732
731
733 def _filter_general_settings(self, settings):
732 def _filter_general_settings(self, settings):
734 keys = ['rhodecode_{}'.format(key) for key in self.GENERAL_SETTINGS]
733 keys = ['rhodecode_{}'.format(key) for key in self.GENERAL_SETTINGS]
735 return {
734 return {
736 k: settings[k]
735 k: settings[k]
737 for k in settings if k in keys}
736 for k in settings if k in keys}
738
737
739 def _collect_all_settings(self, global_=False):
738 def _collect_all_settings(self, global_=False):
740 settings = self.global_settings if global_ else self.repo_settings
739 settings = self.global_settings if global_ else self.repo_settings
741 result = {}
740 result = {}
742
741
743 for section, key in self._ui_settings:
742 for section, key in self._ui_settings:
744 ui = settings.get_ui_by_section_and_key(section, key)
743 ui = settings.get_ui_by_section_and_key(section, key)
745 result_key = self._get_form_ui_key(section, key)
744 result_key = self._get_form_ui_key(section, key)
746
745
747 if ui:
746 if ui:
748 if section in ('hooks', 'extensions'):
747 if section in ('hooks', 'extensions'):
749 result[result_key] = ui.ui_active
748 result[result_key] = ui.ui_active
750 elif result_key in ['vcs_git_lfs_enabled']:
749 elif result_key in ['vcs_git_lfs_enabled']:
751 result[result_key] = ui.ui_active
750 result[result_key] = ui.ui_active
752 else:
751 else:
753 result[result_key] = ui.ui_value
752 result[result_key] = ui.ui_value
754
753
755 for name in self.GENERAL_SETTINGS:
754 for name in self.GENERAL_SETTINGS:
756 setting = settings.get_setting_by_name(name)
755 setting = settings.get_setting_by_name(name)
757 if setting:
756 if setting:
758 result_key = 'rhodecode_{}'.format(name)
757 result_key = 'rhodecode_{}'.format(name)
759 result[result_key] = setting.app_settings_value
758 result[result_key] = setting.app_settings_value
760
759
761 return result
760 return result
762
761
763 def _get_form_ui_key(self, section, key):
762 def _get_form_ui_key(self, section, key):
764 return '{section}_{key}'.format(
763 return '{section}_{key}'.format(
765 section=section, key=key.replace('.', '_'))
764 section=section, key=key.replace('.', '_'))
766
765
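# Editor's note (illustrative, not part of this changeset): a stand-alone copy of
# the mapping above, shown only to document how (section, key) ui pairs become the
# flat form-field names expected in `data` (dots in the key turn into underscores).
def _form_ui_key_example(section, key):
    return '{section}_{key}'.format(section=section, key=key.replace('.', '_'))

assert _form_ui_key_example('hooks', 'changegroup.repo_size') == 'hooks_changegroup_repo_size'
assert _form_ui_key_example('vcs_git_lfs', 'enabled') == 'vcs_git_lfs_enabled'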
767 def _create_or_update_ui(
766 def _create_or_update_ui(
768 self, settings, section, key, value=None, active=None):
767 self, settings, section, key, value=None, active=None):
769 ui = settings.get_ui_by_section_and_key(section, key)
768 ui = settings.get_ui_by_section_and_key(section, key)
770 if not ui:
769 if not ui:
771 active = True if active is None else active
770 active = True if active is None else active
772 settings.create_ui_section_value(
771 settings.create_ui_section_value(
773 section, value, key=key, active=active)
772 section, value, key=key, active=active)
774 else:
773 else:
775 if active is not None:
774 if active is not None:
776 ui.ui_active = active
775 ui.ui_active = active
777 if value is not None:
776 if value is not None:
778 ui.ui_value = value
777 ui.ui_value = value
779 Session().add(ui)
778 Session().add(ui)
780
779
781 def _create_svn_settings(self, settings, data):
780 def _create_svn_settings(self, settings, data):
782 svn_settings = {
781 svn_settings = {
783 'new_svn_branch': self.SVN_BRANCH_SECTION,
782 'new_svn_branch': self.SVN_BRANCH_SECTION,
784 'new_svn_tag': self.SVN_TAG_SECTION
783 'new_svn_tag': self.SVN_TAG_SECTION
785 }
784 }
786 for key in svn_settings:
785 for key in svn_settings:
787 if data.get(key):
786 if data.get(key):
788 settings.create_ui_section_value(svn_settings[key], data[key])
787 settings.create_ui_section_value(svn_settings[key], data[key])
789
788
790 def _create_or_update_general_settings(self, settings, data):
789 def _create_or_update_general_settings(self, settings, data):
791 for name in self.GENERAL_SETTINGS:
790 for name in self.GENERAL_SETTINGS:
792 data_key = 'rhodecode_{}'.format(name)
791 data_key = 'rhodecode_{}'.format(name)
793 if data_key not in data:
792 if data_key not in data:
794 raise ValueError(
793 raise ValueError(
795 'The given data does not contain {} key'.format(data_key))
794 'The given data does not contain {} key'.format(data_key))
796 setting = settings.create_or_update_setting(
795 setting = settings.create_or_update_setting(
797 name, data[data_key], 'bool')
796 name, data[data_key], 'bool')
798 Session().add(setting)
797 Session().add(setting)
799
798
800 def _get_settings_keys(self, settings, data):
799 def _get_settings_keys(self, settings, data):
801 data_keys = [self._get_form_ui_key(*s) for s in settings]
800 data_keys = [self._get_form_ui_key(*s) for s in settings]
802 for data_key in data_keys:
801 for data_key in data_keys:
803 if data_key not in data:
802 if data_key not in data:
804 raise ValueError(
803 raise ValueError(
805 'The given data does not contain {} key'.format(data_key))
804 'The given data does not contain {} key'.format(data_key))
806 return data_keys
805 return data_keys
807
806
808 def create_largeobjects_dirs_if_needed(self, repo_store_path):
807 def create_largeobjects_dirs_if_needed(self, repo_store_path):
809 """
808 """
810 This is subscribed to the `pyramid.events.ApplicationCreated` event. It
809 This is subscribed to the `pyramid.events.ApplicationCreated` event. It
811 creates the largefiles and LFS store directories if they do not exist yet.
810 creates the largefiles and LFS store directories if they do not exist yet.
812 """
811 """
813
812
814 from rhodecode.lib.vcs.backends.hg import largefiles_store
813 from rhodecode.lib.vcs.backends.hg import largefiles_store
815 from rhodecode.lib.vcs.backends.git import lfs_store
814 from rhodecode.lib.vcs.backends.git import lfs_store
816
815
817 paths = [
816 paths = [
818 largefiles_store(repo_store_path),
817 largefiles_store(repo_store_path),
819 lfs_store(repo_store_path)]
818 lfs_store(repo_store_path)]
820
819
821 for path in paths:
820 for path in paths:
822 if os.path.isdir(path):
821 if os.path.isdir(path):
823 continue
822 continue
824 if os.path.isfile(path):
823 if os.path.isfile(path):
825 continue
824 continue
826 # neither a file nor a dir, try to create it
825 # neither a file nor a dir, try to create it
827 try:
826 try:
828 os.makedirs(path)
827 os.makedirs(path)
829 except Exception:
828 except Exception:
830 log.warning('Failed to create largefiles/lfs dir: %s', path)
829 log.warning('Failed to create largefiles/lfs dir: %s', path)
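# Editor's note (illustrative sketch, not part of this changeset): typical read
# paths through the VcsSettingsModel defined above; 'some-repo' is a hypothetical
# repository name and a configured database session is assumed.
#
#   model = VcsSettingsModel(repo='some-repo')       # repository-scoped model
#   ui_settings = model.get_ui_settings()            # repo overrides merged with globals
#   config = model.get_ui_settings_as_config_obj()   # same data as a Config object
#
#   global_model = VcsSettingsModel()                # no repo given -> globals only
#   global_ui = global_model.get_ui_settings()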
@@ -1,243 +1,245 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2018 RhodeCode GmbH
3 # Copyright (C) 2010-2018 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import os
21 import os
22 import time
22 import time
23 import logging
23 import logging
24 import datetime
24 import datetime
25 import hashlib
25 import hashlib
26 import tempfile
26 import tempfile
27 from os.path import join as jn
27 from os.path import join as jn
28
28
29 from tempfile import _RandomNameSequence
29 from tempfile import _RandomNameSequence
30
30
31 import pytest
31 import pytest
32
32
33 from rhodecode.model.db import User
33 from rhodecode.model.db import User
34 from rhodecode.lib import auth
34 from rhodecode.lib import auth
35 from rhodecode.lib import helpers as h
35 from rhodecode.lib import helpers as h
36 from rhodecode.lib.helpers import flash, link_to
36 from rhodecode.lib.helpers import flash, link_to
37 from rhodecode.lib.utils2 import safe_str
37 from rhodecode.lib.utils2 import safe_str
38
38
39
39
40 log = logging.getLogger(__name__)
40 log = logging.getLogger(__name__)
41
41
42 __all__ = [
42 __all__ = [
43 'get_new_dir', 'TestController',
43 'get_new_dir', 'TestController',
44 'link_to', 'clear_all_caches',
44 'link_to', 'clear_cache_regions',
45 'assert_session_flash', 'login_user', 'no_newline_id_generator',
45 'assert_session_flash', 'login_user', 'no_newline_id_generator',
46 'TESTS_TMP_PATH', 'HG_REPO', 'GIT_REPO', 'SVN_REPO',
46 'TESTS_TMP_PATH', 'HG_REPO', 'GIT_REPO', 'SVN_REPO',
47 'NEW_HG_REPO', 'NEW_GIT_REPO',
47 'NEW_HG_REPO', 'NEW_GIT_REPO',
48 'HG_FORK', 'GIT_FORK', 'TEST_USER_ADMIN_LOGIN', 'TEST_USER_ADMIN_PASS',
48 'HG_FORK', 'GIT_FORK', 'TEST_USER_ADMIN_LOGIN', 'TEST_USER_ADMIN_PASS',
49 'TEST_USER_REGULAR_LOGIN', 'TEST_USER_REGULAR_PASS',
49 'TEST_USER_REGULAR_LOGIN', 'TEST_USER_REGULAR_PASS',
50 'TEST_USER_REGULAR_EMAIL', 'TEST_USER_REGULAR2_LOGIN',
50 'TEST_USER_REGULAR_EMAIL', 'TEST_USER_REGULAR2_LOGIN',
51 'TEST_USER_REGULAR2_PASS', 'TEST_USER_REGULAR2_EMAIL', 'TEST_HG_REPO',
51 'TEST_USER_REGULAR2_PASS', 'TEST_USER_REGULAR2_EMAIL', 'TEST_HG_REPO',
52 'TEST_HG_REPO_CLONE', 'TEST_HG_REPO_PULL', 'TEST_GIT_REPO',
52 'TEST_HG_REPO_CLONE', 'TEST_HG_REPO_PULL', 'TEST_GIT_REPO',
53 'TEST_GIT_REPO_CLONE', 'TEST_GIT_REPO_PULL', 'SCM_TESTS',
53 'TEST_GIT_REPO_CLONE', 'TEST_GIT_REPO_PULL', 'SCM_TESTS',
54 ]
54 ]
55
55
56
56
57 # SOME GLOBALS FOR TESTS
57 # SOME GLOBALS FOR TESTS
58 TEST_DIR = tempfile.gettempdir()
58 TEST_DIR = tempfile.gettempdir()
59
59
60 TESTS_TMP_PATH = jn(TEST_DIR, 'rc_test_%s' % _RandomNameSequence().next())
60 TESTS_TMP_PATH = jn(TEST_DIR, 'rc_test_%s' % _RandomNameSequence().next())
61 TEST_USER_ADMIN_LOGIN = 'test_admin'
61 TEST_USER_ADMIN_LOGIN = 'test_admin'
62 TEST_USER_ADMIN_PASS = 'test12'
62 TEST_USER_ADMIN_PASS = 'test12'
63 TEST_USER_ADMIN_EMAIL = 'test_admin@mail.com'
63 TEST_USER_ADMIN_EMAIL = 'test_admin@mail.com'
64
64
65 TEST_USER_REGULAR_LOGIN = 'test_regular'
65 TEST_USER_REGULAR_LOGIN = 'test_regular'
66 TEST_USER_REGULAR_PASS = 'test12'
66 TEST_USER_REGULAR_PASS = 'test12'
67 TEST_USER_REGULAR_EMAIL = 'test_regular@mail.com'
67 TEST_USER_REGULAR_EMAIL = 'test_regular@mail.com'
68
68
69 TEST_USER_REGULAR2_LOGIN = 'test_regular2'
69 TEST_USER_REGULAR2_LOGIN = 'test_regular2'
70 TEST_USER_REGULAR2_PASS = 'test12'
70 TEST_USER_REGULAR2_PASS = 'test12'
71 TEST_USER_REGULAR2_EMAIL = 'test_regular2@mail.com'
71 TEST_USER_REGULAR2_EMAIL = 'test_regular2@mail.com'
72
72
73 HG_REPO = 'vcs_test_hg'
73 HG_REPO = 'vcs_test_hg'
74 GIT_REPO = 'vcs_test_git'
74 GIT_REPO = 'vcs_test_git'
75 SVN_REPO = 'vcs_test_svn'
75 SVN_REPO = 'vcs_test_svn'
76
76
77 NEW_HG_REPO = 'vcs_test_hg_new'
77 NEW_HG_REPO = 'vcs_test_hg_new'
78 NEW_GIT_REPO = 'vcs_test_git_new'
78 NEW_GIT_REPO = 'vcs_test_git_new'
79
79
80 HG_FORK = 'vcs_test_hg_fork'
80 HG_FORK = 'vcs_test_hg_fork'
81 GIT_FORK = 'vcs_test_git_fork'
81 GIT_FORK = 'vcs_test_git_fork'
82
82
83 ## VCS
83 ## VCS
84 SCM_TESTS = ['hg', 'git']
84 SCM_TESTS = ['hg', 'git']
85 uniq_suffix = str(int(time.mktime(datetime.datetime.now().timetuple())))
85 uniq_suffix = str(int(time.mktime(datetime.datetime.now().timetuple())))
86
86
87 TEST_GIT_REPO = jn(TESTS_TMP_PATH, GIT_REPO)
87 TEST_GIT_REPO = jn(TESTS_TMP_PATH, GIT_REPO)
88 TEST_GIT_REPO_CLONE = jn(TESTS_TMP_PATH, 'vcsgitclone%s' % uniq_suffix)
88 TEST_GIT_REPO_CLONE = jn(TESTS_TMP_PATH, 'vcsgitclone%s' % uniq_suffix)
89 TEST_GIT_REPO_PULL = jn(TESTS_TMP_PATH, 'vcsgitpull%s' % uniq_suffix)
89 TEST_GIT_REPO_PULL = jn(TESTS_TMP_PATH, 'vcsgitpull%s' % uniq_suffix)
90
90
91 TEST_HG_REPO = jn(TESTS_TMP_PATH, HG_REPO)
91 TEST_HG_REPO = jn(TESTS_TMP_PATH, HG_REPO)
92 TEST_HG_REPO_CLONE = jn(TESTS_TMP_PATH, 'vcshgclone%s' % uniq_suffix)
92 TEST_HG_REPO_CLONE = jn(TESTS_TMP_PATH, 'vcshgclone%s' % uniq_suffix)
93 TEST_HG_REPO_PULL = jn(TESTS_TMP_PATH, 'vcshgpull%s' % uniq_suffix)
93 TEST_HG_REPO_PULL = jn(TESTS_TMP_PATH, 'vcshgpull%s' % uniq_suffix)
94
94
95 TEST_REPO_PREFIX = 'vcs-test'
95 TEST_REPO_PREFIX = 'vcs-test'
96
96
97
97
98 def clear_all_caches():
98 def clear_cache_regions(regions=None):
99 from beaker.cache import cache_managers
99 # dogpile
100 for _cache in cache_managers.values():
100 from rhodecode.lib.rc_cache import region_meta
101 _cache.clear()
101 for region_name, region in region_meta.dogpile_cache_regions.items():
102 if not regions or region_name in regions:
103 region.invalidate()
102
104
103
105
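# Editor's note (illustrative, not part of this changeset): with the dogpile-backed
# cache, tests can invalidate every configured region or only selected ones by name;
# 'sql_cache_short' is the region this changeset migrates to dogpile.
#
#   clear_cache_regions()                       # invalidate all dogpile cache regions
#   clear_cache_regions(['sql_cache_short'])    # invalidate a single region by name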
104 def get_new_dir(title):
106 def get_new_dir(title):
105 """
107 """
106 Always returns a new, unique directory path.
108 Always returns a new, unique directory path.
107 """
109 """
108 from rhodecode.tests.vcs.utils import get_normalized_path
110 from rhodecode.tests.vcs.utils import get_normalized_path
109 name_parts = [TEST_REPO_PREFIX]
111 name_parts = [TEST_REPO_PREFIX]
110 if title:
112 if title:
111 name_parts.append(title)
113 name_parts.append(title)
112 hex_str = hashlib.sha1('%s %s' % (os.getpid(), time.time())).hexdigest()
114 hex_str = hashlib.sha1('%s %s' % (os.getpid(), time.time())).hexdigest()
113 name_parts.append(hex_str)
115 name_parts.append(hex_str)
114 name = '-'.join(name_parts)
116 name = '-'.join(name_parts)
115 path = os.path.join(TEST_DIR, name)
117 path = os.path.join(TEST_DIR, name)
116 return get_normalized_path(path)
118 return get_normalized_path(path)
117
119
118
120
119 def repo_id_generator(name):
121 def repo_id_generator(name):
120 numeric_hash = 0
122 numeric_hash = 0
121 for char in name:
123 for char in name:
122 numeric_hash += (ord(char))
124 numeric_hash += (ord(char))
123 return numeric_hash
125 return numeric_hash
124
126
125
127
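# Editor's note (illustrative, not part of this changeset): repo_id_generator simply
# sums the character ordinals of the name, e.g. ord('a') + ord('b') == 97 + 98 == 195.
assert repo_id_generator('ab') == 195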
126 @pytest.mark.usefixtures('app', 'index_location')
128 @pytest.mark.usefixtures('app', 'index_location')
127 class TestController(object):
129 class TestController(object):
128
130
129 maxDiff = None
131 maxDiff = None
130
132
131 def log_user(self, username=TEST_USER_ADMIN_LOGIN,
133 def log_user(self, username=TEST_USER_ADMIN_LOGIN,
132 password=TEST_USER_ADMIN_PASS):
134 password=TEST_USER_ADMIN_PASS):
133 self._logged_username = username
135 self._logged_username = username
134 self._session = login_user_session(self.app, username, password)
136 self._session = login_user_session(self.app, username, password)
135 self.csrf_token = auth.get_csrf_token(self._session)
137 self.csrf_token = auth.get_csrf_token(self._session)
136
138
137 return self._session['rhodecode_user']
139 return self._session['rhodecode_user']
138
140
139 def logout_user(self):
141 def logout_user(self):
140 logout_user_session(self.app, auth.get_csrf_token(self._session))
142 logout_user_session(self.app, auth.get_csrf_token(self._session))
141 self.csrf_token = None
143 self.csrf_token = None
142 self._logged_username = None
144 self._logged_username = None
143 self._session = None
145 self._session = None
144
146
145 def _get_logged_user(self):
147 def _get_logged_user(self):
146 return User.get_by_username(self._logged_username)
148 return User.get_by_username(self._logged_username)
147
149
148
150
149 def login_user_session(
151 def login_user_session(
150 app, username=TEST_USER_ADMIN_LOGIN, password=TEST_USER_ADMIN_PASS):
152 app, username=TEST_USER_ADMIN_LOGIN, password=TEST_USER_ADMIN_PASS):
151
153
152 response = app.post(
154 response = app.post(
153 h.route_path('login'),
155 h.route_path('login'),
154 {'username': username, 'password': password})
156 {'username': username, 'password': password})
155 if 'invalid user name' in response.body:
157 if 'invalid user name' in response.body:
156 pytest.fail('could not login using %s %s' % (username, password))
158 pytest.fail('could not login using %s %s' % (username, password))
157
159
158 assert response.status == '302 Found'
160 assert response.status == '302 Found'
159 response = response.follow()
161 response = response.follow()
160 assert response.status == '200 OK'
162 assert response.status == '200 OK'
161
163
162 session = response.get_session_from_response()
164 session = response.get_session_from_response()
163 assert 'rhodecode_user' in session
165 assert 'rhodecode_user' in session
164 rc_user = session['rhodecode_user']
166 rc_user = session['rhodecode_user']
165 assert rc_user.get('username') == username
167 assert rc_user.get('username') == username
166 assert rc_user.get('is_authenticated')
168 assert rc_user.get('is_authenticated')
167
169
168 return session
170 return session
169
171
170
172
171 def logout_user_session(app, csrf_token):
173 def logout_user_session(app, csrf_token):
172 app.post(h.route_path('logout'), {'csrf_token': csrf_token}, status=302)
174 app.post(h.route_path('logout'), {'csrf_token': csrf_token}, status=302)
173
175
174
176
175 def login_user(app, username=TEST_USER_ADMIN_LOGIN,
177 def login_user(app, username=TEST_USER_ADMIN_LOGIN,
176 password=TEST_USER_ADMIN_PASS):
178 password=TEST_USER_ADMIN_PASS):
177 return login_user_session(app, username, password)['rhodecode_user']
179 return login_user_session(app, username, password)['rhodecode_user']
178
180
179
181
180 def assert_session_flash(response, msg=None, category=None, no_=None):
182 def assert_session_flash(response, msg=None, category=None, no_=None):
181 """
183 """
182 Assert on a flash message in the current session.
184 Assert on a flash message in the current session.
183
185
184 :param response: Response from the given call; it will contain the flash
186 :param response: Response from the given call; it will contain the flash
185 messages or a session with them bound to it.
187 messages or a session with them bound to it.
186 :param msg: The expected message. Will be evaluated if a
188 :param msg: The expected message. Will be evaluated if a
187 :class:`LazyString` is passed in.
189 :class:`LazyString` is passed in.
188 :param category: Optional. If passed, the message category will be
190 :param category: Optional. If passed, the message category will be
189 checked as well.
191 checked as well.
190 :param no_: Optional. If passed, the message will be checked to NOT
192 :param no_: Optional. If passed, the message will be checked to NOT
191 be in the flash session
193 be in the flash session
192 """
194 """
193 if msg is None and no_ is None:
195 if msg is None and no_ is None:
194 raise ValueError("Parameter msg or no_ is required.")
196 raise ValueError("Parameter msg or no_ is required.")
195
197
196 if msg and no_:
198 if msg and no_:
197 raise ValueError("Please specify either msg or no_, but not both")
199 raise ValueError("Please specify either msg or no_, but not both")
198
200
199 session = response.get_session_from_response()
201 session = response.get_session_from_response()
200 messages = flash.pop_messages(session=session)
202 messages = flash.pop_messages(session=session)
201 msg = _eval_if_lazy(msg)
203 msg = _eval_if_lazy(msg)
202
204
203 if no_:
205 if no_:
204 error_msg = 'unable to detect no_ message `%s` in empty flash list' % no_
206 error_msg = 'unable to detect no_ message `%s` in empty flash list' % no_
205 else:
207 else:
206 error_msg = 'unable to find message `%s` in empty flash list' % msg
208 error_msg = 'unable to find message `%s` in empty flash list' % msg
207 assert messages, error_msg
209 assert messages, error_msg
208 message = messages[0]
210 message = messages[0]
209
211
210 message_text = _eval_if_lazy(message.message) or ''
212 message_text = _eval_if_lazy(message.message) or ''
211
213
212 if no_:
214 if no_:
213 if no_ in message_text:
215 if no_ in message_text:
214 msg = u'msg `%s` found in session flash.' % (no_,)
216 msg = u'msg `%s` found in session flash.' % (no_,)
215 pytest.fail(safe_str(msg))
217 pytest.fail(safe_str(msg))
216 else:
218 else:
217 if msg not in message_text:
219 if msg not in message_text:
218 fail_msg = u'msg `%s` not found in session ' \
220 fail_msg = u'msg `%s` not found in session ' \
219 u'flash: got `%s` (type:%s) instead' % (
221 u'flash: got `%s` (type:%s) instead' % (
220 msg, message_text, type(message_text))
222 msg, message_text, type(message_text))
221
223
222 pytest.fail(safe_str(fail_msg))
224 pytest.fail(safe_str(fail_msg))
223 if category:
225 if category:
224 assert category == message.category
226 assert category == message.category
225
227
226
228
227 def _eval_if_lazy(value):
229 def _eval_if_lazy(value):
228 return value.eval() if hasattr(value, 'eval') else value
230 return value.eval() if hasattr(value, 'eval') else value
229
231
230
232
231 def no_newline_id_generator(test_name):
233 def no_newline_id_generator(test_name):
232 """
234 """
233 Generates a test name without spaces or newline characters. Used for
235 Generates a test name without spaces or newline characters. Used for
234 nicer output of test progress.
236 nicer output of test progress.
235 """
237 """
236 org_name = test_name
238 org_name = test_name
237 test_name = test_name\
239 test_name = test_name\
238 .replace('\n', '_N') \
240 .replace('\n', '_N') \
239 .replace('\r', '_N') \
241 .replace('\r', '_N') \
240 .replace('\t', '_T') \
242 .replace('\t', '_T') \
241 .replace(' ', '_S')
243 .replace(' ', '_S')
242
244
243 return test_name or 'test-with-empty-name'
245 return test_name or 'test-with-empty-name'
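# Editor's note (illustrative sketch, not part of this changeset): how the helpers
# above are typically combined in a controller test; the route name and flash text
# are hypothetical.
#
#   class TestMySettingsView(TestController):
#       def test_update_shows_flash(self):
#           self.log_user()
#           response = self.app.post(
#               h.route_path('my_settings_update'),        # hypothetical route
#               {'csrf_token': self.csrf_token})
#           assert_session_flash(response, 'Updated successfully')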
@@ -1,446 +1,445 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2018 RhodeCode GmbH
3 # Copyright (C) 2010-2018 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import json
21 import json
22 import multiprocessing
22 import multiprocessing
23 import os
23 import os
24
24
25 import mock
25 import mock
26 import py
26 import py
27 import pytest
27 import pytest
28
28
29 from rhodecode.lib import caching_query
29 from rhodecode.lib import caching_query
30 from rhodecode.lib import utils
30 from rhodecode.lib import utils
31 from rhodecode.lib.utils2 import md5
31 from rhodecode.lib.utils2 import md5
32 from rhodecode.model import settings
32 from rhodecode.model import settings
33 from rhodecode.model import db
33 from rhodecode.model import db
34 from rhodecode.model import meta
34 from rhodecode.model import meta
35 from rhodecode.model.repo import RepoModel
35 from rhodecode.model.repo import RepoModel
36 from rhodecode.model.repo_group import RepoGroupModel
36 from rhodecode.model.repo_group import RepoGroupModel
37 from rhodecode.model.scm import ScmModel
37 from rhodecode.model.scm import ScmModel
38 from rhodecode.model.settings import UiSetting, SettingsModel
38 from rhodecode.model.settings import UiSetting, SettingsModel
39 from rhodecode.tests.fixture import Fixture
39 from rhodecode.tests.fixture import Fixture
40
40
41
41
42 fixture = Fixture()
42 fixture = Fixture()
43
43
44
44
45 def extract_hooks(config):
45 def extract_hooks(config):
46 """Return a dictionary with the hook entries of the given config."""
46 """Return a dictionary with the hook entries of the given config."""
47 hooks = {}
47 hooks = {}
48 config_items = config.serialize()
48 config_items = config.serialize()
49 for section, name, value in config_items:
49 for section, name, value in config_items:
50 if section != 'hooks':
50 if section != 'hooks':
51 continue
51 continue
52 hooks[name] = value
52 hooks[name] = value
53
53
54 return hooks
54 return hooks
55
55
56
56
57 def disable_hooks(request, hooks):
57 def disable_hooks(request, hooks):
58 """Disables the given hooks from the UI settings."""
58 """Disables the given hooks from the UI settings."""
59 session = meta.Session()
59 session = meta.Session()
60
60
61 model = SettingsModel()
61 model = SettingsModel()
62 for hook_key in hooks:
62 for hook_key in hooks:
63 sett = model.get_ui_by_key(hook_key)
63 sett = model.get_ui_by_key(hook_key)
64 sett.ui_active = False
64 sett.ui_active = False
65 session.add(sett)
65 session.add(sett)
66
66
67 # Invalidate cache
67 # Invalidate cache
68 ui_settings = session.query(db.RhodeCodeUi).options(
68 ui_settings = session.query(db.RhodeCodeUi).options(
69 caching_query.FromCache('sql_cache_short', 'get_hg_ui_settings'))
69 caching_query.FromCache('sql_cache_short', 'get_hg_ui_settings'))
70 ui_settings.invalidate()
70 ui_settings.invalidate()
71
71
72 ui_settings = session.query(db.RhodeCodeUi).options(
72 ui_settings = session.query(db.RhodeCodeUi).options(
73 caching_query.FromCache(
73 caching_query.FromCache('sql_cache_short', 'get_hook_settings'))
74 'sql_cache_short', 'get_hook_settings', 'get_hook_settings'))
75 ui_settings.invalidate()
74 ui_settings.invalidate()
76
75
77 @request.addfinalizer
76 @request.addfinalizer
78 def rollback():
77 def rollback():
79 session.rollback()
78 session.rollback()
80
79
81
80
82 HOOK_PRE_PUSH = db.RhodeCodeUi.HOOK_PRE_PUSH
81 HOOK_PRE_PUSH = db.RhodeCodeUi.HOOK_PRE_PUSH
83 HOOK_PRETX_PUSH = db.RhodeCodeUi.HOOK_PRETX_PUSH
82 HOOK_PRETX_PUSH = db.RhodeCodeUi.HOOK_PRETX_PUSH
84 HOOK_PUSH = db.RhodeCodeUi.HOOK_PUSH
83 HOOK_PUSH = db.RhodeCodeUi.HOOK_PUSH
85 HOOK_PRE_PULL = db.RhodeCodeUi.HOOK_PRE_PULL
84 HOOK_PRE_PULL = db.RhodeCodeUi.HOOK_PRE_PULL
86 HOOK_PULL = db.RhodeCodeUi.HOOK_PULL
85 HOOK_PULL = db.RhodeCodeUi.HOOK_PULL
87 HOOK_REPO_SIZE = db.RhodeCodeUi.HOOK_REPO_SIZE
86 HOOK_REPO_SIZE = db.RhodeCodeUi.HOOK_REPO_SIZE
88 HOOK_PUSH_KEY = db.RhodeCodeUi.HOOK_PUSH_KEY
87 HOOK_PUSH_KEY = db.RhodeCodeUi.HOOK_PUSH_KEY
89
88
90 HG_HOOKS = frozenset(
89 HG_HOOKS = frozenset(
91 (HOOK_PRE_PULL, HOOK_PULL, HOOK_PRE_PUSH, HOOK_PRETX_PUSH, HOOK_PUSH,
90 (HOOK_PRE_PULL, HOOK_PULL, HOOK_PRE_PUSH, HOOK_PRETX_PUSH, HOOK_PUSH,
92 HOOK_REPO_SIZE, HOOK_PUSH_KEY))
91 HOOK_REPO_SIZE, HOOK_PUSH_KEY))
93
92
94
93
95 @pytest.mark.parametrize('disabled_hooks,expected_hooks', [
94 @pytest.mark.parametrize('disabled_hooks,expected_hooks', [
96 ([], HG_HOOKS),
95 ([], HG_HOOKS),
97 (HG_HOOKS, []),
96 (HG_HOOKS, []),
98
97
99 ([HOOK_PRE_PUSH, HOOK_PRETX_PUSH, HOOK_REPO_SIZE, HOOK_PUSH_KEY], [HOOK_PRE_PULL, HOOK_PULL, HOOK_PUSH]),
98 ([HOOK_PRE_PUSH, HOOK_PRETX_PUSH, HOOK_REPO_SIZE, HOOK_PUSH_KEY], [HOOK_PRE_PULL, HOOK_PULL, HOOK_PUSH]),
100
99
101 # When a pull/push hook is disabled, its pre-pull/push counterpart should
100 # When a pull/push hook is disabled, its pre-pull/push counterpart should
102 # be disabled too.
101 # be disabled too.
103 ([HOOK_PUSH], [HOOK_PRE_PULL, HOOK_PULL, HOOK_REPO_SIZE]),
102 ([HOOK_PUSH], [HOOK_PRE_PULL, HOOK_PULL, HOOK_REPO_SIZE]),
104 ([HOOK_PULL], [HOOK_PRE_PUSH, HOOK_PRETX_PUSH, HOOK_PUSH, HOOK_REPO_SIZE,
103 ([HOOK_PULL], [HOOK_PRE_PUSH, HOOK_PRETX_PUSH, HOOK_PUSH, HOOK_REPO_SIZE,
105 HOOK_PUSH_KEY]),
104 HOOK_PUSH_KEY]),
106 ])
105 ])
107 def test_make_db_config_hg_hooks(baseapp, request, disabled_hooks,
106 def test_make_db_config_hg_hooks(baseapp, request, disabled_hooks,
108 expected_hooks):
107 expected_hooks):
109 disable_hooks(request, disabled_hooks)
108 disable_hooks(request, disabled_hooks)
110
109
111 config = utils.make_db_config()
110 config = utils.make_db_config()
112 hooks = extract_hooks(config)
111 hooks = extract_hooks(config)
113
112
114 assert set(hooks.iterkeys()).intersection(HG_HOOKS) == set(expected_hooks)
113 assert set(hooks.iterkeys()).intersection(HG_HOOKS) == set(expected_hooks)
115
114
116
115
117 @pytest.mark.parametrize('disabled_hooks,expected_hooks', [
116 @pytest.mark.parametrize('disabled_hooks,expected_hooks', [
118 ([], ['pull', 'push']),
117 ([], ['pull', 'push']),
119 ([HOOK_PUSH], ['pull']),
118 ([HOOK_PUSH], ['pull']),
120 ([HOOK_PULL], ['push']),
119 ([HOOK_PULL], ['push']),
121 ([HOOK_PULL, HOOK_PUSH], []),
120 ([HOOK_PULL, HOOK_PUSH], []),
122 ])
121 ])
123 def test_get_enabled_hook_classes(disabled_hooks, expected_hooks):
122 def test_get_enabled_hook_classes(disabled_hooks, expected_hooks):
124 hook_keys = (HOOK_PUSH, HOOK_PULL)
123 hook_keys = (HOOK_PUSH, HOOK_PULL)
125 ui_settings = [
124 ui_settings = [
126 ('hooks', key, 'some value', key not in disabled_hooks)
125 ('hooks', key, 'some value', key not in disabled_hooks)
127 for key in hook_keys]
126 for key in hook_keys]
128
127
129 result = utils.get_enabled_hook_classes(ui_settings)
128 result = utils.get_enabled_hook_classes(ui_settings)
130 assert sorted(result) == expected_hooks
129 assert sorted(result) == expected_hooks
131
130
132
131
133 def test_get_filesystem_repos_finds_repos(tmpdir, baseapp):
132 def test_get_filesystem_repos_finds_repos(tmpdir, baseapp):
134 _stub_git_repo(tmpdir.ensure('repo', dir=True))
133 _stub_git_repo(tmpdir.ensure('repo', dir=True))
135 repos = list(utils.get_filesystem_repos(str(tmpdir)))
134 repos = list(utils.get_filesystem_repos(str(tmpdir)))
136 assert repos == [('repo', ('git', tmpdir.join('repo')))]
135 assert repos == [('repo', ('git', tmpdir.join('repo')))]
137
136
138
137
139 def test_get_filesystem_repos_skips_directories(tmpdir, baseapp):
138 def test_get_filesystem_repos_skips_directories(tmpdir, baseapp):
140 tmpdir.ensure('not-a-repo', dir=True)
139 tmpdir.ensure('not-a-repo', dir=True)
141 repos = list(utils.get_filesystem_repos(str(tmpdir)))
140 repos = list(utils.get_filesystem_repos(str(tmpdir)))
142 assert repos == []
141 assert repos == []
143
142
144
143
145 def test_get_filesystem_repos_skips_directories_with_repos(tmpdir, baseapp):
144 def test_get_filesystem_repos_skips_directories_with_repos(tmpdir, baseapp):
146 _stub_git_repo(tmpdir.ensure('subdir/repo', dir=True))
145 _stub_git_repo(tmpdir.ensure('subdir/repo', dir=True))
147 repos = list(utils.get_filesystem_repos(str(tmpdir)))
146 repos = list(utils.get_filesystem_repos(str(tmpdir)))
148 assert repos == []
147 assert repos == []
149
148
150
149
151 def test_get_filesystem_repos_finds_repos_in_subdirectories(tmpdir, baseapp):
150 def test_get_filesystem_repos_finds_repos_in_subdirectories(tmpdir, baseapp):
152 _stub_git_repo(tmpdir.ensure('subdir/repo', dir=True))
151 _stub_git_repo(tmpdir.ensure('subdir/repo', dir=True))
153 repos = list(utils.get_filesystem_repos(str(tmpdir), recursive=True))
152 repos = list(utils.get_filesystem_repos(str(tmpdir), recursive=True))
154 assert repos == [('subdir/repo', ('git', tmpdir.join('subdir', 'repo')))]
153 assert repos == [('subdir/repo', ('git', tmpdir.join('subdir', 'repo')))]
155
154
156
155
157 def test_get_filesystem_repos_skips_names_starting_with_dot(tmpdir):
156 def test_get_filesystem_repos_skips_names_starting_with_dot(tmpdir):
158 _stub_git_repo(tmpdir.ensure('.repo', dir=True))
157 _stub_git_repo(tmpdir.ensure('.repo', dir=True))
159 repos = list(utils.get_filesystem_repos(str(tmpdir)))
158 repos = list(utils.get_filesystem_repos(str(tmpdir)))
160 assert repos == []
159 assert repos == []
161
160
162
161
163 def test_get_filesystem_repos_skips_files(tmpdir):
162 def test_get_filesystem_repos_skips_files(tmpdir):
164 tmpdir.ensure('test-file')
163 tmpdir.ensure('test-file')
165 repos = list(utils.get_filesystem_repos(str(tmpdir)))
164 repos = list(utils.get_filesystem_repos(str(tmpdir)))
166 assert repos == []
165 assert repos == []
167
166
168
167
169 def test_get_filesystem_repos_skips_removed_repositories(tmpdir):
168 def test_get_filesystem_repos_skips_removed_repositories(tmpdir):
170 removed_repo_name = 'rm__00000000_000000_000000__.stub'
169 removed_repo_name = 'rm__00000000_000000_000000__.stub'
171 assert utils.REMOVED_REPO_PAT.match(removed_repo_name)
170 assert utils.REMOVED_REPO_PAT.match(removed_repo_name)
172 _stub_git_repo(tmpdir.ensure(removed_repo_name, dir=True))
171 _stub_git_repo(tmpdir.ensure(removed_repo_name, dir=True))
173 repos = list(utils.get_filesystem_repos(str(tmpdir)))
172 repos = list(utils.get_filesystem_repos(str(tmpdir)))
174 assert repos == []
173 assert repos == []
175
174
176
175
177 def _stub_git_repo(repo_path):
176 def _stub_git_repo(repo_path):
178 """
177 """
179 Make `repo_path` look like a Git repository.
178 Make `repo_path` look like a Git repository.
180 """
179 """
181 repo_path.ensure('.git', dir=True)
180 repo_path.ensure('.git', dir=True)
182
181
183
182
184 @pytest.mark.parametrize('str_class', [str, unicode], ids=['str', 'unicode'])
183 @pytest.mark.parametrize('str_class', [str, unicode], ids=['str', 'unicode'])
185 def test_get_dirpaths_returns_all_paths(tmpdir, str_class):
184 def test_get_dirpaths_returns_all_paths(tmpdir, str_class):
186 tmpdir.ensure('test-file')
185 tmpdir.ensure('test-file')
187 dirpaths = utils._get_dirpaths(str_class(tmpdir))
186 dirpaths = utils._get_dirpaths(str_class(tmpdir))
188 assert dirpaths == ['test-file']
187 assert dirpaths == ['test-file']
189
188
190
189
191 def test_get_dirpaths_returns_all_paths_bytes(
190 def test_get_dirpaths_returns_all_paths_bytes(
192 tmpdir, platform_encodes_filenames):
191 tmpdir, platform_encodes_filenames):
193 if platform_encodes_filenames:
192 if platform_encodes_filenames:
194 pytest.skip("This platform seems to encode filenames.")
193 pytest.skip("This platform seems to encode filenames.")
195 tmpdir.ensure('repo-a-umlaut-\xe4')
194 tmpdir.ensure('repo-a-umlaut-\xe4')
196 dirpaths = utils._get_dirpaths(str(tmpdir))
195 dirpaths = utils._get_dirpaths(str(tmpdir))
197 assert dirpaths == ['repo-a-umlaut-\xe4']
196 assert dirpaths == ['repo-a-umlaut-\xe4']
198
197
199
198
200 def test_get_dirpaths_skips_paths_it_cannot_decode(
199 def test_get_dirpaths_skips_paths_it_cannot_decode(
201 tmpdir, platform_encodes_filenames):
200 tmpdir, platform_encodes_filenames):
202 if platform_encodes_filenames:
201 if platform_encodes_filenames:
203 pytest.skip("This platform seems to encode filenames.")
202 pytest.skip("This platform seems to encode filenames.")
204 path_with_latin1 = 'repo-a-umlaut-\xe4'
203 path_with_latin1 = 'repo-a-umlaut-\xe4'
205 tmpdir.ensure(path_with_latin1)
204 tmpdir.ensure(path_with_latin1)
206 dirpaths = utils._get_dirpaths(unicode(tmpdir))
205 dirpaths = utils._get_dirpaths(unicode(tmpdir))
207 assert dirpaths == []
206 assert dirpaths == []
208
207
209
208
210 @pytest.fixture(scope='session')
209 @pytest.fixture(scope='session')
211 def platform_encodes_filenames():
210 def platform_encodes_filenames():
212 """
211 """
213 Boolean indicator if the current platform changes filename encodings.
212 Boolean indicator if the current platform changes filename encodings.
214 """
213 """
215 path_with_latin1 = 'repo-a-umlaut-\xe4'
214 path_with_latin1 = 'repo-a-umlaut-\xe4'
216 tmpdir = py.path.local.mkdtemp()
215 tmpdir = py.path.local.mkdtemp()
217 tmpdir.ensure(path_with_latin1)
216 tmpdir.ensure(path_with_latin1)
218 read_path = tmpdir.listdir()[0].basename
217 read_path = tmpdir.listdir()[0].basename
219 tmpdir.remove()
218 tmpdir.remove()
220 return path_with_latin1 != read_path
219 return path_with_latin1 != read_path
221
220
222
221
223
222
224
223
225 def test_repo2db_mapper_groups(repo_groups):
224 def test_repo2db_mapper_groups(repo_groups):
226 session = meta.Session()
225 session = meta.Session()
227 zombie_group, parent_group, child_group = repo_groups
226 zombie_group, parent_group, child_group = repo_groups
228 zombie_path = os.path.join(
227 zombie_path = os.path.join(
229 RepoGroupModel().repos_path, zombie_group.full_path)
228 RepoGroupModel().repos_path, zombie_group.full_path)
230 os.rmdir(zombie_path)
229 os.rmdir(zombie_path)
231
230
232 # Avoid removing test repos when calling repo2db_mapper
231 # Avoid removing test repos when calling repo2db_mapper
233 repo_list = {
232 repo_list = {
234 repo.repo_name: 'test' for repo in session.query(db.Repository).all()
233 repo.repo_name: 'test' for repo in session.query(db.Repository).all()
235 }
234 }
236 utils.repo2db_mapper(repo_list, remove_obsolete=True)
235 utils.repo2db_mapper(repo_list, remove_obsolete=True)
237
236
238 groups_in_db = session.query(db.RepoGroup).all()
237 groups_in_db = session.query(db.RepoGroup).all()
239 assert child_group in groups_in_db
238 assert child_group in groups_in_db
240 assert parent_group in groups_in_db
239 assert parent_group in groups_in_db
241 assert zombie_path not in groups_in_db
240 assert zombie_path not in groups_in_db
242
241
243
242
244 def test_repo2db_mapper_enables_largefiles(backend):
243 def test_repo2db_mapper_enables_largefiles(backend):
245 repo = backend.create_repo()
244 repo = backend.create_repo()
246 repo_list = {repo.repo_name: 'test'}
245 repo_list = {repo.repo_name: 'test'}
247 with mock.patch('rhodecode.model.db.Repository.scm_instance') as scm_mock:
246 with mock.patch('rhodecode.model.db.Repository.scm_instance') as scm_mock:
248 utils.repo2db_mapper(repo_list, remove_obsolete=False)
247 utils.repo2db_mapper(repo_list, remove_obsolete=False)
249 _, kwargs = scm_mock.call_args
248 _, kwargs = scm_mock.call_args
250 assert kwargs['config'].get('extensions', 'largefiles') == ''
249 assert kwargs['config'].get('extensions', 'largefiles') == ''
251
250
252
251
253 @pytest.mark.backends("git", "svn")
252 @pytest.mark.backends("git", "svn")
254 def test_repo2db_mapper_installs_hooks_for_repos_in_db(backend):
253 def test_repo2db_mapper_installs_hooks_for_repos_in_db(backend):
255 repo = backend.create_repo()
254 repo = backend.create_repo()
256 repo_list = {repo.repo_name: 'test'}
255 repo_list = {repo.repo_name: 'test'}
257 utils.repo2db_mapper(repo_list, remove_obsolete=False)
256 utils.repo2db_mapper(repo_list, remove_obsolete=False)
258
257
259
258
260 @pytest.mark.backends("git", "svn")
259 @pytest.mark.backends("git", "svn")
261 def test_repo2db_mapper_installs_hooks_for_newly_added_repos(backend):
260 def test_repo2db_mapper_installs_hooks_for_newly_added_repos(backend):
262 repo = backend.create_repo()
261 repo = backend.create_repo()
263 RepoModel().delete(repo, fs_remove=False)
262 RepoModel().delete(repo, fs_remove=False)
264 meta.Session().commit()
263 meta.Session().commit()
265 repo_list = {repo.repo_name: repo.scm_instance()}
264 repo_list = {repo.repo_name: repo.scm_instance()}
266 utils.repo2db_mapper(repo_list, remove_obsolete=False)
265 utils.repo2db_mapper(repo_list, remove_obsolete=False)
267
266
268
267
269 class TestPasswordChanged(object):
268 class TestPasswordChanged(object):
270 def setup(self):
269 def setup(self):
271 self.session = {
270 self.session = {
272 'rhodecode_user': {
271 'rhodecode_user': {
273 'password': '0cc175b9c0f1b6a831c399e269772661'
272 'password': '0cc175b9c0f1b6a831c399e269772661'
274 }
273 }
275 }
274 }
276 self.auth_user = mock.Mock()
275 self.auth_user = mock.Mock()
276 self.auth_user.username = 'test'
275 self.auth_user.username = 'test'
278 self.auth_user.password = 'abc123'
277 self.auth_user.password = 'abc123'
279
278
280 def test_returns_false_for_default_user(self):
279 def test_returns_false_for_default_user(self):
281 self.auth_user.username = db.User.DEFAULT_USER
280 self.auth_user.username = db.User.DEFAULT_USER
282 result = utils.password_changed(self.auth_user, self.session)
281 result = utils.password_changed(self.auth_user, self.session)
283 assert result is False
282 assert result is False
284
283
285 def test_returns_false_if_password_was_not_changed(self):
284 def test_returns_false_if_password_was_not_changed(self):
286 self.session['rhodecode_user']['password'] = md5(
285 self.session['rhodecode_user']['password'] = md5(
287 self.auth_user.password)
286 self.auth_user.password)
288 result = utils.password_changed(self.auth_user, self.session)
287 result = utils.password_changed(self.auth_user, self.session)
289 assert result is False
288 assert result is False
290
289
291 def test_returns_true_if_password_was_changed(self):
290 def test_returns_true_if_password_was_changed(self):
292 result = utils.password_changed(self.auth_user, self.session)
291 result = utils.password_changed(self.auth_user, self.session)
293 assert result is True
292 assert result is True
294
293
295 def test_returns_true_if_auth_user_password_is_empty(self):
294 def test_returns_true_if_auth_user_password_is_empty(self):
296 self.auth_user.password = None
295 self.auth_user.password = None
297 result = utils.password_changed(self.auth_user, self.session)
296 result = utils.password_changed(self.auth_user, self.session)
298 assert result is True
297 assert result is True
299
298
300 def test_returns_true_if_session_password_is_empty(self):
299 def test_returns_true_if_session_password_is_empty(self):
301 self.session['rhodecode_user'].pop('password')
300 self.session['rhodecode_user'].pop('password')
302 result = utils.password_changed(self.auth_user, self.session)
301 result = utils.password_changed(self.auth_user, self.session)
303 assert result is True
302 assert result is True
304
303
305
304
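The assertions in TestPasswordChanged above only hold if utils.password_changed compares an md5 of the current user password against the hash kept in the session. Below is a minimal, hypothetical sketch of that check, written purely to illustrate the behaviour the tests assume; the real helper lives in rhodecode.lib.utils and may differ in detail (the DEFAULT_USER constant here is an assumption standing in for db.User.DEFAULT_USER).

    import hashlib

    DEFAULT_USER = 'default'  # assumption: mirrors db.User.DEFAULT_USER

    def password_changed_sketch(auth_user, session):
        """Hypothetical re-implementation of the check exercised above."""
        # the default (anonymous) user never triggers a "password changed" state
        if auth_user.username == DEFAULT_USER:
            return False
        password_hash = (hashlib.md5(auth_user.password).hexdigest()
                         if auth_user.password else '')
        session_hash = session.get('rhodecode_user', {}).get('password', '')
        # any mismatch, including a missing value on either side, counts as changed
        return password_hash != session_hash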
306 class TestReadOpensourceLicenses(object):
305 class TestReadOpensourceLicenses(object):
307 def test_success(self):
306 def test_success(self):
308 utils._license_cache = None
307 utils._license_cache = None
309 json_data = '''
308 json_data = '''
310 {
309 {
311 "python2.7-pytest-2.7.1": {"UNKNOWN": null},
310 "python2.7-pytest-2.7.1": {"UNKNOWN": null},
312 "python2.7-Markdown-2.6.2": {
311 "python2.7-Markdown-2.6.2": {
313 "BSD-3-Clause": "http://spdx.org/licenses/BSD-3-Clause"
312 "BSD-3-Clause": "http://spdx.org/licenses/BSD-3-Clause"
314 }
313 }
315 }
314 }
316 '''
315 '''
317 resource_string_patch = mock.patch.object(
316 resource_string_patch = mock.patch.object(
318 utils.pkg_resources, 'resource_string', return_value=json_data)
317 utils.pkg_resources, 'resource_string', return_value=json_data)
319 with resource_string_patch:
318 with resource_string_patch:
320 result = utils.read_opensource_licenses()
319 result = utils.read_opensource_licenses()
321 assert result == json.loads(json_data)
320 assert result == json.loads(json_data)
322
321
323 def test_caching(self):
322 def test_caching(self):
324 utils._license_cache = {
323 utils._license_cache = {
325 "python2.7-pytest-2.7.1": {
324 "python2.7-pytest-2.7.1": {
326 "UNKNOWN": None
325 "UNKNOWN": None
327 },
326 },
328 "python2.7-Markdown-2.6.2": {
327 "python2.7-Markdown-2.6.2": {
329 "BSD-3-Clause": "http://spdx.org/licenses/BSD-3-Clause"
328 "BSD-3-Clause": "http://spdx.org/licenses/BSD-3-Clause"
330 }
329 }
331 }
330 }
332 resource_patch = mock.patch.object(
331 resource_patch = mock.patch.object(
333 utils.pkg_resources, 'resource_string', side_effect=Exception)
332 utils.pkg_resources, 'resource_string', side_effect=Exception)
334 json_patch = mock.patch.object(
333 json_patch = mock.patch.object(
335 utils.json, 'loads', side_effect=Exception)
334 utils.json, 'loads', side_effect=Exception)
336
335
337 with resource_patch as resource_mock, json_patch as json_mock:
336 with resource_patch as resource_mock, json_patch as json_mock:
338 result = utils.read_opensource_licenses()
337 result = utils.read_opensource_licenses()
339
338
340 assert resource_mock.call_count == 0
339 assert resource_mock.call_count == 0
341 assert json_mock.call_count == 0
340 assert json_mock.call_count == 0
342 assert result == utils._license_cache
341 assert result == utils._license_cache
343
342
344 def test_licenses_file_contains_no_unknown_licenses(self):
343 def test_licenses_file_contains_no_unknown_licenses(self):
345 utils._license_cache = None
344 utils._license_cache = None
346 result = utils.read_opensource_licenses()
345 result = utils.read_opensource_licenses()
347 license_names = []
346 license_names = []
348 for licenses in result.values():
347 for licenses in result.values():
349 license_names.extend(licenses.keys())
348 license_names.extend(licenses.keys())
350 assert 'UNKNOWN' not in license_names
349 assert 'UNKNOWN' not in license_names
351
350
352
351
353 class TestMakeDbConfig(object):
352 class TestMakeDbConfig(object):
354 def test_data_from_config_data_from_db_returned(self):
353 def test_data_from_config_data_from_db_returned(self):
355 test_data = [
354 test_data = [
356 ('section1', 'option1', 'value1'),
355 ('section1', 'option1', 'value1'),
357 ('section2', 'option2', 'value2'),
356 ('section2', 'option2', 'value2'),
358 ('section3', 'option3', 'value3'),
357 ('section3', 'option3', 'value3'),
359 ]
358 ]
360 with mock.patch.object(utils, 'config_data_from_db') as config_mock:
359 with mock.patch.object(utils, 'config_data_from_db') as config_mock:
361 config_mock.return_value = test_data
360 config_mock.return_value = test_data
362 kwargs = {'clear_session': False, 'repo': 'test_repo'}
361 kwargs = {'clear_session': False, 'repo': 'test_repo'}
363 result = utils.make_db_config(**kwargs)
362 result = utils.make_db_config(**kwargs)
364 config_mock.assert_called_once_with(**kwargs)
363 config_mock.assert_called_once_with(**kwargs)
365 for section, option, expected_value in test_data:
364 for section, option, expected_value in test_data:
366 value = result.get(section, option)
365 value = result.get(section, option)
367 assert value == expected_value
366 assert value == expected_value
368
367
369
368
370 class TestConfigDataFromDb(object):
369 class TestConfigDataFromDb(object):
371 def test_config_data_from_db_returns_active_settings(self):
370 def test_config_data_from_db_returns_active_settings(self):
372 test_data = [
371 test_data = [
373 UiSetting('section1', 'option1', 'value1', True),
372 UiSetting('section1', 'option1', 'value1', True),
374 UiSetting('section2', 'option2', 'value2', True),
373 UiSetting('section2', 'option2', 'value2', True),
375 UiSetting('section3', 'option3', 'value3', False),
374 UiSetting('section3', 'option3', 'value3', False),
376 ]
375 ]
377 repo_name = 'test_repo'
376 repo_name = 'test_repo'
378
377
379 model_patch = mock.patch.object(settings, 'VcsSettingsModel')
378 model_patch = mock.patch.object(settings, 'VcsSettingsModel')
380 hooks_patch = mock.patch.object(
379 hooks_patch = mock.patch.object(
381 utils, 'get_enabled_hook_classes',
380 utils, 'get_enabled_hook_classes',
382 return_value=['pull', 'push', 'repo_size'])
381 return_value=['pull', 'push', 'repo_size'])
383 with model_patch as model_mock, hooks_patch:
382 with model_patch as model_mock, hooks_patch:
384 instance_mock = mock.Mock()
383 instance_mock = mock.Mock()
385 model_mock.return_value = instance_mock
384 model_mock.return_value = instance_mock
386 instance_mock.get_ui_settings.return_value = test_data
385 instance_mock.get_ui_settings.return_value = test_data
387 result = utils.config_data_from_db(
386 result = utils.config_data_from_db(
388 clear_session=False, repo=repo_name)
387 clear_session=False, repo=repo_name)
389
388
390 self._assert_repo_name_passed(model_mock, repo_name)
389 self._assert_repo_name_passed(model_mock, repo_name)
391
390
392 expected_result = [
391 expected_result = [
393 ('section1', 'option1', 'value1'),
392 ('section1', 'option1', 'value1'),
394 ('section2', 'option2', 'value2'),
393 ('section2', 'option2', 'value2'),
395 ]
394 ]
396 assert result == expected_result
395 assert result == expected_result
397
396
398 def _assert_repo_name_passed(self, model_mock, repo_name):
397 def _assert_repo_name_passed(self, model_mock, repo_name):
399 assert model_mock.call_count == 1
398 assert model_mock.call_count == 1
400 call_args, call_kwargs = model_mock.call_args
399 call_args, call_kwargs = model_mock.call_args
401 assert call_kwargs['repo'] == repo_name
400 assert call_kwargs['repo'] == repo_name
402
401
403
402
404 class TestIsDirWritable(object):
403 class TestIsDirWritable(object):
405 def test_returns_false_when_not_writable(self):
404 def test_returns_false_when_not_writable(self):
406 with mock.patch('__builtin__.open', side_effect=OSError):
405 with mock.patch('__builtin__.open', side_effect=OSError):
407 assert not utils._is_dir_writable('/stub-path')
406 assert not utils._is_dir_writable('/stub-path')
408
407
409 def test_returns_true_when_writable(self, tmpdir):
408 def test_returns_true_when_writable(self, tmpdir):
410 assert utils._is_dir_writable(str(tmpdir))
409 assert utils._is_dir_writable(str(tmpdir))
411
410
412 def test_is_safe_against_race_conditions(self, tmpdir):
411 def test_is_safe_against_race_conditions(self, tmpdir):
413 workers = multiprocessing.Pool()
412 workers = multiprocessing.Pool()
414 directories = [str(tmpdir)] * 10
413 directories = [str(tmpdir)] * 10
415 workers.map(utils._is_dir_writable, directories)
414 workers.map(utils._is_dir_writable, directories)
416
415
417
416
418 class TestGetEnabledHooks(object):
417 class TestGetEnabledHooks(object):
419 def test_only_active_hooks_are_enabled(self):
418 def test_only_active_hooks_are_enabled(self):
420 ui_settings = [
419 ui_settings = [
421 UiSetting('hooks', db.RhodeCodeUi.HOOK_PUSH, 'value', True),
420 UiSetting('hooks', db.RhodeCodeUi.HOOK_PUSH, 'value', True),
422 UiSetting('hooks', db.RhodeCodeUi.HOOK_REPO_SIZE, 'value', True),
421 UiSetting('hooks', db.RhodeCodeUi.HOOK_REPO_SIZE, 'value', True),
423 UiSetting('hooks', db.RhodeCodeUi.HOOK_PULL, 'value', False)
422 UiSetting('hooks', db.RhodeCodeUi.HOOK_PULL, 'value', False)
424 ]
423 ]
425 result = utils.get_enabled_hook_classes(ui_settings)
424 result = utils.get_enabled_hook_classes(ui_settings)
426 assert result == ['push', 'repo_size']
425 assert result == ['push', 'repo_size']
427
426
428 def test_all_hooks_are_enabled(self):
427 def test_all_hooks_are_enabled(self):
429 ui_settings = [
428 ui_settings = [
430 UiSetting('hooks', db.RhodeCodeUi.HOOK_PUSH, 'value', True),
429 UiSetting('hooks', db.RhodeCodeUi.HOOK_PUSH, 'value', True),
431 UiSetting('hooks', db.RhodeCodeUi.HOOK_REPO_SIZE, 'value', True),
430 UiSetting('hooks', db.RhodeCodeUi.HOOK_REPO_SIZE, 'value', True),
432 UiSetting('hooks', db.RhodeCodeUi.HOOK_PULL, 'value', True)
431 UiSetting('hooks', db.RhodeCodeUi.HOOK_PULL, 'value', True)
433 ]
432 ]
434 result = utils.get_enabled_hook_classes(ui_settings)
433 result = utils.get_enabled_hook_classes(ui_settings)
435 assert result == ['push', 'repo_size', 'pull']
434 assert result == ['push', 'repo_size', 'pull']
436
435
437 def test_no_enabled_hooks_when_no_hook_settings_are_found(self):
436 def test_no_enabled_hooks_when_no_hook_settings_are_found(self):
438 ui_settings = []
437 ui_settings = []
439 result = utils.get_enabled_hook_classes(ui_settings)
438 result = utils.get_enabled_hook_classes(ui_settings)
440 assert result == []
439 assert result == []
441
440
442
441
443 def test_obfuscate_url_pw():
442 def test_obfuscate_url_pw():
444 from rhodecode.lib.utils2 import obfuscate_url_pw
443 from rhodecode.lib.utils2 import obfuscate_url_pw
445 engine = u'/home/repos/malmΓΆ'
444 engine = u'/home/repos/malmΓΆ'
446 assert obfuscate_url_pw(engine)
\ No newline at end of file
445 assert obfuscate_url_pw(engine)
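For context, obfuscate_url_pw is meant to hide credentials embedded in a URL before it is logged or displayed; the local path in the test above simply passes through unchanged. A small usage sketch follows; the exact masking string is an implementation detail of rhodecode.lib.utils2 and is deliberately not asserted here, and the DB-style URL is only an illustrative placeholder.

    from rhodecode.lib.utils2 import obfuscate_url_pw

    # a path without credentials is returned essentially as-is
    print(obfuscate_url_pw(u'/home/repos/malm\xf6'))

    # for a URL that carries a password, the password portion is expected
    # to come back masked in the returned string
    print(obfuscate_url_pw('mysql://root:qweqwe@localhost/rhodecode_test'))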
@@ -1,671 +1,674 b''
1
1
2
2
3 ################################################################################
3 ################################################################################
4 ## RHODECODE COMMUNITY EDITION CONFIGURATION ##
4 ## RHODECODE COMMUNITY EDITION CONFIGURATION ##
5 # The %(here)s variable will be replaced with the parent directory of this file#
5 # The %(here)s variable will be replaced with the parent directory of this file#
6 ################################################################################
6 ################################################################################
7
7
8 [DEFAULT]
8 [DEFAULT]
9 debug = true
9 debug = true
10
10
11 ################################################################################
11 ################################################################################
12 ## EMAIL CONFIGURATION ##
12 ## EMAIL CONFIGURATION ##
13 ## Uncomment and replace with the email address which should receive ##
13 ## Uncomment and replace with the email address which should receive ##
14 ## any error reports after an application crash ##
14 ## any error reports after an application crash ##
15 ## Additionally these settings will be used by the RhodeCode mailing system ##
15 ## Additionally these settings will be used by the RhodeCode mailing system ##
16 ################################################################################
16 ################################################################################
17
17
18 ## prefix all emails subjects with given prefix, helps filtering out emails
18 ## prefix all emails subjects with given prefix, helps filtering out emails
19 #email_prefix = [RhodeCode]
19 #email_prefix = [RhodeCode]
20
20
21 ## email FROM address all mails will be sent
21 ## email FROM address all mails will be sent
22 #app_email_from = rhodecode-noreply@localhost
22 #app_email_from = rhodecode-noreply@localhost
23
23
24 ## Uncomment and replace with the address which should receive any error report
24 ## Uncomment and replace with the address which should receive any error report
25 ## note: using appenlight for error handling doesn't need this to be uncommented
25 ## note: using appenlight for error handling doesn't need this to be uncommented
26 #email_to = admin@localhost
26 #email_to = admin@localhost
27
27
28 ## in case of Application errors, sent an error email form
28 ## in case of Application errors, sent an error email form
29 #error_email_from = rhodecode_error@localhost
29 #error_email_from = rhodecode_error@localhost
30
30
31 ## additional error message to be send in case of server crash
31 ## additional error message to be send in case of server crash
32 #error_message =
32 #error_message =
33
33
34
34
35 #smtp_server = mail.server.com
35 #smtp_server = mail.server.com
36 #smtp_username =
36 #smtp_username =
37 #smtp_password =
37 #smtp_password =
38 #smtp_port =
38 #smtp_port =
39 #smtp_use_tls = false
39 #smtp_use_tls = false
40 #smtp_use_ssl = true
40 #smtp_use_ssl = true
41 ## Specify available auth parameters here (e.g. LOGIN PLAIN CRAM-MD5, etc.)
41 ## Specify available auth parameters here (e.g. LOGIN PLAIN CRAM-MD5, etc.)
42 #smtp_auth =
42 #smtp_auth =
43
43
44 [server:main]
44 [server:main]
45 ## COMMON ##
45 ## COMMON ##
46 host = 0.0.0.0
46 host = 0.0.0.0
47 port = 5000
47 port = 5000
48
48
49 ##########################
49 ##########################
50 ## GUNICORN WSGI SERVER ##
50 ## GUNICORN WSGI SERVER ##
51 ##########################
51 ##########################
52 ## run with gunicorn --log-config rhodecode.ini --paste rhodecode.ini
52 ## run with gunicorn --log-config rhodecode.ini --paste rhodecode.ini
53
53
54 use = egg:gunicorn#main
54 use = egg:gunicorn#main
55 ## Sets the number of process workers. You must set `instance_id = *`
55 ## Sets the number of process workers. You must set `instance_id = *`
56 ## when this option is set to more than one worker, recommended
56 ## when this option is set to more than one worker, recommended
57 ## value is (2 * NUMBER_OF_CPUS + 1), eg 2CPU = 5 workers
57 ## value is (2 * NUMBER_OF_CPUS + 1), eg 2CPU = 5 workers
58 ## The `instance_id = *` must be set in the [app:main] section below
58 ## The `instance_id = *` must be set in the [app:main] section below
59 #workers = 2
59 #workers = 2
60 ## number of threads for each of the worker, must be set to 1 for gevent
60 ## number of threads for each of the worker, must be set to 1 for gevent
61 ## generally recommended to be set to 1
61 ## generally recommended to be set to 1
62 #threads = 1
62 #threads = 1
63 ## process name
63 ## process name
64 #proc_name = rhodecode
64 #proc_name = rhodecode
65 ## type of worker class, one of sync, gevent
65 ## type of worker class, one of sync, gevent
66 ## for bigger setups, using a worker class other than sync is recommended
66 ## for bigger setups, using a worker class other than sync is recommended
67 #worker_class = sync
67 #worker_class = sync
68 ## The maximum number of simultaneous clients. Valid only for Gevent
68 ## The maximum number of simultaneous clients. Valid only for Gevent
69 #worker_connections = 10
69 #worker_connections = 10
70 ## max number of requests that worker will handle before being gracefully
70 ## max number of requests that worker will handle before being gracefully
71 ## restarted, could prevent memory leaks
71 ## restarted, could prevent memory leaks
72 #max_requests = 1000
72 #max_requests = 1000
73 #max_requests_jitter = 30
73 #max_requests_jitter = 30
74 ## amount of time a worker can spend with handling a request before it
74 ## amount of time a worker can spend with handling a request before it
75 ## gets killed and restarted. Set to 6hrs
75 ## gets killed and restarted. Set to 6hrs
76 #timeout = 21600
76 #timeout = 21600
77
77
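The worker-count rule of thumb noted above (2 * NUMBER_OF_CPUS + 1) is easy to compute when provisioning. A tiny illustration, assuming the CPU count of the host running gunicorn is what you want to size against:

    import multiprocessing

    cpus = multiprocessing.cpu_count()
    workers = 2 * cpus + 1  # e.g. 2 CPUs -> 5 workers, as noted above
    print('suggested gunicorn workers: %d' % workers)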
78 ## prefix middleware for RhodeCode.
78 ## prefix middleware for RhodeCode.
79 ## recommended when using proxy setup.
79 ## recommended when using proxy setup.
80 ## allows to set RhodeCode under a prefix in server.
80 ## allows to set RhodeCode under a prefix in server.
81 ## eg https://server.com/custom_prefix. Enable `filter-with =` option below as well.
81 ## eg https://server.com/custom_prefix. Enable `filter-with =` option below as well.
82 ## And set your prefix like: `prefix = /custom_prefix`
82 ## And set your prefix like: `prefix = /custom_prefix`
83 ## be sure to also set beaker.session.cookie_path = /custom_prefix if you need
83 ## be sure to also set beaker.session.cookie_path = /custom_prefix if you need
84 ## to make your cookies only work on prefix url
84 ## to make your cookies only work on prefix url
85 [filter:proxy-prefix]
85 [filter:proxy-prefix]
86 use = egg:PasteDeploy#prefix
86 use = egg:PasteDeploy#prefix
87 prefix = /
87 prefix = /
88
88
89 [app:main]
89 [app:main]
90 is_test = True
90 is_test = True
91 use = egg:rhodecode-enterprise-ce
91 use = egg:rhodecode-enterprise-ce
92
92
93 ## enable proxy prefix middleware, defined above
93 ## enable proxy prefix middleware, defined above
94 #filter-with = proxy-prefix
94 #filter-with = proxy-prefix
95
95
96
96
97 ## RHODECODE PLUGINS ##
97 ## RHODECODE PLUGINS ##
98 rhodecode.includes = rhodecode.api
98 rhodecode.includes = rhodecode.api
99
99
100 # api prefix url
100 # api prefix url
101 rhodecode.api.url = /_admin/api
101 rhodecode.api.url = /_admin/api
102
102
103
103
104 ## END RHODECODE PLUGINS ##
104 ## END RHODECODE PLUGINS ##
105
105
106 ## encryption key used to encrypt social plugin tokens,
106 ## encryption key used to encrypt social plugin tokens,
107 ## remote_urls with credentials etc, if not set it defaults to
107 ## remote_urls with credentials etc, if not set it defaults to
108 ## `beaker.session.secret`
108 ## `beaker.session.secret`
109 #rhodecode.encrypted_values.secret =
109 #rhodecode.encrypted_values.secret =
110
110
111 ## decryption strict mode (enabled by default). It controls if decryption raises
111 ## decryption strict mode (enabled by default). It controls if decryption raises
112 ## `SignatureVerificationError` in case of wrong key, or damaged encryption data.
112 ## `SignatureVerificationError` in case of wrong key, or damaged encryption data.
113 #rhodecode.encrypted_values.strict = false
113 #rhodecode.encrypted_values.strict = false
114
114
115 ## return gzipped responses from Rhodecode (static files/application)
115 ## return gzipped responses from Rhodecode (static files/application)
116 gzip_responses = false
116 gzip_responses = false
117
117
118 ## autogenerate javascript routes file on startup
118 ## autogenerate javascript routes file on startup
119 generate_js_files = false
119 generate_js_files = false
120
120
121 ## Optional Languages
121 ## Optional Languages
122 ## en(default), be, de, es, fr, it, ja, pl, pt, ru, zh
122 ## en(default), be, de, es, fr, it, ja, pl, pt, ru, zh
123 lang = en
123 lang = en
124
124
125 ## perform a full repository scan on each server start, this should be
125 ## perform a full repository scan on each server start, this should be
126 ## set to false after first startup, to allow faster server restarts.
126 ## set to false after first startup, to allow faster server restarts.
127 startup.import_repos = true
127 startup.import_repos = true
128
128
129 ## Uncomment and set this path to use archive download cache.
129 ## Uncomment and set this path to use archive download cache.
130 ## Once enabled, generated archives will be cached at this location
130 ## Once enabled, generated archives will be cached at this location
131 ## and served from the cache during subsequent requests for the same archive of
131 ## and served from the cache during subsequent requests for the same archive of
132 ## the repository.
132 ## the repository.
133 #archive_cache_dir = /tmp/tarballcache
133 #archive_cache_dir = /tmp/tarballcache
134
134
135 ## URL at which the application is running. This is used for bootstrapping
135 ## URL at which the application is running. This is used for bootstrapping
136 ## requests in context when no web request is available. Used in ishell, or
136 ## requests in context when no web request is available. Used in ishell, or
137 ## SSH calls. Set this for events to receive the proper URL for SSH calls.
137 ## SSH calls. Set this for events to receive the proper URL for SSH calls.
138 app.base_url = http://rhodecode.local
138 app.base_url = http://rhodecode.local
139
139
140 ## change this to unique ID for security
140 ## change this to unique ID for security
141 app_instance_uuid = rc-production
141 app_instance_uuid = rc-production
142
142
143 ## cut off limit for large diffs (size in bytes)
143 ## cut off limit for large diffs (size in bytes)
144 cut_off_limit_diff = 1024000
144 cut_off_limit_diff = 1024000
145 cut_off_limit_file = 256000
145 cut_off_limit_file = 256000
146
146
147 ## use cache version of scm repo everywhere
147 ## use cache version of scm repo everywhere
148 vcs_full_cache = false
148 vcs_full_cache = false
149
149
150 ## force https in RhodeCode, fixes https redirects, assumes it's always https
150 ## force https in RhodeCode, fixes https redirects, assumes it's always https
151 ## Normally this is controlled by proper http flags sent from http server
151 ## Normally this is controlled by proper http flags sent from http server
152 force_https = false
152 force_https = false
153
153
154 ## use Strict-Transport-Security headers
154 ## use Strict-Transport-Security headers
155 use_htsts = false
155 use_htsts = false
156
156
157 ## git rev filter option, --all is the default filter, if you need to
157 ## git rev filter option, --all is the default filter, if you need to
158 ## hide all refs in changelog switch this to --branches --tags
158 ## hide all refs in changelog switch this to --branches --tags
159 git_rev_filter = --all
159 git_rev_filter = --all
160
160
161 # Set to true if your repos are exposed using the dumb protocol
161 # Set to true if your repos are exposed using the dumb protocol
162 git_update_server_info = false
162 git_update_server_info = false
163
163
164 ## RSS/ATOM feed options
164 ## RSS/ATOM feed options
165 rss_cut_off_limit = 256000
165 rss_cut_off_limit = 256000
166 rss_items_per_page = 10
166 rss_items_per_page = 10
167 rss_include_diff = false
167 rss_include_diff = false
168
168
169 ## gist URL alias, used to create nicer urls for gist. This should be an
169 ## gist URL alias, used to create nicer urls for gist. This should be an
170 ## url that does rewrites to _admin/gists/{gistid}.
170 ## url that does rewrites to _admin/gists/{gistid}.
171 ## example: http://gist.rhodecode.org/{gistid}. Empty means use the internal
171 ## example: http://gist.rhodecode.org/{gistid}. Empty means use the internal
172 ## RhodeCode url, ie. http[s]://rhodecode.server/_admin/gists/{gistid}
172 ## RhodeCode url, ie. http[s]://rhodecode.server/_admin/gists/{gistid}
173 gist_alias_url =
173 gist_alias_url =
174
174
175 ## List of views (using glob pattern syntax) that AUTH TOKENS could be
175 ## List of views (using glob pattern syntax) that AUTH TOKENS could be
176 ## used for access.
176 ## used for access.
177 ## Adding ?auth_token=TOKEN_HASH to the url authenticates this request as if it
177 ## Adding ?auth_token=TOKEN_HASH to the url authenticates this request as if it
178 ## came from the logged-in user who owns this authentication token.
178 ## came from the logged-in user who owns this authentication token.
179 ## Additionally, the @TOKEN syntax can be used to bind the view to a specific
179 ## Additionally, the @TOKEN syntax can be used to bind the view to a specific
180 ## authentication token. Such a view is then only accessible when used together
180 ## authentication token. Such a view is then only accessible when used together
181 ## with this authentication token
181 ## with this authentication token
182 ##
182 ##
183 ## list of all views can be found under `/_admin/permissions/auth_token_access`
183 ## list of all views can be found under `/_admin/permissions/auth_token_access`
184 ## The list should be "," separated and on a single line.
184 ## The list should be "," separated and on a single line.
185 ##
185 ##
186 ## Most common views to enable:
186 ## Most common views to enable:
187 # RepoCommitsView:repo_commit_download
187 # RepoCommitsView:repo_commit_download
188 # RepoCommitsView:repo_commit_patch
188 # RepoCommitsView:repo_commit_patch
189 # RepoCommitsView:repo_commit_raw
189 # RepoCommitsView:repo_commit_raw
190 # RepoCommitsView:repo_commit_raw@TOKEN
190 # RepoCommitsView:repo_commit_raw@TOKEN
191 # RepoFilesView:repo_files_diff
191 # RepoFilesView:repo_files_diff
192 # RepoFilesView:repo_archivefile
192 # RepoFilesView:repo_archivefile
193 # RepoFilesView:repo_file_raw
193 # RepoFilesView:repo_file_raw
194 # GistView:*
194 # GistView:*
195 api_access_controllers_whitelist =
195 api_access_controllers_whitelist =
196
196
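To make the auth-token access described above concrete: once a view is whitelisted, a client can append ?auth_token=TOKEN_HASH to that view's URL and the request is treated as coming from the token's owner. A client-side sketch follows; the host, repository path and token value are placeholders, not values from this file, and the requests library is assumed to be available.

    import requests  # third-party HTTP client, assumed available

    base_url = 'https://rhodecode.example.com'           # placeholder host
    view_url = base_url + '/myrepo/raw/tip/README.rst'   # placeholder whitelisted raw-file view
    auth_token = 'TOKEN_HASH'                            # placeholder token

    # equivalent to opening <view_url>?auth_token=TOKEN_HASH in a browser
    response = requests.get(view_url, params={'auth_token': auth_token})
    print(response.status_code)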
197 ## default encoding used to convert from and to unicode
197 ## default encoding used to convert from and to unicode
198 ## can be also a comma separated list of encoding in case of mixed encodings
198 ## can be also a comma separated list of encoding in case of mixed encodings
199 default_encoding = UTF-8
199 default_encoding = UTF-8
200
200
201 ## instance-id prefix
201 ## instance-id prefix
202 ## a prefix key for this instance used for cache invalidation when running
202 ## a prefix key for this instance used for cache invalidation when running
203 ## multiple instances of rhodecode, make sure it's globally unique for
203 ## multiple instances of rhodecode, make sure it's globally unique for
204 ## all running rhodecode instances. Leave empty if you don't use it
204 ## all running rhodecode instances. Leave empty if you don't use it
205 instance_id =
205 instance_id =
206
206
207 ## Fallback authentication plugin. Set this to a plugin ID to force the usage
207 ## Fallback authentication plugin. Set this to a plugin ID to force the usage
208 ## of an authentication plugin even if it is disabled by its settings.
208 ## of an authentication plugin even if it is disabled by its settings.
209 ## This could be useful if you are unable to log in to the system due to broken
209 ## This could be useful if you are unable to log in to the system due to broken
210 ## authentication settings. Then you can enable e.g. the internal rhodecode auth
210 ## authentication settings. Then you can enable e.g. the internal rhodecode auth
211 ## module to log in again and fix the settings.
211 ## module to log in again and fix the settings.
212 ##
212 ##
213 ## Available builtin plugin IDs (hash is part of the ID):
213 ## Available builtin plugin IDs (hash is part of the ID):
214 ## egg:rhodecode-enterprise-ce#rhodecode
214 ## egg:rhodecode-enterprise-ce#rhodecode
215 ## egg:rhodecode-enterprise-ce#pam
215 ## egg:rhodecode-enterprise-ce#pam
216 ## egg:rhodecode-enterprise-ce#ldap
216 ## egg:rhodecode-enterprise-ce#ldap
217 ## egg:rhodecode-enterprise-ce#jasig_cas
217 ## egg:rhodecode-enterprise-ce#jasig_cas
218 ## egg:rhodecode-enterprise-ce#headers
218 ## egg:rhodecode-enterprise-ce#headers
219 ## egg:rhodecode-enterprise-ce#crowd
219 ## egg:rhodecode-enterprise-ce#crowd
220 #rhodecode.auth_plugin_fallback = egg:rhodecode-enterprise-ce#rhodecode
220 #rhodecode.auth_plugin_fallback = egg:rhodecode-enterprise-ce#rhodecode
221
221
222 ## alternative return HTTP header for failed authentication. Default HTTP
222 ## alternative return HTTP header for failed authentication. Default HTTP
223 ## response is 401 HTTPUnauthorized. Currently HG clients have trouble
223 ## response is 401 HTTPUnauthorized. Currently HG clients have trouble
224 ## handling that, causing a series of failed authentication calls.
224 ## handling that, causing a series of failed authentication calls.
225 ## Set this variable to 403 to return HTTPForbidden, or any other HTTP code
225 ## Set this variable to 403 to return HTTPForbidden, or any other HTTP code
226 ## This will be served instead of the default 401 on bad authentication
226 ## This will be served instead of the default 401 on bad authentication
227 auth_ret_code =
227 auth_ret_code =
228
228
229 ## use special detection method when serving auth_ret_code, instead of serving
229 ## use special detection method when serving auth_ret_code, instead of serving
230 ## ret_code directly, use 401 initially (Which triggers credentials prompt)
230 ## ret_code directly, use 401 initially (Which triggers credentials prompt)
231 ## and then serve auth_ret_code to clients
231 ## and then serve auth_ret_code to clients
232 auth_ret_code_detection = false
232 auth_ret_code_detection = false
233
233
234 ## locking return code. When repository is locked return this HTTP code. 2XX
234 ## locking return code. When repository is locked return this HTTP code. 2XX
235 ## codes don't break the transactions while 4XX codes do
235 ## codes don't break the transactions while 4XX codes do
236 lock_ret_code = 423
236 lock_ret_code = 423
237
237
238 ## allows to change the repository location in settings page
238 ## allows to change the repository location in settings page
239 allow_repo_location_change = true
239 allow_repo_location_change = true
240
240
241 ## allows to setup custom hooks in settings page
241 ## allows to setup custom hooks in settings page
242 allow_custom_hooks_settings = true
242 allow_custom_hooks_settings = true
243
243
244 ## generated license token, goto license page in RhodeCode settings to obtain
244 ## generated license token, goto license page in RhodeCode settings to obtain
245 ## new token
245 ## new token
246 license_token = abra-cada-bra1-rce3
246 license_token = abra-cada-bra1-rce3
247
247
248 ## supervisor connection uri, for managing supervisor and logs.
248 ## supervisor connection uri, for managing supervisor and logs.
249 supervisor.uri =
249 supervisor.uri =
250 ## supervisord group name/id we only want this RC instance to handle
250 ## supervisord group name/id we only want this RC instance to handle
251 supervisor.group_id = dev
251 supervisor.group_id = dev
252
252
253 ## Display extended labs settings
253 ## Display extended labs settings
254 labs_settings_active = true
254 labs_settings_active = true
255
255
256 ####################################
256 ####################################
257 ### CELERY CONFIG ####
257 ### CELERY CONFIG ####
258 ####################################
258 ####################################
259 use_celery = false
259 use_celery = false
260 broker.host = localhost
260 broker.host = localhost
261 broker.vhost = rabbitmqhost
261 broker.vhost = rabbitmqhost
262 broker.port = 5672
262 broker.port = 5672
263 broker.user = rabbitmq
263 broker.user = rabbitmq
264 broker.password = qweqwe
264 broker.password = qweqwe
265
265
266 celery.imports = rhodecode.lib.celerylib.tasks
266 celery.imports = rhodecode.lib.celerylib.tasks
267
267
268 celery.result.backend = amqp
268 celery.result.backend = amqp
269 celery.result.dburi = amqp://
269 celery.result.dburi = amqp://
270 celery.result.serialier = json
270 celery.result.serialier = json
271
271
272 #celery.send.task.error.emails = true
272 #celery.send.task.error.emails = true
273 #celery.amqp.task.result.expires = 18000
273 #celery.amqp.task.result.expires = 18000
274
274
275 celeryd.concurrency = 2
275 celeryd.concurrency = 2
276 #celeryd.log.file = celeryd.log
276 #celeryd.log.file = celeryd.log
277 celeryd.log.level = debug
277 celeryd.log.level = debug
278 celeryd.max.tasks.per.child = 1
278 celeryd.max.tasks.per.child = 1
279
279
280 ## tasks will never be sent to the queue, but executed locally instead.
280 ## tasks will never be sent to the queue, but executed locally instead.
281 celery.always.eager = false
281 celery.always.eager = false
282
282
283 ####################################
283 ####################################
284 ### BEAKER CACHE ####
284 ### BEAKER CACHE ####
285 ####################################
285 ####################################
286 # default cache dir for templates. Putting this into a ramdisk
286 # default cache dir for templates. Putting this into a ramdisk
287 ## can boost performance, eg. %(here)s/data_ramdisk
287 ## can boost performance, eg. %(here)s/data_ramdisk
288 cache_dir = %(here)s/data
288 cache_dir = %(here)s/data
289
289
290 ## locking and default file storage for Beaker. Putting this into a ramdisk
290 ## locking and default file storage for Beaker. Putting this into a ramdisk
291 ## can boost performance, eg. %(here)s/data_ramdisk/cache/beaker_data
291 ## can boost performance, eg. %(here)s/data_ramdisk/cache/beaker_data
292 beaker.cache.data_dir = %(here)s/rc/data/cache/beaker_data
292 beaker.cache.data_dir = %(here)s/rc/data/cache/beaker_data
293 beaker.cache.lock_dir = %(here)s/rc/data/cache/beaker_lock
293 beaker.cache.lock_dir = %(here)s/rc/data/cache/beaker_lock
294
294
295 beaker.cache.regions = long_term, sql_cache_short
295 beaker.cache.regions = long_term
296
296
297 beaker.cache.long_term.type = memory
297 beaker.cache.long_term.type = memory
298 beaker.cache.long_term.expire = 36000
298 beaker.cache.long_term.expire = 36000
299 beaker.cache.long_term.key_length = 256
299 beaker.cache.long_term.key_length = 256
300
300
301 beaker.cache.sql_cache_short.type = memory
302 beaker.cache.sql_cache_short.expire = 1
303 beaker.cache.sql_cache_short.key_length = 256
304
301
305 #####################################
302 #####################################
306 ### DOGPILE CACHE ####
303 ### DOGPILE CACHE ####
307 #####################################
304 #####################################
308
305
309 ## permission tree cache settings
306 ## permission tree cache settings
310 rc_cache.cache_perms.backend = dogpile.cache.rc.file_namespace
307 rc_cache.cache_perms.backend = dogpile.cache.rc.file_namespace
311 rc_cache.cache_perms.expiration_time = 0
308 rc_cache.cache_perms.expiration_time = 0
312 rc_cache.cache_perms.arguments.filename = /tmp/rc_cache_1
309 rc_cache.cache_perms.arguments.filename = /tmp/rc_cache_1
313
310
311
312 ## cache settings for SQL queries
313 rc_cache.sql_cache_short.backend = dogpile.cache.rc.memory_lru
314 rc_cache.sql_cache_short.expiration_time = 0
315
316
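The hunk above is the substance of this changeset: the Beaker sql_cache_short region is dropped and an equivalent dogpile-backed rc_cache.sql_cache_short region is added. As a rough illustration of how such a dogpile region behaves, here is a minimal sketch using the stock dogpile.cache.memory backend; the dogpile.cache.rc.memory_lru backend configured above is RhodeCode-specific and not shown, and the ini's expiration_time = 0 is interpreted by RhodeCode's rc_cache wrapper, so a plain positive TTL is used here instead.

    from dogpile.cache import make_region

    # stand-alone sketch: a short explicit TTL instead of the ini's 0
    sql_cache_short = make_region().configure(
        'dogpile.cache.memory',
        expiration_time=30,
    )

    @sql_cache_short.cache_on_arguments()
    def get_setting(name):
        # stand-in for an expensive SQL lookup
        return 'value-for-%s' % name

    print(get_setting('title'))   # computed on first call, then cached
    print(get_setting('title'))   # served from the dogpile region
    sql_cache_short.invalidate()  # explicitly drop entries from this region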
314 ####################################
317 ####################################
315 ### BEAKER SESSION ####
318 ### BEAKER SESSION ####
316 ####################################
319 ####################################
317
320
318 ## .session.type is type of storage options for the session, current allowed
321 ## .session.type is type of storage options for the session, current allowed
319 ## types are file, ext:memcached, ext:database, and memory (default).
322 ## types are file, ext:memcached, ext:database, and memory (default).
320 beaker.session.type = file
323 beaker.session.type = file
321 beaker.session.data_dir = %(here)s/rc/data/sessions/data
324 beaker.session.data_dir = %(here)s/rc/data/sessions/data
322
325
323 ## db based session, fast, and allows easy management over logged in users
326 ## db based session, fast, and allows easy management over logged in users
324 #beaker.session.type = ext:database
327 #beaker.session.type = ext:database
325 #beaker.session.table_name = db_session
328 #beaker.session.table_name = db_session
326 #beaker.session.sa.url = postgresql://postgres:secret@localhost/rhodecode
329 #beaker.session.sa.url = postgresql://postgres:secret@localhost/rhodecode
327 #beaker.session.sa.url = mysql://root:secret@127.0.0.1/rhodecode
330 #beaker.session.sa.url = mysql://root:secret@127.0.0.1/rhodecode
328 #beaker.session.sa.pool_recycle = 3600
331 #beaker.session.sa.pool_recycle = 3600
329 #beaker.session.sa.echo = false
332 #beaker.session.sa.echo = false
330
333
331 beaker.session.key = rhodecode
334 beaker.session.key = rhodecode
332 beaker.session.secret = test-rc-uytcxaz
335 beaker.session.secret = test-rc-uytcxaz
333 beaker.session.lock_dir = %(here)s/rc/data/sessions/lock
336 beaker.session.lock_dir = %(here)s/rc/data/sessions/lock
334
337
335 ## Secure encrypted cookie. Requires AES and AES python libraries
338 ## Secure encrypted cookie. Requires AES and AES python libraries
336 ## you must disable beaker.session.secret to use this
339 ## you must disable beaker.session.secret to use this
337 #beaker.session.encrypt_key = key_for_encryption
340 #beaker.session.encrypt_key = key_for_encryption
338 #beaker.session.validate_key = validation_key
341 #beaker.session.validate_key = validation_key
339
342
340 ## sets the session as invalid (also logging out the user) if it has not been
343 ## sets the session as invalid (also logging out the user) if it has not been
341 ## accessed for the given amount of time, in seconds
344 ## accessed for the given amount of time, in seconds
342 beaker.session.timeout = 2592000
345 beaker.session.timeout = 2592000
343 beaker.session.httponly = true
346 beaker.session.httponly = true
344 ## Path to use for the cookie. Set to prefix if you use prefix middleware
347 ## Path to use for the cookie. Set to prefix if you use prefix middleware
345 #beaker.session.cookie_path = /custom_prefix
348 #beaker.session.cookie_path = /custom_prefix
346
349
347 ## uncomment for https secure cookie
350 ## uncomment for https secure cookie
348 beaker.session.secure = false
351 beaker.session.secure = false
349
352
350 ## auto-save the session so that an explicit .save() is not needed
353 ## auto-save the session so that an explicit .save() is not needed
351 beaker.session.auto = false
354 beaker.session.auto = false
352
355
353 ## default cookie expiration time in seconds, set to `true` to set expire
356 ## default cookie expiration time in seconds, set to `true` to set expire
354 ## at browser close
357 ## at browser close
355 #beaker.session.cookie_expires = 3600
358 #beaker.session.cookie_expires = 3600
356
359
357 ###################################
360 ###################################
358 ## SEARCH INDEXING CONFIGURATION ##
361 ## SEARCH INDEXING CONFIGURATION ##
359 ###################################
362 ###################################
360 ## Full text search indexer is available in rhodecode-tools under
363 ## Full text search indexer is available in rhodecode-tools under
361 ## `rhodecode-tools index` command
364 ## `rhodecode-tools index` command
362
365
363 ## WHOOSH Backend, doesn't require additional services to run
366 ## WHOOSH Backend, doesn't require additional services to run
364 ## it works good with few dozen repos
367 ## it works good with few dozen repos
365 search.module = rhodecode.lib.index.whoosh
368 search.module = rhodecode.lib.index.whoosh
366 search.location = %(here)s/data/index
369 search.location = %(here)s/data/index
367
370
368 ########################################
371 ########################################
369 ### CHANNELSTREAM CONFIG ####
372 ### CHANNELSTREAM CONFIG ####
370 ########################################
373 ########################################
371 ## channelstream enables persistent connections and live notification
374 ## channelstream enables persistent connections and live notification
372 ## in the system. It's also used by the chat system
375 ## in the system. It's also used by the chat system
373
376
374 channelstream.enabled = false
377 channelstream.enabled = false
375
378
376 ## server address for channelstream server on the backend
379 ## server address for channelstream server on the backend
377 channelstream.server = 127.0.0.1:9800
380 channelstream.server = 127.0.0.1:9800
378 ## location of the channelstream server from outside world
381 ## location of the channelstream server from outside world
379 ## use ws:// for http or wss:// for https. This address needs to be handled
382 ## use ws:// for http or wss:// for https. This address needs to be handled
380 ## by external HTTP server such as Nginx or Apache
383 ## by external HTTP server such as Nginx or Apache
381 ## see nginx/apache configuration examples in our docs
384 ## see nginx/apache configuration examples in our docs
382 channelstream.ws_url = ws://rhodecode.yourserver.com/_channelstream
385 channelstream.ws_url = ws://rhodecode.yourserver.com/_channelstream
383 channelstream.secret = secret
386 channelstream.secret = secret
384 channelstream.history.location = %(here)s/channelstream_history
387 channelstream.history.location = %(here)s/channelstream_history
385
388
386 ## Internal application path that Javascript uses to connect into.
389 ## Internal application path that Javascript uses to connect into.
387 ## If you use proxy-prefix the prefix should be added before /_channelstream
390 ## If you use proxy-prefix the prefix should be added before /_channelstream
388 channelstream.proxy_path = /_channelstream
391 channelstream.proxy_path = /_channelstream
389
392
390
393
391 ###################################
394 ###################################
392 ## APPENLIGHT CONFIG ##
395 ## APPENLIGHT CONFIG ##
393 ###################################
396 ###################################
394
397
395 ## Appenlight is tailored to work with RhodeCode, see
398 ## Appenlight is tailored to work with RhodeCode, see
396 ## http://appenlight.com for details how to obtain an account
399 ## http://appenlight.com for details how to obtain an account
397
400
398 ## appenlight integration enabled
401 ## appenlight integration enabled
399 appenlight = false
402 appenlight = false
400
403
401 appenlight.server_url = https://api.appenlight.com
404 appenlight.server_url = https://api.appenlight.com
402 appenlight.api_key = YOUR_API_KEY
405 appenlight.api_key = YOUR_API_KEY
403 #appenlight.transport_config = https://api.appenlight.com?threaded=1&timeout=5
406 #appenlight.transport_config = https://api.appenlight.com?threaded=1&timeout=5
404
407
405 # used for JS client
408 # used for JS client
406 appenlight.api_public_key = YOUR_API_PUBLIC_KEY
409 appenlight.api_public_key = YOUR_API_PUBLIC_KEY
407
410
408 ## TWEAK AMOUNT OF INFO SENT HERE
411 ## TWEAK AMOUNT OF INFO SENT HERE
409
412
410 ## enables 404 error logging (default False)
413 ## enables 404 error logging (default False)
411 appenlight.report_404 = false
414 appenlight.report_404 = false
412
415
413 ## time in seconds after request is considered being slow (default 1)
416 ## time in seconds after request is considered being slow (default 1)
414 appenlight.slow_request_time = 1
417 appenlight.slow_request_time = 1
415
418
416 ## record slow requests in application
419 ## record slow requests in application
417 ## (needs to be enabled for slow datastore recording and time tracking)
420 ## (needs to be enabled for slow datastore recording and time tracking)
418 appenlight.slow_requests = true
421 appenlight.slow_requests = true
419
422
420 ## enable hooking to application loggers
423 ## enable hooking to application loggers
421 appenlight.logging = true
424 appenlight.logging = true
422
425
423 ## minimum log level for log capture
426 ## minimum log level for log capture
424 appenlight.logging.level = WARNING
427 appenlight.logging.level = WARNING
425
428
426 ## send logs only from erroneous/slow requests
429 ## send logs only from erroneous/slow requests
427 ## (saves API quota for intensive logging)
430 ## (saves API quota for intensive logging)
428 appenlight.logging_on_error = false
431 appenlight.logging_on_error = false
429
432
430 ## list of additional keywords that should be grabbed from the environ object
433 ## list of additional keywords that should be grabbed from the environ object
431 ## can be string with comma separated list of words in lowercase
434 ## can be string with comma separated list of words in lowercase
432 ## (by default client will always send following info:
435 ## (by default client will always send following info:
433 ## 'REMOTE_USER', 'REMOTE_ADDR', 'SERVER_NAME', 'CONTENT_TYPE' + all keys that
436 ## 'REMOTE_USER', 'REMOTE_ADDR', 'SERVER_NAME', 'CONTENT_TYPE' + all keys that
434 ## start with HTTP*; this list can be extended with additional keywords here
437 ## start with HTTP*; this list can be extended with additional keywords here
435 appenlight.environ_keys_whitelist =
438 appenlight.environ_keys_whitelist =
436
439
437 ## list of keywords that should be blanked from request object
440 ## list of keywords that should be blanked from request object
438 ## can be string with comma separated list of words in lowercase
441 ## can be string with comma separated list of words in lowercase
439 ## (by default client will always blank keys that contain following words
442 ## (by default client will always blank keys that contain following words
440 ## 'password', 'passwd', 'pwd', 'auth_tkt', 'secret', 'csrf'
443 ## 'password', 'passwd', 'pwd', 'auth_tkt', 'secret', 'csrf'
441 ## this list can be extended with additional keywords set here
444 ## this list can be extended with additional keywords set here
442 appenlight.request_keys_blacklist =
445 appenlight.request_keys_blacklist =
443
446
444 ## list of namespaces that should be ignored when gathering log entries
447 ## list of namespaces that should be ignored when gathering log entries
445 ## can be string with comma separated list of namespaces
448 ## can be string with comma separated list of namespaces
446 ## (by default the client ignores own entries: appenlight_client.client)
449 ## (by default the client ignores own entries: appenlight_client.client)
447 appenlight.log_namespace_blacklist =
450 appenlight.log_namespace_blacklist =
448
451
449
452
450 ################################################################################
453 ################################################################################
451 ## WARNING: *THE LINE BELOW MUST BE UNCOMMENTED ON A PRODUCTION ENVIRONMENT* ##
454 ## WARNING: *THE LINE BELOW MUST BE UNCOMMENTED ON A PRODUCTION ENVIRONMENT* ##
452 ## Debug mode will enable the interactive debugging tool, allowing ANYONE to ##
455 ## Debug mode will enable the interactive debugging tool, allowing ANYONE to ##
453 ## execute malicious code after an exception is raised. ##
456 ## execute malicious code after an exception is raised. ##
454 ################################################################################
457 ################################################################################
455 set debug = false
458 set debug = false
456
459
457
460
458 ##############
461 ##############
459 ## STYLING ##
462 ## STYLING ##
460 ##############
463 ##############
461 debug_style = false
464 debug_style = false
462
465
463 ###########################################
466 ###########################################
464 ### MAIN RHODECODE DATABASE CONFIG ###
467 ### MAIN RHODECODE DATABASE CONFIG ###
465 ###########################################
468 ###########################################
466 #sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode_test.db?timeout=30
469 #sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode_test.db?timeout=30
467 #sqlalchemy.db1.url = postgresql://postgres:qweqwe@localhost/rhodecode_test
470 #sqlalchemy.db1.url = postgresql://postgres:qweqwe@localhost/rhodecode_test
468 #sqlalchemy.db1.url = mysql://root:qweqwe@localhost/rhodecode_test
471 #sqlalchemy.db1.url = mysql://root:qweqwe@localhost/rhodecode_test
469 sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode_test.db?timeout=30
472 sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode_test.db?timeout=30
470
473
471 # see sqlalchemy docs for other advanced settings
474 # see sqlalchemy docs for other advanced settings
472
475
473 ## print the sql statements to output
476 ## print the sql statements to output
474 sqlalchemy.db1.echo = false
477 sqlalchemy.db1.echo = false
475 ## recycle the connections after this amount of seconds
478 ## recycle the connections after this amount of seconds
476 sqlalchemy.db1.pool_recycle = 3600
479 sqlalchemy.db1.pool_recycle = 3600
477 sqlalchemy.db1.convert_unicode = true
480 sqlalchemy.db1.convert_unicode = true
478
481
479 ## the number of connections to keep open inside the connection pool.
482 ## the number of connections to keep open inside the connection pool.
480 ## 0 indicates no limit
483 ## 0 indicates no limit
481 #sqlalchemy.db1.pool_size = 5
484 #sqlalchemy.db1.pool_size = 5
482
485
483 ## the number of connections to allow in connection pool "overflow", that is
486 ## the number of connections to allow in connection pool "overflow", that is
484 ## connections that can be opened above and beyond the pool_size setting,
487 ## connections that can be opened above and beyond the pool_size setting,
485 ## which defaults to five.
488 ## which defaults to five.
486 #sqlalchemy.db1.max_overflow = 10
489 #sqlalchemy.db1.max_overflow = 10
487
490
488
491
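The sqlalchemy.db1.* keys above map onto standard SQLAlchemy engine options. A rough programmatic equivalent is sketched below, using the test sqlite database path as a stand-in URL; the pool_size and max_overflow options shown commented out above apply to pooled backends such as PostgreSQL or MySQL and are omitted here.

    from sqlalchemy import create_engine

    # rough programmatic equivalent of the sqlalchemy.db1.* options above
    engine = create_engine(
        'sqlite:///rhodecode_test.db',
        echo=False,          # sqlalchemy.db1.echo
        pool_recycle=3600,   # sqlalchemy.db1.pool_recycle
    )
    print(engine.dialect.name)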
489 ##################
492 ##################
490 ### VCS CONFIG ###
493 ### VCS CONFIG ###
491 ##################
494 ##################
492 vcs.server.enable = true
495 vcs.server.enable = true
493 vcs.server = localhost:9901
496 vcs.server = localhost:9901
494
497
495 ## Web server connectivity protocol, responsible for web based VCS operatations
498 ## Web server connectivity protocol, responsible for web based VCS operatations
496 ## Available protocols are:
499 ## Available protocols are:
497 ## `http` - use http-rpc backend (default)
500 ## `http` - use http-rpc backend (default)
498 vcs.server.protocol = http
501 vcs.server.protocol = http
499
502
500 ## Push/Pull operations protocol, available options are:
503 ## Push/Pull operations protocol, available options are:
501 ## `http` - use http-rpc backend (default)
504 ## `http` - use http-rpc backend (default)
502 ## `vcsserver.scm_app` - internal app (EE only)
505 ## `vcsserver.scm_app` - internal app (EE only)
503 vcs.scm_app_implementation = http
506 vcs.scm_app_implementation = http
504
507
505 ## Push/Pull operations hooks protocol, available options are:
508 ## Push/Pull operations hooks protocol, available options are:
506 ## `http` - use http-rpc backend (default)
509 ## `http` - use http-rpc backend (default)
507 vcs.hooks.protocol = http
510 vcs.hooks.protocol = http
508 vcs.hooks.host = 127.0.0.1
511 vcs.hooks.host = 127.0.0.1
509
512
510 vcs.server.log_level = debug
513 vcs.server.log_level = debug
511 ## Start VCSServer with this instance as a subprocess, usefull for development
514 ## Start VCSServer with this instance as a subprocess, usefull for development
512 vcs.start_server = false
515 vcs.start_server = false
513
516
514 ## List of enabled VCS backends, available options are:
517 ## List of enabled VCS backends, available options are:
515 ## `hg` - mercurial
518 ## `hg` - mercurial
516 ## `git` - git
519 ## `git` - git
517 ## `svn` - subversion
520 ## `svn` - subversion
518 vcs.backends = hg, git, svn
521 vcs.backends = hg, git, svn
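## Example (illustrative): to disable Subversion support, list only the
## backends you need:
#vcs.backends = hg, git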

vcs.connection_timeout = 3600
## Compatibility version when creating SVN repositories. Defaults to newest version when commented out.
## Available options are: pre-1.4-compatible, pre-1.5-compatible, pre-1.6-compatible, pre-1.8-compatible, pre-1.9-compatible
#vcs.svn.compatible_version = pre-1.8-compatible


############################################################
### Subversion proxy support (mod_dav_svn) ###
### Maps RhodeCode repo groups into SVN paths for Apache ###
############################################################
## Enable or disable the config file generation.
svn.proxy.generate_config = false
## Generate config file with `SVNListParentPath` set to `On`.
svn.proxy.list_parent_path = true
## Set location and file name of generated config file.
svn.proxy.config_file_path = %(here)s/mod_dav_svn.conf
## Used as a prefix to the `Location` block in the generated config file.
## In most cases it should be set to `/`.
svn.proxy.location_root = /
## Command to reload the mod_dav_svn configuration on change.
## Example: `/etc/init.d/apache2 reload`
#svn.proxy.reload_cmd = /etc/init.d/apache2 reload
## If the timeout expires before the reload command finishes, the command will
## be killed. Setting it to zero means no timeout. Defaults to 10 seconds.
#svn.proxy.reload_timeout = 10
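## For reference, a minimal sketch (an assumption, not the literal generated
## file) of the kind of block this feature writes into mod_dav_svn.conf for a
## repo group named `projects`:
## <Location /projects>
##   DAV svn
##   SVNParentPath /path/to/repositories/projects
##   SVNListParentPath On
## </Location>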

############################################################
### SSH Support Settings ###
############################################################

## Defines if the authorized_keys file should be written on any change of
## user ssh keys; setting this to false also disables the possibility of adding
## ssh keys for users from the web interface.
ssh.generate_authorized_keyfile = true

## Options for ssh, default is `no-pty,no-port-forwarding,no-X11-forwarding,no-agent-forwarding`
# ssh.authorized_keys_ssh_opts =

## File to generate the authorized keys together with options
## It is possible to have multiple key files specified in `sshd_config` e.g.
## AuthorizedKeysFile %h/.ssh/authorized_keys %h/.ssh/authorized_keys_rhodecode
ssh.authorized_keys_file_path = %(here)s/rc/authorized_keys_rhodecode

## Command to execute the SSH wrapper. The binary is available in the
## rhodecode installation directory.
## e.g. ~/.rccontrol/community-1/profile/bin/rc-ssh-wrapper
ssh.wrapper_cmd = ~/.rccontrol/community-1/rc-ssh-wrapper
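## For reference, each generated entry in the authorized_keys file combines the
## options above with the wrapper command, roughly like this (illustrative only;
## the exact wrapper arguments and key material are filled in by RhodeCode):
## no-pty,no-port-forwarding,no-X11-forwarding,no-agent-forwarding,command="~/.rccontrol/community-1/rc-ssh-wrapper ..." ssh-rsa AAAA... user@host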

## Allow shell when executing the ssh-wrapper command
ssh.wrapper_cmd_allow_shell = false

## Enables logging and detailed output sent back to the client. Useful for
## debugging; shouldn't be used in production.
ssh.enable_debug_logging = false

## Paths to the binary executables; by default these are the plain binary
## names, but we can override them if we want to use custom ones
ssh.executable.hg = ~/.rccontrol/vcsserver-1/profile/bin/hg
ssh.executable.git = ~/.rccontrol/vcsserver-1/profile/bin/git
ssh.executable.svn = ~/.rccontrol/vcsserver-1/profile/bin/svnserve
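## Example (illustrative): point at system-wide binaries instead of the
## bundled ones
#ssh.executable.hg = /usr/bin/hg
#ssh.executable.git = /usr/bin/git
#ssh.executable.svn = /usr/bin/svnserve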


## Dummy marker to add new entries after.
## Add any custom entries below. Please don't remove.
custom.conf = 1


################################
### LOGGING CONFIGURATION ####
################################
[loggers]
keys = root, sqlalchemy, beaker, rhodecode, ssh_wrapper

[handlers]
keys = console, console_sql

[formatters]
keys = generic, color_formatter, color_formatter_sql

#############
## LOGGERS ##
#############
[logger_root]
level = NOTSET
handlers = console

[logger_routes]
level = DEBUG
handlers =
qualname = routes.middleware
## "level = DEBUG" logs the route matched and routing variables.
propagate = 1

[logger_beaker]
level = DEBUG
handlers =
qualname = beaker.container
propagate = 1

[logger_rhodecode]
level = DEBUG
handlers =
qualname = rhodecode
propagate = 1

[logger_sqlalchemy]
level = ERROR
handlers = console_sql
qualname = sqlalchemy.engine
propagate = 0
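## Note (added for clarity): raising `level` here changes what SQLAlchemy logs:
## "level = INFO" logs the executed SQL statements, "level = DEBUG" additionally
## logs result rows.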

[logger_ssh_wrapper]
level = DEBUG
handlers =
qualname = ssh_wrapper
propagate = 1


##############
## HANDLERS ##
##############

[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = DEBUG
formatter = generic

[handler_console_sql]
class = StreamHandler
args = (sys.stderr,)
level = WARN
formatter = generic

################
## FORMATTERS ##
################

[formatter_generic]
class = rhodecode.lib.logging_formatter.ExceptionAwareFormatter
format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %Y-%m-%d %H:%M:%S
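## With the format and datefmt above, a record renders roughly like
## (illustrative example, logger name and message made up):
## 2018-06-05 14:12:03.123 INFO  [rhodecode.lib.base] request finished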

[formatter_color_formatter]
class = rhodecode.lib.logging_formatter.ColorFormatter
format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %Y-%m-%d %H:%M:%S

[formatter_color_formatter_sql]
class = rhodecode.lib.logging_formatter.ColorFormatterSql
format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %Y-%m-%d %H:%M:%S