; #########################################
; RHODECODE COMMUNITY EDITION CONFIGURATION
; #########################################

[DEFAULT]
; Debug flag sets all loggers to debug, and enables request tracking
debug = true

; ########################################################################
; EMAIL CONFIGURATION
; These settings will be used by the RhodeCode mailing system
; ########################################################################

; prefix all email subjects with the given prefix, helps with filtering out emails
#email_prefix = [RhodeCode]

; email FROM address from which all mails will be sent
#app_email_from = rhodecode-noreply@localhost

#smtp_server = mail.server.com
#smtp_username =
#smtp_password =
#smtp_port =
#smtp_use_tls = false
#smtp_use_ssl = true

[server:main]
; COMMON HOST/IP CONFIG. This applies mostly to the development setup;
; host and port for gunicorn are controlled by gunicorn_conf.py
host = 127.0.0.1
port = 10020

; ##################################################
; WAITRESS WSGI SERVER - Recommended for Development
; ##################################################

; use server type
use = egg:waitress#main

; number of worker threads
threads = 5

; MAX BODY SIZE 100GB
max_request_body_size = 107374182400

; Use poll instead of select, fixes file descriptor limit problems.
; May not work on old Windows systems.
asyncore_use_poll = true


; ###########################
; GUNICORN APPLICATION SERVER
; ###########################

; run with gunicorn --paste rhodecode.ini --config gunicorn_conf.py

; Module to use, this setting shouldn't be changed
#use = egg:gunicorn#main

; Prefix middleware for RhodeCode.
; recommended when using a proxy setup.
; allows setting RhodeCode under a prefix on the server,
; e.g. https://server.com/custom_prefix. Enable the `filter-with =` option below as well,
; and set your prefix like: `prefix = /custom_prefix`.
; Be sure to also set beaker.session.cookie_path = /custom_prefix if you need
; to make your cookies only work on the prefix url.
[filter:proxy-prefix]
use = egg:PasteDeploy#prefix
prefix = /
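; A minimal illustrative sketch of a prefix setup, assuming the hypothetical
; public URL https://server.com/custom_prefix (values are examples, not defaults):
;   prefix = /custom_prefix
; together with `filter-with = proxy-prefix` enabled in [app:main] below and
; `beaker.session.cookie_path = /custom_prefix` in the beaker session section.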

[app:main]
; The %(here)s variable will be replaced with the absolute path of the parent
; directory of this file
; Each option in app:main can be overridden by an environment variable
;
;To override an option:
;
;RC_<KeyName>
;Everything should be uppercase, . and - should be replaced by _.
;For example, if you have these configuration settings:
;rc_cache.repo_object.backend = foo
;can be overridden by
;export RC_CACHE_REPO_OBJECT_BACKEND=foo
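;A second, hypothetical illustration of the same rule (the value is only an example):
;the `vcs.connection_timeout = 3600` option defined later in this file could be
;overridden by
;export RC_VCS_CONNECTION_TIMEOUT=7200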

use = egg:rhodecode-enterprise-ce

; enable proxy prefix middleware, defined above
#filter-with = proxy-prefix

; #############
; DEBUG OPTIONS
; #############

pyramid.reload_templates = true

# During development we want to have the debug toolbar enabled
pyramid.includes =
    pyramid_debugtoolbar

debugtoolbar.hosts = 0.0.0.0/0
debugtoolbar.exclude_prefixes =
    /css
    /fonts
    /images
    /js

## RHODECODE PLUGINS ##
rhodecode.includes =
    rhodecode.api


# api prefix url
rhodecode.api.url = /_admin/api

; enable debug style page
debug_style = true

; #################
; END DEBUG OPTIONS
; #################

; encryption key used to encrypt social plugin tokens,
; remote_urls with credentials etc. If not set it defaults to
; `beaker.session.secret`
#rhodecode.encrypted_values.secret =

; decryption strict mode (enabled by default). It controls if decryption raises
; `SignatureVerificationError` in case of a wrong key or damaged encryption data.
#rhodecode.encrypted_values.strict = false

; Pick algorithm for encryption. Either fernet (more secure) or aes (default)
; fernet is safer, and we strongly recommend switching to it.
; Due to backward compatibility aes is used as the default.
#rhodecode.encrypted_values.algorithm = fernet

; Return gzipped responses from RhodeCode (static files/application)
gzip_responses = false

; Auto-generate javascript routes file on startup
generate_js_files = false

; System global default language.
; All available languages: en (default), be, de, es, fr, it, ja, pl, pt, ru, zh
lang = en

; Perform a full repository scan and import on each server start.
; Setting this to true could lead to a very long startup time.
startup.import_repos = false

; URL at which the application is running. This is used for bootstrapping
; requests in contexts where no web request is available, e.g. in ishell or
; SSH calls. Set this so that events receive the proper url for SSH calls.
app.base_url = http://rhodecode.local

; Host at which the Service API is running.
app.service_api.host = http://rhodecode.local:10020

; Secret for Service API authentication.
app.service_api.token =

; Unique application ID. Should be a random unique string for security.
app_instance_uuid = rc-production

; Cut off limit for large diffs (size in bytes). If the overall diff size on
; a commit or pull request exceeds this limit, the diff will be displayed
; partially. E.g. 512000 == 512Kb
cut_off_limit_diff = 512000

; Cut off limit for large files inside diffs (size in bytes). Each individual
; file inside a diff which exceeds this limit will be displayed partially.
; E.g. 128000 == 128Kb
cut_off_limit_file = 128000

; Use cached version of vcs repositories everywhere. Recommended to be `true`
vcs_full_cache = true

; Force https in RhodeCode, fixes https redirects, assumes it's always https.
; Normally this is controlled by proper flags sent from an http server such as Nginx or Apache
force_https = false

; use Strict-Transport-Security headers
use_htsts = false

; Set to true if your repos are exposed using the dumb protocol
git_update_server_info = false

; RSS/ATOM feed options
rss_cut_off_limit = 256000
rss_items_per_page = 10
rss_include_diff = false

; gist URL alias, used to create nicer urls for gists. This should be a
; url that rewrites to _admin/gists/{gistid}.
; example: http://gist.rhodecode.org/{gistid}. Empty means use the internal
; RhodeCode url, i.e. http[s]://rhodecode.server/_admin/gists/{gistid}
gist_alias_url =

; List of views (using glob pattern syntax) that AUTH TOKENS can be
; used to access.
; Adding ?auth_token=TOKEN_HASH to the url authenticates the request as if it
; came from the logged-in user who owns this authentication token.
; Additionally the @TOKEN syntax can be used to bind a view to a specific
; authentication token. Such a view would only be accessible when used together
; with this authentication token.
; The list of all views can be found under `/_admin/permissions/auth_token_access`
; The list should be "," separated and on a single line.
; Most common views to enable:

# RepoCommitsView:repo_commit_download
# RepoCommitsView:repo_commit_patch
# RepoCommitsView:repo_commit_raw
# RepoCommitsView:repo_commit_raw@TOKEN
# RepoFilesView:repo_files_diff
# RepoFilesView:repo_archivefile
# RepoFilesView:repo_file_raw
# GistView:*
api_access_controllers_whitelist =
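; Illustrative example of an enabled whitelist, using entries from the list above;
; keep it "," separated on a single line (commented out here, values are examples):
#api_access_controllers_whitelist = RepoCommitsView:repo_commit_raw@TOKEN, RepoFilesView:repo_file_raw, GistView:*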

; Default encoding used to convert from and to unicode
; can also be a comma separated list of encodings in case of mixed encodings
default_encoding = UTF-8

; instance-id prefix
; a prefix key for this instance used for cache invalidation when running
; multiple instances of RhodeCode; make sure it's globally unique for
; all running RhodeCode instances. Leave empty if you don't use it
instance_id =

; Fallback authentication plugin. Set this to a plugin ID to force the usage
; of an authentication plugin even if it is disabled by its settings.
; This could be useful if you are unable to log in to the system due to broken
; authentication settings. Then you can enable e.g. the internal RhodeCode auth
; module to log in again and fix the settings.
; Available builtin plugin IDs (hash is part of the ID):
; egg:rhodecode-enterprise-ce#rhodecode
; egg:rhodecode-enterprise-ce#pam
; egg:rhodecode-enterprise-ce#ldap
; egg:rhodecode-enterprise-ce#jasig_cas
; egg:rhodecode-enterprise-ce#headers
; egg:rhodecode-enterprise-ce#crowd

#rhodecode.auth_plugin_fallback = egg:rhodecode-enterprise-ce#rhodecode

; Flag to control loading of legacy plugins in py:/path format
auth_plugin.import_legacy_plugins = true

; Alternative HTTP return code for failed authentication. The default HTTP
; response is 401 HTTPUnauthorized. Currently HG clients have trouble
; handling that, causing a series of failed authentication calls.
; Set this variable to 403 to return HTTPForbidden, or any other HTTP code.
; This will be served instead of the default 401 on bad authentication
auth_ret_code =
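; e.g. to serve HTTPForbidden instead of the default 401, as described above
; (illustrative value, commented out):
#auth_ret_code = 403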

; use a special detection method when serving auth_ret_code: instead of serving
; the ret_code directly, use 401 initially (which triggers the credentials prompt)
; and then serve auth_ret_code to clients
auth_ret_code_detection = false

; locking return code. When a repository is locked, return this HTTP code. 2XX
; codes don't break the transactions while 4XX codes do
lock_ret_code = 423

; Filesystem location where repositories should be stored
repo_store.path = /var/opt/rhodecode_repo_store

; allows setting up custom hooks in the settings page
allow_custom_hooks_settings = true

; Generated license token, required for the EE edition license.
; A newly generated token value can be found on the Admin > settings > license page.
license_token =

; This flag hides sensitive information on the license page such as the token and license data
license.hide_license_info = false

; supervisor connection uri, for managing supervisor and logs.
supervisor.uri =

; supervisord group name/id that we want only this RC instance to handle
supervisor.group_id = dev

; Display extended labs settings
labs_settings_active = true

; Custom exception store path, defaults to TMPDIR
; This is used to store exceptions from RhodeCode in a shared directory
#exception_tracker.store_path =

; Send email with exception details when it happens
#exception_tracker.send_email = false

; Comma separated list of recipients for exception emails,
; e.g. admin@rhodecode.com,devops@rhodecode.com
; Can be left empty, then emails will be sent to ALL super-admins
#exception_tracker.send_email_recipients =

; optional prefix to add to the email subject
#exception_tracker.email_prefix = [RHODECODE ERROR]

; File store configuration. This is used to store and serve uploaded files
file_store.enabled = true

; Storage backend, available options are: local
file_store.backend = local

; path to store the uploaded binaries
file_store.storage_path = /var/opt/rhodecode_data/file_store

; Uncomment and set this path to control the settings for the archive download cache.
; Generated repo archives will be cached at this location
; and served from the cache during subsequent requests for the same archive of
; the repository. It is important that this path is shared across filesystems and
; between RhodeCode and vcsserver

; Default is $cache_dir/archive_cache if not set
archive_cache.store_dir = /var/opt/rhodecode_data/tarballcache

; The limit in GB sets how much data we cache before recycling the least recently used entries, defaults to 10 GB
archive_cache.cache_size_gb = 10

; By default the cache uses a sharding technique; this specifies how many shards there are
archive_cache.cache_shards = 10

; #############
; CELERY CONFIG
; #############

; manually run celery: /path/to/celery worker --task-events --beat --app rhodecode.lib.celerylib.loader --scheduler rhodecode.lib.celerylib.scheduler.RcScheduler --loglevel DEBUG --ini /path/to/rhodecode.ini

use_celery = true

; path to store schedule database
#celerybeat-schedule.path =

; connection url to the message broker (default redis)
celery.broker_url = redis://redis:6379/8

; results backend to get results for (default redis)
celery.result_backend = redis://redis:6379/8

; rabbitmq example
#celery.broker_url = amqp://rabbitmq:qweqwe@localhost:5672/rabbitmqhost

; maximum tasks to execute before worker restart
celery.max_tasks_per_child = 20

; tasks will never be sent to the queue, but executed locally instead.
celery.task_always_eager = false

; #############
; DOGPILE CACHE
; #############

; Default cache dir for caches. Putting this into a ramdisk can boost performance.
; e.g. /tmpfs/data_ramdisk, however this directory might require a large amount of space
cache_dir = /var/opt/rhodecode_data

; *********************************************
; `sql_cache_short` cache for heavy SQL queries
; Only supported backend is `memory_lru`
; *********************************************
rc_cache.sql_cache_short.backend = dogpile.cache.rc.memory_lru
rc_cache.sql_cache_short.expiration_time = 30


; *****************************************************
; `cache_repo_longterm` cache for repo object instances
; Only supported backend is `memory_lru`
; *****************************************************
rc_cache.cache_repo_longterm.backend = dogpile.cache.rc.memory_lru
; by default we use 30 days, the cache is still invalidated on push
rc_cache.cache_repo_longterm.expiration_time = 2592000
; max items in the LRU cache, set to a smaller number to save memory and expire the last used caches
rc_cache.cache_repo_longterm.max_size = 10000


; *********************************************
; `cache_general` cache for general purpose use
; for simplicity use rc.file_namespace backend,
; for performance and scale use rc.redis
; *********************************************
rc_cache.cache_general.backend = dogpile.cache.rc.file_namespace
rc_cache.cache_general.expiration_time = 43200
; file cache store path. Defaults to `cache_dir =` value or tempdir if both values are not set
#rc_cache.cache_general.arguments.filename = /tmp/cache_general_db

; alternative `cache_general` redis backend with distributed lock
#rc_cache.cache_general.backend = dogpile.cache.rc.redis
#rc_cache.cache_general.expiration_time = 300

; redis_expiration_time needs to be greater than expiration_time
#rc_cache.cache_general.arguments.redis_expiration_time = 7200

#rc_cache.cache_general.arguments.host = localhost
#rc_cache.cache_general.arguments.port = 6379
#rc_cache.cache_general.arguments.db = 0
#rc_cache.cache_general.arguments.socket_timeout = 30
; more Redis options: https://dogpilecache.sqlalchemy.org/en/latest/api.html#redis-backends
#rc_cache.cache_general.arguments.distributed_lock = true

; auto-renew lock to prevent stale locks, slower but safer. Use only if problems happen
#rc_cache.cache_general.arguments.lock_auto_renewal = true

; *************************************************
; `cache_perms` cache for permission tree, auth TTL
; for simplicity use rc.file_namespace backend,
; for performance and scale use rc.redis
; *************************************************
rc_cache.cache_perms.backend = dogpile.cache.rc.file_namespace
rc_cache.cache_perms.expiration_time = 3600
; file cache store path. Defaults to `cache_dir =` value or tempdir if both values are not set
#rc_cache.cache_perms.arguments.filename = /tmp/cache_perms_db

; alternative `cache_perms` redis backend with distributed lock
#rc_cache.cache_perms.backend = dogpile.cache.rc.redis
#rc_cache.cache_perms.expiration_time = 300

; redis_expiration_time needs to be greater than expiration_time
#rc_cache.cache_perms.arguments.redis_expiration_time = 7200

#rc_cache.cache_perms.arguments.host = localhost
#rc_cache.cache_perms.arguments.port = 6379
#rc_cache.cache_perms.arguments.db = 0
#rc_cache.cache_perms.arguments.socket_timeout = 30
; more Redis options: https://dogpilecache.sqlalchemy.org/en/latest/api.html#redis-backends
#rc_cache.cache_perms.arguments.distributed_lock = true

; auto-renew lock to prevent stale locks, slower but safer. Use only if problems happen
#rc_cache.cache_perms.arguments.lock_auto_renewal = true

; ***************************************************
; `cache_repo` cache for file tree, Readme, RSS FEEDS
; for simplicity use rc.file_namespace backend,
; for performance and scale use rc.redis
; ***************************************************
rc_cache.cache_repo.backend = dogpile.cache.rc.file_namespace
rc_cache.cache_repo.expiration_time = 2592000
; file cache store path. Defaults to `cache_dir =` value or tempdir if both values are not set
#rc_cache.cache_repo.arguments.filename = /tmp/cache_repo_db

; alternative `cache_repo` redis backend with distributed lock
#rc_cache.cache_repo.backend = dogpile.cache.rc.redis
#rc_cache.cache_repo.expiration_time = 2592000

; redis_expiration_time needs to be greater than expiration_time
#rc_cache.cache_repo.arguments.redis_expiration_time = 2678400

#rc_cache.cache_repo.arguments.host = localhost
#rc_cache.cache_repo.arguments.port = 6379
#rc_cache.cache_repo.arguments.db = 1
#rc_cache.cache_repo.arguments.socket_timeout = 30
; more Redis options: https://dogpilecache.sqlalchemy.org/en/latest/api.html#redis-backends
#rc_cache.cache_repo.arguments.distributed_lock = true

; auto-renew lock to prevent stale locks, slower but safer. Use only if problems happen
#rc_cache.cache_repo.arguments.lock_auto_renewal = true

; ##############
; BEAKER SESSION
; ##############

; beaker.session.type is the type of storage used for the logged-in users' sessions. Currently allowed
; types are file, ext:redis, ext:database, ext:memcached
; The fastest ones are ext:redis and ext:database; DO NOT use the memory type for sessions
#beaker.session.type = file
#beaker.session.data_dir = %(here)s/data/sessions

; Redis based sessions
beaker.session.type = ext:redis
beaker.session.url = redis://redis:6379/2

; DB based session, fast, and allows easy management of logged-in users
#beaker.session.type = ext:database
#beaker.session.table_name = db_session
#beaker.session.sa.url = postgresql://postgres:secret@localhost/rhodecode
#beaker.session.sa.url = mysql://root:secret@127.0.0.1/rhodecode
#beaker.session.sa.pool_recycle = 3600
#beaker.session.sa.echo = false

beaker.session.key = rhodecode
beaker.session.secret = develop-rc-uytcxaz
beaker.session.lock_dir = /data_ramdisk/lock

; Secure encrypted cookie. Requires AES and AES python libraries
; You must disable beaker.session.secret to use this
#beaker.session.encrypt_key = key_for_encryption
#beaker.session.validate_key = validation_key

; Sets the session as invalid (also logging out the user) if it has not been
; accessed for the given amount of time in seconds
beaker.session.timeout = 2592000
beaker.session.httponly = true

; Path to use for the cookie. Set to the prefix if you use prefix middleware
#beaker.session.cookie_path = /custom_prefix

; Set https secure cookie
beaker.session.secure = false

; default cookie expiration time in seconds, set to `true` to expire
; at browser close
#beaker.session.cookie_expires = 3600
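; or, as described above, expire the cookie at browser close (illustrative, commented out):
#beaker.session.cookie_expires = true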

; #############################
; SEARCH INDEXING CONFIGURATION
; #############################

; The full text search indexer is available in rhodecode-tools under the
; `rhodecode-tools index` command

; WHOOSH Backend, doesn't require additional services to run
; it works well with a few dozen repos
search.module = rhodecode.lib.index.whoosh
search.location = %(here)s/data/index

; ####################
; CHANNELSTREAM CONFIG
; ####################

; channelstream enables persistent connections and live notifications
; in the system. It's also used by the chat system

channelstream.enabled = true

; server address for the channelstream server on the backend
channelstream.server = channelstream:9800

; location of the channelstream server from the outside world
; use ws:// for http or wss:// for https. This address needs to be handled
; by an external HTTP server such as Nginx or Apache
; see Nginx/Apache configuration examples in our docs
channelstream.ws_url = ws://rhodecode.yourserver.com/_channelstream
channelstream.secret = ENV_GENERATED
channelstream.history.location = /var/opt/rhodecode_data/channelstream_history

; Internal application path that Javascript uses to connect to.
; If you use proxy-prefix the prefix should be added before /_channelstream
channelstream.proxy_path = /_channelstream


; ##############################
; MAIN RHODECODE DATABASE CONFIG
; ##############################

#sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode.db?timeout=30
#sqlalchemy.db1.url = postgresql://postgres:qweqwe@localhost/rhodecode
#sqlalchemy.db1.url = mysql://root:qweqwe@localhost/rhodecode?charset=utf8
; pymysql is an alternative driver for MySQL, use it in case of problems with the default one
#sqlalchemy.db1.url = mysql+pymysql://root:qweqwe@localhost/rhodecode

sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode.db?timeout=30

; see sqlalchemy docs for other advanced settings
; print the sql statements to output
sqlalchemy.db1.echo = false

; recycle the connections after this amount of seconds
sqlalchemy.db1.pool_recycle = 3600

; the number of connections to keep open inside the connection pool.
; 0 indicates no limit
; the general calculus with gevent is:
; if your system allows 500 concurrent greenlets (max_connections) that all do database access,
; then increase pool size + max overflow so that they add up to 500.
#sqlalchemy.db1.pool_size = 5

; The number of connections to allow in connection pool "overflow", that is
; connections that can be opened above and beyond the pool_size setting,
; which defaults to five.
#sqlalchemy.db1.max_overflow = 10
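; An illustrative gevent-style sizing following the rule above; the values are
; assumptions chosen only so that pool_size + max_overflow add up to 500:
#sqlalchemy.db1.pool_size = 400
#sqlalchemy.db1.max_overflow = 100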

; Connection check ping, used to detect broken database connections
; could be enabled to better handle cases of "MySQL has gone away" errors
#sqlalchemy.db1.ping_connection = true

; ##########
; VCS CONFIG
; ##########
vcs.server.enable = true
vcs.server = vcsserver:10010

; Web server connectivity protocol, responsible for web based VCS operations
; Available protocols are:
; `http` - use http-rpc backend (default)
vcs.server.protocol = http

; Push/Pull operations protocol, available options are:
; `http` - use http-rpc backend (default)
vcs.scm_app_implementation = http

; Push/Pull operations hooks protocol, available options are:
; `http` - use http-rpc backend (default)
vcs.hooks.protocol = http

; Host on which this instance is listening for hooks. vcsserver will call this host to pull/push hooks so it should be
; accessible via the network.
; Use vcs.hooks.host = "*" to bind to the current hostname (for Docker)
vcs.hooks.host = *

; Start VCSServer with this instance as a subprocess, useful for development
vcs.start_server = false

; List of enabled VCS backends, available options are:
; `hg` - mercurial
; `git` - git
; `svn` - subversion
vcs.backends = hg, git, svn

; Wait this number of seconds before killing the connection to the vcsserver
vcs.connection_timeout = 3600

; Cache flag to cache vcsserver remote calls locally
; It uses cache_region `cache_repo`
vcs.methods.cache = true

; ####################################################
; Subversion proxy support (mod_dav_svn)
; Maps RhodeCode repo groups into SVN paths for Apache
; ####################################################

; Compatibility version when creating SVN repositories. Defaults to newest version when commented out.
; Set a numeric version for your current SVN, e.g. 1.8 or 1.12
; Legacy available options are: pre-1.4-compatible, pre-1.5-compatible, pre-1.6-compatible, pre-1.8-compatible, pre-1.9-compatible
#vcs.svn.compatible_version = 1.8

; Enable SVN proxying of requests over HTTP
vcs.svn.proxy.enabled = true

; host to connect to the running SVN subsystem
vcs.svn.proxy.host = http://svn:8090

; Enable or disable the config file generation.
svn.proxy.generate_config = true

; Generate the config file with `SVNListParentPath` set to `On`.
svn.proxy.list_parent_path = true

; Set the location and file name of the generated config file.
svn.proxy.config_file_path = /etc/rhodecode/conf/svn/mod_dav_svn.conf

; alternative mod_dav config template. This needs to be a valid mako template
; An example template can be found in the source code:
; rhodecode/apps/svn_support/templates/mod-dav-svn.conf.mako
#svn.proxy.config_template = ~/.rccontrol/enterprise-1/custom_svn_conf.mako

; Used as a prefix to the `Location` block in the generated config file.
; In most cases it should be set to `/`.
svn.proxy.location_root = /

; Command to reload the mod dav svn configuration on change.
; Example: `/etc/init.d/apache2 reload` or /home/USER/apache_reload.sh
; Make sure the user who runs the RhodeCode process is allowed to reload Apache
#svn.proxy.reload_cmd = /etc/init.d/apache2 reload

; If the timeout expires before the reload command finishes, the command will
; be killed. Setting it to zero means no timeout. Defaults to 10 seconds.
#svn.proxy.reload_timeout = 10

; ####################
; SSH Support Settings
; ####################

; Defines if a custom authorized_keys file should be created and written on
; any change of user ssh keys. Setting this to false also disables the possibility
; of users adding SSH keys from the web interface. Super admins can still
; manage SSH Keys.
ssh.generate_authorized_keyfile = true

; Options for ssh, default is `no-pty,no-port-forwarding,no-X11-forwarding,no-agent-forwarding`
# ssh.authorized_keys_ssh_opts =
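; e.g. to set the documented default explicitly (illustrative, commented out):
# ssh.authorized_keys_ssh_opts = no-pty,no-port-forwarding,no-X11-forwarding,no-agent-forwarding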
672
672
673 ; Path to the authorized_keys file where the generate entries are placed.
673 ; Path to the authorized_keys file where the generate entries are placed.
674 ; It is possible to have multiple key files specified in `sshd_config` e.g.
674 ; It is possible to have multiple key files specified in `sshd_config` e.g.
675 ; AuthorizedKeysFile %h/.ssh/authorized_keys %h/.ssh/authorized_keys_rhodecode
675 ; AuthorizedKeysFile %h/.ssh/authorized_keys %h/.ssh/authorized_keys_rhodecode
676 ssh.authorized_keys_file_path = /etc/rhodecode/conf/ssh/authorized_keys_rhodecode
676 ssh.authorized_keys_file_path = /etc/rhodecode/conf/ssh/authorized_keys_rhodecode
677
677
678 ; Command to execute the SSH wrapper. The binary is available in the
678 ; Command to execute the SSH wrapper. The binary is available in the
679 ; RhodeCode installation directory.
679 ; RhodeCode installation directory.
680 ; e.g /usr/local/bin/rhodecode_bin/bin/rc-ssh-wrapper
680 ; e.g /usr/local/bin/rhodecode_bin/bin/rc-ssh-wrapper
681 ssh.wrapper_cmd = /usr/local/bin/rhodecode_bin/bin/rc-ssh-wrapper
681 ssh.wrapper_cmd = /usr/local/bin/rhodecode_bin/bin/rc-ssh-wrapper
682
682
683 ; Allow shell when executing the ssh-wrapper command
683 ; Allow shell when executing the ssh-wrapper command
684 ssh.wrapper_cmd_allow_shell = false
684 ssh.wrapper_cmd_allow_shell = false
685
685
686 ; Enables logging, and detailed output send back to the client during SSH
686 ; Enables logging, and detailed output send back to the client during SSH
687 ; operations. Useful for debugging, shouldn't be used in production.
687 ; operations. Useful for debugging, shouldn't be used in production.
688 ssh.enable_debug_logging = true
688 ssh.enable_debug_logging = true
689
689
690 ; Paths to binary executable, by default they are the names, but we can
690 ; Paths to binary executable, by default they are the names, but we can
691 ; override them if we want to use a custom one
691 ; override them if we want to use a custom one
692 ssh.executable.hg = /usr/local/bin/rhodecode_bin/vcs_bin/hg
692 ssh.executable.hg = /usr/local/bin/rhodecode_bin/vcs_bin/hg
693 ssh.executable.git = /usr/local/bin/rhodecode_bin/vcs_bin/git
693 ssh.executable.git = /usr/local/bin/rhodecode_bin/vcs_bin/git
694 ssh.executable.svn = /usr/local/bin/rhodecode_bin/vcs_bin/svnserve
694 ssh.executable.svn = /usr/local/bin/rhodecode_bin/vcs_bin/svnserve
695
695
696 ; Enables the SSH key generator web interface. Disabling this still allows users
697 ; to add their own keys.
698 ssh.enable_ui_key_generator = true
698 ssh.enable_ui_key_generator = true
699
699
700 ; Statsd client config; this is used to send metrics to statsd.
701 ; We recommend enabling statsd_exporter and scraping the metrics with Prometheus.
702 #statsd.enabled = false
702 #statsd.enabled = false
703 #statsd.statsd_host = 0.0.0.0
703 #statsd.statsd_host = 0.0.0.0
704 #statsd.statsd_port = 8125
704 #statsd.statsd_port = 8125
705 #statsd.statsd_prefix =
705 #statsd.statsd_prefix =
706 #statsd.statsd_ipv6 = false
706 #statsd.statsd_ipv6 = false
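; A hedged example (host and port are illustrative, e.g. a local statsd_exporter):
#statsd.enabled = true
#statsd.statsd_host = 127.0.0.1
#statsd.statsd_port = 9125
#statsd.statsd_prefix = rhodecode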
707
707
708 ; Configure logging automatically at server startup. Set to false
709 ; to use the custom logging config below.
710 ; When autoconfigure is enabled, the env variables
711 ; RC_LOGGING_FORMATTER and RC_LOGGING_LEVEL
712 ; can control the logging settings.
713
713
714 #logging.autoconfigure = true
714 #logging.autoconfigure = true
715
715
716 ; specify your own custom logging config file to configure logging
716 ; specify your own custom logging config file to configure logging
717 #logging.logging_conf_file = /path/to/custom_logging.ini
717 #logging.logging_conf_file = /path/to/custom_logging.ini
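; A hedged example of driving the autoconfigured logging via the environment
; variables listed above (values are illustrative):
# export RC_LOGGING_LEVEL=INFO
# export RC_LOGGING_FORMATTER=json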
718
718
719 ; Dummy marker to add new entries after.
719 ; Dummy marker to add new entries after.
720 ; Add any custom entries below. Please don't remove this marker.
720 ; Add any custom entries below. Please don't remove this marker.
721 custom.conf = 1
721 custom.conf = 1
722
722
723
723
724 ; #####################
724 ; #####################
725 ; LOGGING CONFIGURATION
725 ; LOGGING CONFIGURATION
726 ; #####################
726 ; #####################
727
727
728 [loggers]
728 [loggers]
729 keys = root, sqlalchemy, beaker, celery, rhodecode, ssh_wrapper
729 keys = root, sqlalchemy, beaker, celery, rhodecode, ssh_wrapper
730
730
731 [handlers]
731 [handlers]
732 keys = console, console_sql
732 keys = console, console_sql
733
733
734 [formatters]
734 [formatters]
735 keys = generic, json, color_formatter, color_formatter_sql
735 keys = generic, json, color_formatter, color_formatter_sql
736
736
737 ; #######
737 ; #######
738 ; LOGGERS
738 ; LOGGERS
739 ; #######
739 ; #######
740 [logger_root]
740 [logger_root]
741 level = NOTSET
741 level = NOTSET
742 handlers = console
742 handlers = console
743
743
744 [logger_sqlalchemy]
744 [logger_sqlalchemy]
745 level = INFO
745 level = INFO
746 handlers = console_sql
746 handlers = console_sql
747 qualname = sqlalchemy.engine
747 qualname = sqlalchemy.engine
748 propagate = 0
748 propagate = 0
749
749
750 [logger_beaker]
750 [logger_beaker]
751 level = DEBUG
751 level = DEBUG
752 handlers =
752 handlers =
753 qualname = beaker.container
753 qualname = beaker.container
754 propagate = 1
754 propagate = 1
755
755
756 [logger_rhodecode]
756 [logger_rhodecode]
757 level = DEBUG
757 level = DEBUG
758 handlers =
758 handlers =
759 qualname = rhodecode
759 qualname = rhodecode
760 propagate = 1
760 propagate = 1
761
761
762 [logger_ssh_wrapper]
762 [logger_ssh_wrapper]
763 level = DEBUG
763 level = DEBUG
764 handlers =
764 handlers =
765 qualname = ssh_wrapper
765 qualname = ssh_wrapper
766 propagate = 1
766 propagate = 1
767
767
768 [logger_celery]
768 [logger_celery]
769 level = DEBUG
769 level = DEBUG
770 handlers =
770 handlers =
771 qualname = celery
771 qualname = celery
772
772
773
773
774 ; ########
774 ; ########
775 ; HANDLERS
775 ; HANDLERS
776 ; ########
776 ; ########
777
777
778 [handler_console]
778 [handler_console]
779 class = StreamHandler
779 class = StreamHandler
780 args = (sys.stderr, )
780 args = (sys.stderr, )
781 level = DEBUG
781 level = DEBUG
782 ; To enable JSON formatted logs replace 'generic/color_formatter' with 'json'
782 ; To enable JSON formatted logs replace 'generic/color_formatter' with 'json'
783 ; This allows sending properly formatted logs to grafana loki or elasticsearch
783 ; This allows sending properly formatted logs to grafana loki or elasticsearch
784 formatter = color_formatter
784 formatter = color_formatter
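; e.g. for JSON output (as described above) this line would become:
# formatter = json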
785
785
786 [handler_console_sql]
786 [handler_console_sql]
787 ; "level = DEBUG" logs SQL queries and results.
787 ; "level = DEBUG" logs SQL queries and results.
788 ; "level = INFO" logs SQL queries.
788 ; "level = INFO" logs SQL queries.
789 ; "level = WARN" logs neither. (Recommended for production systems.)
789 ; "level = WARN" logs neither. (Recommended for production systems.)
790 class = StreamHandler
790 class = StreamHandler
791 args = (sys.stderr, )
791 args = (sys.stderr, )
792 level = WARN
792 level = WARN
793 ; To enable JSON formatted logs replace 'generic/color_formatter_sql' with 'json'
793 ; To enable JSON formatted logs replace 'generic/color_formatter_sql' with 'json'
794 ; This allows sending properly formatted logs to grafana loki or elasticsearch
794 ; This allows sending properly formatted logs to grafana loki or elasticsearch
795 formatter = color_formatter_sql
795 formatter = color_formatter_sql
796
796
797 ; ##########
797 ; ##########
798 ; FORMATTERS
798 ; FORMATTERS
799 ; ##########
799 ; ##########
800
800
801 [formatter_generic]
801 [formatter_generic]
802 class = rhodecode.lib.logging_formatter.ExceptionAwareFormatter
802 class = rhodecode.lib.logging_formatter.ExceptionAwareFormatter
803 format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s
803 format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s
804 datefmt = %Y-%m-%d %H:%M:%S
804 datefmt = %Y-%m-%d %H:%M:%S
805
805
806 [formatter_color_formatter]
806 [formatter_color_formatter]
807 class = rhodecode.lib.logging_formatter.ColorFormatter
807 class = rhodecode.lib.logging_formatter.ColorFormatter
808 format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s
808 format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s
809 datefmt = %Y-%m-%d %H:%M:%S
809 datefmt = %Y-%m-%d %H:%M:%S
810
810
811 [formatter_color_formatter_sql]
811 [formatter_color_formatter_sql]
812 class = rhodecode.lib.logging_formatter.ColorFormatterSql
812 class = rhodecode.lib.logging_formatter.ColorFormatterSql
813 format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s
813 format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s
814 datefmt = %Y-%m-%d %H:%M:%S
814 datefmt = %Y-%m-%d %H:%M:%S
815
815
816 [formatter_json]
816 [formatter_json]
817 format = %(timestamp)s %(levelname)s %(name)s %(message)s %(req_id)s
817 format = %(timestamp)s %(levelname)s %(name)s %(message)s %(req_id)s
818 class = rhodecode.lib._vendor.jsonlogger.JsonFormatter
818 class = rhodecode.lib._vendor.jsonlogger.JsonFormatter
@@ -1,769 +1,769 b''
1
1
2 ; #########################################
2 ; #########################################
3 ; RHODECODE COMMUNITY EDITION CONFIGURATION
3 ; RHODECODE COMMUNITY EDITION CONFIGURATION
4 ; #########################################
4 ; #########################################
5
5
6 [DEFAULT]
6 [DEFAULT]
7 ; Debug flag sets all loggers to debug, and enables request tracking
7 ; Debug flag sets all loggers to debug, and enables request tracking
8 debug = false
8 debug = false
9
9
10 ; ########################################################################
10 ; ########################################################################
11 ; EMAIL CONFIGURATION
11 ; EMAIL CONFIGURATION
12 ; These settings will be used by the RhodeCode mailing system
12 ; These settings will be used by the RhodeCode mailing system
13 ; ########################################################################
13 ; ########################################################################
14
14
15 ; prefix all emails subjects with given prefix, helps filtering out emails
15 ; prefix all emails subjects with given prefix, helps filtering out emails
16 #email_prefix = [RhodeCode]
16 #email_prefix = [RhodeCode]
17
17
18 ; email FROM address all mails will be sent
18 ; email FROM address all mails will be sent
19 #app_email_from = rhodecode-noreply@localhost
19 #app_email_from = rhodecode-noreply@localhost
20
20
21 #smtp_server = mail.server.com
21 #smtp_server = mail.server.com
22 #smtp_username =
22 #smtp_username =
23 #smtp_password =
23 #smtp_password =
24 #smtp_port =
24 #smtp_port =
25 #smtp_use_tls = false
25 #smtp_use_tls = false
26 #smtp_use_ssl = true
26 #smtp_use_ssl = true
27
27
28 [server:main]
28 [server:main]
29 ; COMMON HOST/IP CONFIG, This applies mostly to develop setup,
29 ; COMMON HOST/IP CONFIG, This applies mostly to develop setup,
30 ; Host port for gunicorn are controlled by gunicorn_conf.py
30 ; Host port for gunicorn are controlled by gunicorn_conf.py
31 host = 127.0.0.1
31 host = 127.0.0.1
32 port = 10020
32 port = 10020
33
33
34
34
35 ; ###########################
35 ; ###########################
36 ; GUNICORN APPLICATION SERVER
36 ; GUNICORN APPLICATION SERVER
37 ; ###########################
37 ; ###########################
38
38
39 ; run with gunicorn --paste rhodecode.ini --config gunicorn_conf.py
39 ; run with gunicorn --paste rhodecode.ini --config gunicorn_conf.py
40
40
41 ; Module to use, this setting shouldn't be changed
41 ; Module to use, this setting shouldn't be changed
42 use = egg:gunicorn#main
42 use = egg:gunicorn#main
43
43
44 ; Prefix middleware for RhodeCode.
45 ; Recommended when using a proxy setup.
46 ; Allows serving RhodeCode under a URL prefix,
47 ; e.g. https://server.com/custom_prefix. Enable the `filter-with =` option below as well,
48 ; and set your prefix like: `prefix = /custom_prefix`.
49 ; Be sure to also set beaker.session.cookie_path = /custom_prefix if you need
50 ; your cookies to only work on the prefix URL (see the example after this section).
51 [filter:proxy-prefix]
51 [filter:proxy-prefix]
52 use = egg:PasteDeploy#prefix
52 use = egg:PasteDeploy#prefix
53 prefix = /
53 prefix = /
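; A hedged example for serving RhodeCode under https://server.com/custom_prefix
; (also uncomment `filter-with = proxy-prefix` in [app:main] and set
; beaker.session.cookie_path = /custom_prefix further below):
# prefix = /custom_prefix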
54
54
55 [app:main]
55 [app:main]
56 ; The %(here)s variable will be replaced with the absolute path of the parent
57 ; directory of this file
58 ; Each option in app:main can be overridden by an environment variable
59 ;
60 ;To override an option:
61 ;
62 ;RC_<KeyName>
63 ;Everything should be uppercase; . and - should be replaced by _.
64 ;For example, if you have this configuration setting:
65 ;rc_cache.repo_object.backend = foo
66 ;it can be overridden by
67 ;export RC_CACHE_REPO_OBJECT_BACKEND=foo
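;Further hypothetical examples following the same rule, using keys from this file:
;use_celery = false           ->  export RC_USE_CELERY=false
;vcs.server.protocol = http   ->  export RC_VCS_SERVER_PROTOCOL=http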
68
68
69 use = egg:rhodecode-enterprise-ce
69 use = egg:rhodecode-enterprise-ce
70
70
71 ; enable proxy prefix middleware, defined above
71 ; enable proxy prefix middleware, defined above
72 #filter-with = proxy-prefix
72 #filter-with = proxy-prefix
73
73
74 ; encryption key used to encrypt social plugin tokens,
74 ; encryption key used to encrypt social plugin tokens,
75 ; remote_urls with credentials etc, if not set it defaults to
75 ; remote_urls with credentials etc, if not set it defaults to
76 ; `beaker.session.secret`
76 ; `beaker.session.secret`
77 #rhodecode.encrypted_values.secret =
77 #rhodecode.encrypted_values.secret =
78
78
79 ; decryption strict mode (enabled by default). It controls if decryption raises
79 ; decryption strict mode (enabled by default). It controls if decryption raises
80 ; `SignatureVerificationError` in case of wrong key, or damaged encryption data.
80 ; `SignatureVerificationError` in case of wrong key, or damaged encryption data.
81 #rhodecode.encrypted_values.strict = false
81 #rhodecode.encrypted_values.strict = false
82
82
83 ; Pick algorithm for encryption. Either fernet (more secure) or aes (default)
83 ; Pick algorithm for encryption. Either fernet (more secure) or aes (default)
84 ; fernet is safer, and we strongly recommend switching to it.
84 ; fernet is safer, and we strongly recommend switching to it.
85 ; Due to backward compatibility aes is used as default.
85 ; Due to backward compatibility aes is used as default.
86 #rhodecode.encrypted_values.algorithm = fernet
86 #rhodecode.encrypted_values.algorithm = fernet
87
87
88 ; Return gzipped responses from RhodeCode (static files/application)
88 ; Return gzipped responses from RhodeCode (static files/application)
89 gzip_responses = false
89 gzip_responses = false
90
90
91 ; Auto-generate javascript routes file on startup
91 ; Auto-generate javascript routes file on startup
92 generate_js_files = false
92 generate_js_files = false
93
93
94 ; System global default language.
94 ; System global default language.
95 ; All available languages: en (default), be, de, es, fr, it, ja, pl, pt, ru, zh
95 ; All available languages: en (default), be, de, es, fr, it, ja, pl, pt, ru, zh
96 lang = en
96 lang = en
97
97
98 ; Perform a full repository scan and import on each server start.
99 ; Setting this to true could lead to a very long startup time.
100 startup.import_repos = false
100 startup.import_repos = false
101
101
102 ; URL at which the application is running. This is used for bootstrapping
103 ; requests when no web request is available, e.g. in ishell or
104 ; SSH calls. Set this so that events receive the proper URL for SSH calls.
105 app.base_url = http://rhodecode.local
105 app.base_url = http://rhodecode.local
106
106
107 ; Host at which the Service API is running.
107 ; Host at which the Service API is running.
108 app.service_api.host = http://rhodecode.local:10020
108 app.service_api.host = http://rhodecode.local:10020
109
109
110 ; Secret for Service API authentication.
110 ; Secret for Service API authentication.
111 app.service_api.token =
111 app.service_api.token =
112
112
113 ; Unique application ID. Should be a random unique string for security.
113 ; Unique application ID. Should be a random unique string for security.
114 app_instance_uuid = rc-production
114 app_instance_uuid = rc-production
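; A random value could be generated, for example, with:
; python3 -c "import uuid; print(uuid.uuid4())"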
115
115
116 ; Cut off limit for large diffs (size in bytes). If the overall diff size of a
117 ; commit or pull request exceeds this limit, the diff will be displayed
118 ; partially. E.g 512000 == 512Kb
119 cut_off_limit_diff = 512000
119 cut_off_limit_diff = 512000
120
120
121 ; Cut off limit for large files inside diffs (size in bytes). Each individual
122 ; file inside a diff which exceeds this limit will be displayed partially.
123 ; E.g 128000 == 128Kb
124 cut_off_limit_file = 128000
124 cut_off_limit_file = 128000
125
125
126 ; Use cached version of vcs repositories everywhere. Recommended to be `true`
126 ; Use cached version of vcs repositories everywhere. Recommended to be `true`
127 vcs_full_cache = true
127 vcs_full_cache = true
128
128
129 ; Force https in RhodeCode, fixes https redirects, assumes it's always https.
129 ; Force https in RhodeCode, fixes https redirects, assumes it's always https.
130 ; Normally this is controlled by proper flags sent from http server such as Nginx or Apache
130 ; Normally this is controlled by proper flags sent from http server such as Nginx or Apache
131 force_https = false
131 force_https = false
132
132
133 ; use Strict-Transport-Security headers
133 ; use Strict-Transport-Security headers
134 use_htsts = false
134 use_htsts = false
135
135
136 ; Set to true if your repos are exposed using the dumb protocol
136 ; Set to true if your repos are exposed using the dumb protocol
137 git_update_server_info = false
137 git_update_server_info = false
138
138
139 ; RSS/ATOM feed options
139 ; RSS/ATOM feed options
140 rss_cut_off_limit = 256000
140 rss_cut_off_limit = 256000
141 rss_items_per_page = 10
141 rss_items_per_page = 10
142 rss_include_diff = false
142 rss_include_diff = false
143
143
144 ; gist URL alias, used to create nicer URLs for gists. This should be a
145 ; URL that rewrites to _admin/gists/{gistid},
146 ; example: http://gist.rhodecode.org/{gistid}. Empty means use the internal
147 ; RhodeCode URL, i.e. http[s]://rhodecode.server/_admin/gists/{gistid}
148 gist_alias_url =
148 gist_alias_url =
149
149
150 ; List of views (using glob pattern syntax) that AUTH TOKENS could be
151 ; used for access.
152 ; Adding ?auth_token=TOKEN_HASH to the url authenticates the request as if it
153 ; came from the logged-in user who owns this authentication token.
154 ; Additionally, the @TOKEN syntax can be used to bind the view to a specific
155 ; authentication token. Such a view would only be accessible when used together
156 ; with this authentication token.
157 ; A list of all views can be found under `/_admin/permissions/auth_token_access`.
158 ; The list should be "," separated and on a single line.
159 ; Most common views to enable:
159 ; Most common views to enable:
160
160
161 # RepoCommitsView:repo_commit_download
161 # RepoCommitsView:repo_commit_download
162 # RepoCommitsView:repo_commit_patch
162 # RepoCommitsView:repo_commit_patch
163 # RepoCommitsView:repo_commit_raw
163 # RepoCommitsView:repo_commit_raw
164 # RepoCommitsView:repo_commit_raw@TOKEN
164 # RepoCommitsView:repo_commit_raw@TOKEN
165 # RepoFilesView:repo_files_diff
165 # RepoFilesView:repo_files_diff
166 # RepoFilesView:repo_archivefile
166 # RepoFilesView:repo_archivefile
167 # RepoFilesView:repo_file_raw
167 # RepoFilesView:repo_file_raw
168 # GistView:*
168 # GistView:*
169 api_access_controllers_whitelist =
169 api_access_controllers_whitelist =
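; A hedged example (host and token are illustrative): allowing raw file and
; archive downloads via auth tokens:
#api_access_controllers_whitelist = RepoFilesView:repo_file_raw, RepoFilesView:repo_archivefile
; a raw-file request could then carry the token, e.g.:
; https://rhodecode.example.com/myrepo/raw/tip/README.rst?auth_token=TOKEN_HASH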
170
170
171 ; Default encoding used to convert from and to unicode
171 ; Default encoding used to convert from and to unicode
172 ; can be also a comma separated list of encoding in case of mixed encodings
172 ; can be also a comma separated list of encoding in case of mixed encodings
173 default_encoding = UTF-8
173 default_encoding = UTF-8
174
174
175 ; instance-id prefix
176 ; a prefix key for this instance, used for cache invalidation when running
177 ; multiple instances of RhodeCode. Make sure it's globally unique across
178 ; all running RhodeCode instances. Leave empty if you don't use it.
179 instance_id =
179 instance_id =
180
180
181 ; Fallback authentication plugin. Set this to a plugin ID to force the usage
182 ; of an authentication plugin even if it is disabled by its settings.
183 ; This could be useful if you are unable to log in to the system due to broken
184 ; authentication settings. Then you can enable e.g. the internal RhodeCode auth
185 ; module to log in again and fix the settings.
186 ; Available builtin plugin IDs (hash is part of the ID):
186 ; Available builtin plugin IDs (hash is part of the ID):
187 ; egg:rhodecode-enterprise-ce#rhodecode
187 ; egg:rhodecode-enterprise-ce#rhodecode
188 ; egg:rhodecode-enterprise-ce#pam
188 ; egg:rhodecode-enterprise-ce#pam
189 ; egg:rhodecode-enterprise-ce#ldap
189 ; egg:rhodecode-enterprise-ce#ldap
190 ; egg:rhodecode-enterprise-ce#jasig_cas
190 ; egg:rhodecode-enterprise-ce#jasig_cas
191 ; egg:rhodecode-enterprise-ce#headers
191 ; egg:rhodecode-enterprise-ce#headers
192 ; egg:rhodecode-enterprise-ce#crowd
192 ; egg:rhodecode-enterprise-ce#crowd
193
193
194 #rhodecode.auth_plugin_fallback = egg:rhodecode-enterprise-ce#rhodecode
194 #rhodecode.auth_plugin_fallback = egg:rhodecode-enterprise-ce#rhodecode
195
195
196 ; Flag to control loading of legacy plugins in py:/path format
196 ; Flag to control loading of legacy plugins in py:/path format
197 auth_plugin.import_legacy_plugins = true
197 auth_plugin.import_legacy_plugins = true
198
198
199 ; alternative HTTP return code for failed authentication. The default HTTP
200 ; response is 401 HTTPUnauthorized. Currently HG clients have trouble
201 ; handling that, causing a series of failed authentication calls.
202 ; Set this variable to 403 to return HTTPForbidden, or to any other HTTP code.
203 ; This will be served instead of the default 401 on bad authentication.
204 auth_ret_code =
204 auth_ret_code =
205
205
206 ; use a special detection method when serving auth_ret_code: instead of serving
207 ; the ret_code directly, use 401 initially (which triggers a credentials prompt)
208 ; and only then serve auth_ret_code to clients
209 auth_ret_code_detection = false
209 auth_ret_code_detection = false
210
210
211 ; locking return code. When repository is locked return this HTTP code. 2XX
211 ; locking return code. When repository is locked return this HTTP code. 2XX
212 ; codes don't break the transactions while 4XX codes do
212 ; codes don't break the transactions while 4XX codes do
213 lock_ret_code = 423
213 lock_ret_code = 423
214
214
215 ; Filesystem location where repositories should be stored
216 repo_store.path = /var/opt/rhodecode_repo_store
217
217
218 ; allows to setup custom hooks in settings page
218 ; allows to setup custom hooks in settings page
219 allow_custom_hooks_settings = true
219 allow_custom_hooks_settings = true
220
220
221 ; Generated license token required for EE edition license.
221 ; Generated license token required for EE edition license.
222 ; New generated token value can be found in Admin > settings > license page.
222 ; New generated token value can be found in Admin > settings > license page.
223 license_token =
223 license_token =
224
224
225 ; This flag hides sensitive information on the license page such as token, and license data
225 ; This flag hides sensitive information on the license page such as token, and license data
226 license.hide_license_info = false
226 license.hide_license_info = false
227
227
228 ; supervisor connection uri, for managing supervisor and logs.
228 ; supervisor connection uri, for managing supervisor and logs.
229 supervisor.uri =
229 supervisor.uri =
230
230
231 ; supervisord group name/id we only want this RC instance to handle
231 ; supervisord group name/id we only want this RC instance to handle
232 supervisor.group_id = prod
232 supervisor.group_id = prod
233
233
234 ; Display extended labs settings
234 ; Display extended labs settings
235 labs_settings_active = true
235 labs_settings_active = true
236
236
237 ; Custom exception store path, defaults to TMPDIR
238 ; This is used to store exceptions from RhodeCode in a shared directory
239 #exception_tracker.store_path =
239 #exception_tracker.store_path =
240
240
241 ; Send email with exception details when it happens
241 ; Send email with exception details when it happens
242 #exception_tracker.send_email = false
242 #exception_tracker.send_email = false
243
243
244 ; Comma separated list of recipients for exception emails,
244 ; Comma separated list of recipients for exception emails,
245 ; e.g admin@rhodecode.com,devops@rhodecode.com
245 ; e.g admin@rhodecode.com,devops@rhodecode.com
246 ; Can be left empty, then emails will be sent to ALL super-admins
246 ; Can be left empty, then emails will be sent to ALL super-admins
247 #exception_tracker.send_email_recipients =
247 #exception_tracker.send_email_recipients =
248
248
249 ; optional prefix to add to the email subject
250 #exception_tracker.email_prefix = [RHODECODE ERROR]
250 #exception_tracker.email_prefix = [RHODECODE ERROR]
251
251
252 ; File store configuration. This is used to store and serve uploaded files
252 ; File store configuration. This is used to store and serve uploaded files
253 file_store.enabled = true
253 file_store.enabled = true
254
254
255 ; Storage backend, available options are: local
255 ; Storage backend, available options are: local
256 file_store.backend = local
256 file_store.backend = local
257
257
258 ; path to store the uploaded binaries
258 ; path to store the uploaded binaries
259 file_store.storage_path = /var/opt/rhodecode_data/file_store
259 file_store.storage_path = /var/opt/rhodecode_data/file_store
260
260
261 ; Uncomment and set this path to control settings for the archive download cache.
262 ; Generated repo archives will be cached at this location
263 ; and served from the cache during subsequent requests for the same archive of
264 ; the repository. It is important that this path is shared across filesystems and
265 ; between RhodeCode and vcsserver
266
266
267 ; Default is $cache_dir/archive_cache if not set
267 ; Default is $cache_dir/archive_cache if not set
268 archive_cache.store_dir = /var/opt/rhodecode_data/tarballcache
268 archive_cache.store_dir = /var/opt/rhodecode_data/tarballcache
269
269
270 ; The limit in GB sets how much data we cache before recycling the least recently used entries, defaults to 10 GB
271 archive_cache.cache_size_gb = 40
271 archive_cache.cache_size_gb = 40
272
272
273 ; By default the cache uses a sharding technique; this specifies how many shards there are
274 archive_cache.cache_shards = 4
274 archive_cache.cache_shards = 4
275
275
276 ; #############
276 ; #############
277 ; CELERY CONFIG
277 ; CELERY CONFIG
278 ; #############
278 ; #############
279
279
280 ; manually run celery: /path/to/celery worker --task-events --beat --app rhodecode.lib.celerylib.loader --scheduler rhodecode.lib.celerylib.scheduler.RcScheduler --loglevel DEBUG --ini /path/to/rhodecode.ini
280 ; manually run celery: /path/to/celery worker --task-events --beat --app rhodecode.lib.celerylib.loader --scheduler rhodecode.lib.celerylib.scheduler.RcScheduler --loglevel DEBUG --ini /path/to/rhodecode.ini
281
281
282 use_celery = true
282 use_celery = true
283
283
284 ; path to store schedule database
284 ; path to store schedule database
285 #celerybeat-schedule.path =
285 #celerybeat-schedule.path =
286
286
287 ; connection url to the message broker (default redis)
287 ; connection url to the message broker (default redis)
288 celery.broker_url = redis://redis:6379/8
288 celery.broker_url = redis://redis:6379/8
289
289
290 ; results backend to get results for (default redis)
290 ; results backend to get results for (default redis)
291 celery.result_backend = redis://redis:6379/8
291 celery.result_backend = redis://redis:6379/8
292
292
293 ; rabbitmq example
293 ; rabbitmq example
294 #celery.broker_url = amqp://rabbitmq:qweqwe@localhost:5672/rabbitmqhost
294 #celery.broker_url = amqp://rabbitmq:qweqwe@localhost:5672/rabbitmqhost
295
295
296 ; maximum tasks to execute before worker restart
296 ; maximum tasks to execute before worker restart
297 celery.max_tasks_per_child = 20
297 celery.max_tasks_per_child = 20
298
298
299 ; tasks will never be sent to the queue, but executed locally instead.
299 ; tasks will never be sent to the queue, but executed locally instead.
300 celery.task_always_eager = false
300 celery.task_always_eager = false
301
301
302 ; #############
302 ; #############
303 ; DOGPILE CACHE
303 ; DOGPILE CACHE
304 ; #############
304 ; #############
305
305
306 ; Default cache dir for caches. Putting this into a ramdisk can boost performance,
307 ; e.g. /tmpfs/data_ramdisk; however, this directory might require a large amount of space
308 cache_dir = /var/opt/rhodecode_data
308 cache_dir = /var/opt/rhodecode_data
309
309
310 ; *********************************************
310 ; *********************************************
311 ; `sql_cache_short` cache for heavy SQL queries
311 ; `sql_cache_short` cache for heavy SQL queries
312 ; Only supported backend is `memory_lru`
312 ; Only supported backend is `memory_lru`
313 ; *********************************************
313 ; *********************************************
314 rc_cache.sql_cache_short.backend = dogpile.cache.rc.memory_lru
314 rc_cache.sql_cache_short.backend = dogpile.cache.rc.memory_lru
315 rc_cache.sql_cache_short.expiration_time = 30
315 rc_cache.sql_cache_short.expiration_time = 30
316
316
317
317
318 ; *****************************************************
318 ; *****************************************************
319 ; `cache_repo_longterm` cache for repo object instances
319 ; `cache_repo_longterm` cache for repo object instances
320 ; Only supported backend is `memory_lru`
320 ; Only supported backend is `memory_lru`
321 ; *****************************************************
321 ; *****************************************************
322 rc_cache.cache_repo_longterm.backend = dogpile.cache.rc.memory_lru
322 rc_cache.cache_repo_longterm.backend = dogpile.cache.rc.memory_lru
323 ; by default we use 30 Days, cache is still invalidated on push
323 ; by default we use 30 Days, cache is still invalidated on push
324 rc_cache.cache_repo_longterm.expiration_time = 2592000
324 rc_cache.cache_repo_longterm.expiration_time = 2592000
325 ; max items in LRU cache, set to smaller number to save memory, and expire last used caches
325 ; max items in LRU cache, set to smaller number to save memory, and expire last used caches
326 rc_cache.cache_repo_longterm.max_size = 10000
326 rc_cache.cache_repo_longterm.max_size = 10000
327
327
328
328
329 ; *********************************************
329 ; *********************************************
330 ; `cache_general` cache for general purpose use
330 ; `cache_general` cache for general purpose use
331 ; for simplicity use rc.file_namespace backend,
331 ; for simplicity use rc.file_namespace backend,
332 ; for performance and scale use rc.redis
332 ; for performance and scale use rc.redis
333 ; *********************************************
333 ; *********************************************
334 rc_cache.cache_general.backend = dogpile.cache.rc.file_namespace
334 rc_cache.cache_general.backend = dogpile.cache.rc.file_namespace
335 rc_cache.cache_general.expiration_time = 43200
335 rc_cache.cache_general.expiration_time = 43200
336 ; file cache store path. Defaults to `cache_dir =` value or tempdir if both values are not set
336 ; file cache store path. Defaults to `cache_dir =` value or tempdir if both values are not set
337 #rc_cache.cache_general.arguments.filename = /tmp/cache_general_db
337 #rc_cache.cache_general.arguments.filename = /tmp/cache_general_db
338
338
339 ; alternative `cache_general` redis backend with distributed lock
339 ; alternative `cache_general` redis backend with distributed lock
340 #rc_cache.cache_general.backend = dogpile.cache.rc.redis
340 #rc_cache.cache_general.backend = dogpile.cache.rc.redis
341 #rc_cache.cache_general.expiration_time = 300
341 #rc_cache.cache_general.expiration_time = 300
342
342
343 ; redis_expiration_time needs to be greater than expiration_time
344 #rc_cache.cache_general.arguments.redis_expiration_time = 7200
344 #rc_cache.cache_general.arguments.redis_expiration_time = 7200
345
345
346 #rc_cache.cache_general.arguments.host = localhost
346 #rc_cache.cache_general.arguments.host = localhost
347 #rc_cache.cache_general.arguments.port = 6379
347 #rc_cache.cache_general.arguments.port = 6379
348 #rc_cache.cache_general.arguments.db = 0
348 #rc_cache.cache_general.arguments.db = 0
349 #rc_cache.cache_general.arguments.socket_timeout = 30
349 #rc_cache.cache_general.arguments.socket_timeout = 30
350 ; more Redis options: https://dogpilecache.sqlalchemy.org/en/latest/api.html#redis-backends
350 ; more Redis options: https://dogpilecache.sqlalchemy.org/en/latest/api.html#redis-backends
351 #rc_cache.cache_general.arguments.distributed_lock = true
351 #rc_cache.cache_general.arguments.distributed_lock = true
352
352
353 ; auto-renew lock to prevent stale locks, slower but safer. Use only if problems happen
353 ; auto-renew lock to prevent stale locks, slower but safer. Use only if problems happen
354 #rc_cache.cache_general.arguments.lock_auto_renewal = true
354 #rc_cache.cache_general.arguments.lock_auto_renewal = true
355
355
356 ; *************************************************
356 ; *************************************************
357 ; `cache_perms` cache for permission tree, auth TTL
357 ; `cache_perms` cache for permission tree, auth TTL
358 ; for simplicity use rc.file_namespace backend,
358 ; for simplicity use rc.file_namespace backend,
359 ; for performance and scale use rc.redis
359 ; for performance and scale use rc.redis
360 ; *************************************************
360 ; *************************************************
361 rc_cache.cache_perms.backend = dogpile.cache.rc.file_namespace
361 rc_cache.cache_perms.backend = dogpile.cache.rc.file_namespace
362 rc_cache.cache_perms.expiration_time = 3600
362 rc_cache.cache_perms.expiration_time = 3600
363 ; file cache store path. Defaults to `cache_dir =` value or tempdir if both values are not set
363 ; file cache store path. Defaults to `cache_dir =` value or tempdir if both values are not set
364 #rc_cache.cache_perms.arguments.filename = /tmp/cache_perms_db
364 #rc_cache.cache_perms.arguments.filename = /tmp/cache_perms_db
365
365
366 ; alternative `cache_perms` redis backend with distributed lock
366 ; alternative `cache_perms` redis backend with distributed lock
367 #rc_cache.cache_perms.backend = dogpile.cache.rc.redis
367 #rc_cache.cache_perms.backend = dogpile.cache.rc.redis
368 #rc_cache.cache_perms.expiration_time = 300
368 #rc_cache.cache_perms.expiration_time = 300
369
369
370 ; redis_expiration_time needs to be greater than expiration_time
371 #rc_cache.cache_perms.arguments.redis_expiration_time = 7200
371 #rc_cache.cache_perms.arguments.redis_expiration_time = 7200
372
372
373 #rc_cache.cache_perms.arguments.host = localhost
373 #rc_cache.cache_perms.arguments.host = localhost
374 #rc_cache.cache_perms.arguments.port = 6379
374 #rc_cache.cache_perms.arguments.port = 6379
375 #rc_cache.cache_perms.arguments.db = 0
375 #rc_cache.cache_perms.arguments.db = 0
376 #rc_cache.cache_perms.arguments.socket_timeout = 30
376 #rc_cache.cache_perms.arguments.socket_timeout = 30
377 ; more Redis options: https://dogpilecache.sqlalchemy.org/en/latest/api.html#redis-backends
377 ; more Redis options: https://dogpilecache.sqlalchemy.org/en/latest/api.html#redis-backends
378 #rc_cache.cache_perms.arguments.distributed_lock = true
378 #rc_cache.cache_perms.arguments.distributed_lock = true
379
379
380 ; auto-renew lock to prevent stale locks, slower but safer. Use only if problems happen
380 ; auto-renew lock to prevent stale locks, slower but safer. Use only if problems happen
381 #rc_cache.cache_perms.arguments.lock_auto_renewal = true
381 #rc_cache.cache_perms.arguments.lock_auto_renewal = true
382
382
383 ; ***************************************************
383 ; ***************************************************
384 ; `cache_repo` cache for file tree, Readme, RSS FEEDS
384 ; `cache_repo` cache for file tree, Readme, RSS FEEDS
385 ; for simplicity use rc.file_namespace backend,
385 ; for simplicity use rc.file_namespace backend,
386 ; for performance and scale use rc.redis
386 ; for performance and scale use rc.redis
387 ; ***************************************************
387 ; ***************************************************
388 rc_cache.cache_repo.backend = dogpile.cache.rc.file_namespace
388 rc_cache.cache_repo.backend = dogpile.cache.rc.file_namespace
389 rc_cache.cache_repo.expiration_time = 2592000
389 rc_cache.cache_repo.expiration_time = 2592000
390 ; file cache store path. Defaults to `cache_dir =` value or tempdir if both values are not set
390 ; file cache store path. Defaults to `cache_dir =` value or tempdir if both values are not set
391 #rc_cache.cache_repo.arguments.filename = /tmp/cache_repo_db
391 #rc_cache.cache_repo.arguments.filename = /tmp/cache_repo_db
392
392
393 ; alternative `cache_repo` redis backend with distributed lock
393 ; alternative `cache_repo` redis backend with distributed lock
394 #rc_cache.cache_repo.backend = dogpile.cache.rc.redis
394 #rc_cache.cache_repo.backend = dogpile.cache.rc.redis
395 #rc_cache.cache_repo.expiration_time = 2592000
395 #rc_cache.cache_repo.expiration_time = 2592000
396
396
397 ; redis_expiration_time needs to be greater than expiration_time
398 #rc_cache.cache_repo.arguments.redis_expiration_time = 2678400
398 #rc_cache.cache_repo.arguments.redis_expiration_time = 2678400
399
399
400 #rc_cache.cache_repo.arguments.host = localhost
400 #rc_cache.cache_repo.arguments.host = localhost
401 #rc_cache.cache_repo.arguments.port = 6379
401 #rc_cache.cache_repo.arguments.port = 6379
402 #rc_cache.cache_repo.arguments.db = 1
402 #rc_cache.cache_repo.arguments.db = 1
403 #rc_cache.cache_repo.arguments.socket_timeout = 30
403 #rc_cache.cache_repo.arguments.socket_timeout = 30
404 ; more Redis options: https://dogpilecache.sqlalchemy.org/en/latest/api.html#redis-backends
404 ; more Redis options: https://dogpilecache.sqlalchemy.org/en/latest/api.html#redis-backends
405 #rc_cache.cache_repo.arguments.distributed_lock = true
405 #rc_cache.cache_repo.arguments.distributed_lock = true
406
406
407 ; auto-renew lock to prevent stale locks, slower but safer. Use only if problems happen
407 ; auto-renew lock to prevent stale locks, slower but safer. Use only if problems happen
408 #rc_cache.cache_repo.arguments.lock_auto_renewal = true
408 #rc_cache.cache_repo.arguments.lock_auto_renewal = true
409
409
410 ; ##############
410 ; ##############
411 ; BEAKER SESSION
411 ; BEAKER SESSION
412 ; ##############
412 ; ##############
413
413
414 ; beaker.session.type is the type of storage used for logged-in users' sessions. Currently allowed
415 ; types are file, ext:redis, ext:database, ext:memcached
416 ; The fastest ones are ext:redis and ext:database; DO NOT use the memory type for sessions
417 #beaker.session.type = file
417 #beaker.session.type = file
418 #beaker.session.data_dir = %(here)s/data/sessions
418 #beaker.session.data_dir = %(here)s/data/sessions
419
419
420 ; Redis based sessions
420 ; Redis based sessions
421 beaker.session.type = ext:redis
421 beaker.session.type = ext:redis
422 beaker.session.url = redis://redis:6379/2
422 beaker.session.url = redis://redis:6379/2
423
423
424 ; DB based session, fast, and allows easy management of logged-in users
425 #beaker.session.type = ext:database
425 #beaker.session.type = ext:database
426 #beaker.session.table_name = db_session
426 #beaker.session.table_name = db_session
427 #beaker.session.sa.url = postgresql://postgres:secret@localhost/rhodecode
427 #beaker.session.sa.url = postgresql://postgres:secret@localhost/rhodecode
428 #beaker.session.sa.url = mysql://root:secret@127.0.0.1/rhodecode
428 #beaker.session.sa.url = mysql://root:secret@127.0.0.1/rhodecode
429 #beaker.session.sa.pool_recycle = 3600
429 #beaker.session.sa.pool_recycle = 3600
430 #beaker.session.sa.echo = false
430 #beaker.session.sa.echo = false
431
431
432 beaker.session.key = rhodecode
432 beaker.session.key = rhodecode
433 beaker.session.secret = production-rc-uytcxaz
433 beaker.session.secret = production-rc-uytcxaz
434 beaker.session.lock_dir = /data_ramdisk/lock
434 beaker.session.lock_dir = /data_ramdisk/lock
435
435
436 ; Secure encrypted cookie. Requires AES and AES python libraries
436 ; Secure encrypted cookie. Requires AES and AES python libraries
437 ; you must disable beaker.session.secret to use this
437 ; you must disable beaker.session.secret to use this
438 #beaker.session.encrypt_key = key_for_encryption
438 #beaker.session.encrypt_key = key_for_encryption
439 #beaker.session.validate_key = validation_key
439 #beaker.session.validate_key = validation_key
440
440
441 ; Sets the session as invalid (also logging out the user) if it has not been
442 ; accessed for the given amount of time, in seconds
443 beaker.session.timeout = 2592000
443 beaker.session.timeout = 2592000
444 beaker.session.httponly = true
444 beaker.session.httponly = true
445
445
446 ; Path to use for the cookie. Set to prefix if you use prefix middleware
446 ; Path to use for the cookie. Set to prefix if you use prefix middleware
447 #beaker.session.cookie_path = /custom_prefix
447 #beaker.session.cookie_path = /custom_prefix
448
448
449 ; Set https secure cookie
449 ; Set https secure cookie
450 beaker.session.secure = false
450 beaker.session.secure = false
451
451
452 ; default cookie expiration time in seconds; set to `true` to expire
453 ; at browser close
454 #beaker.session.cookie_expires = 3600
454 #beaker.session.cookie_expires = 3600
455
455
456 ; #############################
456 ; #############################
457 ; SEARCH INDEXING CONFIGURATION
457 ; SEARCH INDEXING CONFIGURATION
458 ; #############################
458 ; #############################
459
459
460 ; Full text search indexer is available in rhodecode-tools under
460 ; Full text search indexer is available in rhodecode-tools under
461 ; `rhodecode-tools index` command
461 ; `rhodecode-tools index` command
462
462
463 ; WHOOSH backend, doesn't require additional services to run;
464 ; it works well with a few dozen repos
465 search.module = rhodecode.lib.index.whoosh
465 search.module = rhodecode.lib.index.whoosh
466 search.location = %(here)s/data/index
466 search.location = %(here)s/data/index
467
467
468 ; ####################
468 ; ####################
469 ; CHANNELSTREAM CONFIG
469 ; CHANNELSTREAM CONFIG
470 ; ####################
470 ; ####################
471
471
472 ; channelstream enables persistent connections and live notification
472 ; channelstream enables persistent connections and live notification
473 ; in the system. It's also used by the chat system
473 ; in the system. It's also used by the chat system
474
474
475 channelstream.enabled = true
475 channelstream.enabled = true
476
476
477 ; server address for channelstream server on the backend
477 ; server address for channelstream server on the backend
478 channelstream.server = channelstream:9800
478 channelstream.server = channelstream:9800
479
479
480 ; location of the channelstream server from outside world
480 ; location of the channelstream server from outside world
481 ; use ws:// for http or wss:// for https. This address needs to be handled
481 ; use ws:// for http or wss:// for https. This address needs to be handled
482 ; by external HTTP server such as Nginx or Apache
482 ; by external HTTP server such as Nginx or Apache
483 ; see Nginx/Apache configuration examples in our docs
483 ; see Nginx/Apache configuration examples in our docs
484 channelstream.ws_url = ws://rhodecode.yourserver.com/_channelstream
484 channelstream.ws_url = ws://rhodecode.yourserver.com/_channelstream
485 channelstream.secret = ENV_GENERATED
485 channelstream.secret = ENV_GENERATED
486 channelstream.history.location = /var/opt/rhodecode_data/channelstream_history
486 channelstream.history.location = /var/opt/rhodecode_data/channelstream_history
487
487
488 ; Internal application path that Javascript uses to connect to.
489 ; If you use proxy-prefix, the prefix should be added before /_channelstream
490 channelstream.proxy_path = /_channelstream
490 channelstream.proxy_path = /_channelstream
491
491
492
492
493 ; ##############################
493 ; ##############################
494 ; MAIN RHODECODE DATABASE CONFIG
494 ; MAIN RHODECODE DATABASE CONFIG
495 ; ##############################
495 ; ##############################
496
496
497 #sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode.db?timeout=30
497 #sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode.db?timeout=30
498 #sqlalchemy.db1.url = postgresql://postgres:qweqwe@localhost/rhodecode
498 #sqlalchemy.db1.url = postgresql://postgres:qweqwe@localhost/rhodecode
499 #sqlalchemy.db1.url = mysql://root:qweqwe@localhost/rhodecode?charset=utf8
499 #sqlalchemy.db1.url = mysql://root:qweqwe@localhost/rhodecode?charset=utf8
500 ; pymysql is an alternative driver for MySQL, use in case of problems with default one
500 ; pymysql is an alternative driver for MySQL, use in case of problems with default one
501 #sqlalchemy.db1.url = mysql+pymysql://root:qweqwe@localhost/rhodecode
501 #sqlalchemy.db1.url = mysql+pymysql://root:qweqwe@localhost/rhodecode
502
502
503 sqlalchemy.db1.url = postgresql://postgres:qweqwe@localhost/rhodecode
503 sqlalchemy.db1.url = postgresql://postgres:qweqwe@localhost/rhodecode
504
504
505 ; see sqlalchemy docs for other advanced settings
505 ; see sqlalchemy docs for other advanced settings
506 ; print the sql statements to output
506 ; print the sql statements to output
507 sqlalchemy.db1.echo = false
507 sqlalchemy.db1.echo = false
508
508
509 ; recycle the connections after this amount of seconds
509 ; recycle the connections after this amount of seconds
510 sqlalchemy.db1.pool_recycle = 3600
510 sqlalchemy.db1.pool_recycle = 3600
511
511
512 ; the number of connections to keep open inside the connection pool.
512 ; the number of connections to keep open inside the connection pool.
513 ; 0 indicates no limit
513 ; 0 indicates no limit
514 ; the general calculus with gevent is:
514 ; the general calculus with gevent is:
515 ; if your system allows 500 concurrent greenlets (max_connections) that all do database access,
515 ; if your system allows 500 concurrent greenlets (max_connections) that all do database access,
516 ; then increase pool size + max overflow so that they add up to 500.
516 ; then increase pool size + max overflow so that they add up to 500.
517 #sqlalchemy.db1.pool_size = 5
517 #sqlalchemy.db1.pool_size = 5
518
518
519 ; The number of connections to allow in connection pool "overflow", that is
519 ; The number of connections to allow in connection pool "overflow", that is
520 ; connections that can be opened above and beyond the pool_size setting,
520 ; connections that can be opened above and beyond the pool_size setting,
521 ; which defaults to five.
521 ; which defaults to five.
522 #sqlalchemy.db1.max_overflow = 10
522 #sqlalchemy.db1.max_overflow = 10
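; A hedged worked example of the rule above: to support 500 concurrent
; database-using greenlets, the two values could be set so they add up to 500:
#sqlalchemy.db1.pool_size = 100
#sqlalchemy.db1.max_overflow = 400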
523
523
524 ; Connection check ping, used to detect broken database connections;
525 ; can be enabled to better handle "MySQL has gone away" errors
526 #sqlalchemy.db1.ping_connection = true
526 #sqlalchemy.db1.ping_connection = true
527
527
528 ; ##########
528 ; ##########
529 ; VCS CONFIG
529 ; VCS CONFIG
530 ; ##########
530 ; ##########
531 vcs.server.enable = true
531 vcs.server.enable = true
532 vcs.server = vcsserver:10010
532 vcs.server = vcsserver:10010
533
533
534 ; Web server connectivity protocol, responsible for web based VCS operations
534 ; Web server connectivity protocol, responsible for web based VCS operations
535 ; Available protocols are:
535 ; Available protocols are:
536 ; `http` - use http-rpc backend (default)
536 ; `http` - use http-rpc backend (default)
537 vcs.server.protocol = http
537 vcs.server.protocol = http
538
538
539 ; Push/Pull operations protocol, available options are:
539 ; Push/Pull operations protocol, available options are:
540 ; `http` - use http-rpc backend (default)
540 ; `http` - use http-rpc backend (default)
541 vcs.scm_app_implementation = http
541 vcs.scm_app_implementation = http
542
542
543 ; Push/Pull operations hooks protocol, available options are:
543 ; Push/Pull operations hooks protocol, available options are:
544 ; `http` - use http-rpc backend (default)
544 ; `http` - use http-rpc backend (default)
545 vcs.hooks.protocol = http
545 vcs.hooks.protocol = http
546
546
547 ; Host on which this instance is listening for hooks. vcsserver will call this host to pull/push hooks so it should be
547 ; Host on which this instance is listening for hooks. vcsserver will call this host to pull/push hooks so it should be
548 ; accessible via network.
548 ; accessible via network.
549 ; Use vcs.hooks.host = "*" to bind to current hostname (for Docker)
549 ; Use vcs.hooks.host = "*" to bind to current hostname (for Docker)
550 vcs.hooks.host = *
550 vcs.hooks.host = *
551
551
552 ; Start VCSServer with this instance as a subprocess, useful for development
552 ; Start VCSServer with this instance as a subprocess, useful for development
553 vcs.start_server = false
553 vcs.start_server = false
554
554
555 ; List of enabled VCS backends, available options are:
555 ; List of enabled VCS backends, available options are:
556 ; `hg` - mercurial
556 ; `hg` - mercurial
557 ; `git` - git
557 ; `git` - git
558 ; `svn` - subversion
558 ; `svn` - subversion
559 vcs.backends = hg, git, svn
559 vcs.backends = hg, git, svn
560
560
561 ; Wait this number of seconds before killing connection to the vcsserver
561 ; Wait this number of seconds before killing connection to the vcsserver
562 vcs.connection_timeout = 3600
562 vcs.connection_timeout = 3600
563
563
564 ; Cache flag to cache vcsserver remote calls locally
564 ; Cache flag to cache vcsserver remote calls locally
565 ; It uses cache_region `cache_repo`
565 ; It uses cache_region `cache_repo`
566 vcs.methods.cache = true
566 vcs.methods.cache = true
567
567
568 ; ####################################################
568 ; ####################################################
569 ; Subversion proxy support (mod_dav_svn)
569 ; Subversion proxy support (mod_dav_svn)
570 ; Maps RhodeCode repo groups into SVN paths for Apache
570 ; Maps RhodeCode repo groups into SVN paths for Apache
571 ; ####################################################
571 ; ####################################################
572
572
573 ; Compatibility version when creating SVN repositories. Defaults to newest version when commented out.
573 ; Compatibility version when creating SVN repositories. Defaults to newest version when commented out.
574 ; Set a numeric version for your current SVN e.g 1.8, or 1.12
574 ; Set a numeric version for your current SVN e.g 1.8, or 1.12
575 ; Legacy available options are: pre-1.4-compatible, pre-1.5-compatible, pre-1.6-compatible, pre-1.8-compatible, pre-1.9-compatible
575 ; Legacy available options are: pre-1.4-compatible, pre-1.5-compatible, pre-1.6-compatible, pre-1.8-compatible, pre-1.9-compatible
576 #vcs.svn.compatible_version = 1.8
576 #vcs.svn.compatible_version = 1.8
577
577
578 ; Enable SVN proxy of requests over HTTP
578 ; Enable SVN proxy of requests over HTTP
579 vcs.svn.proxy.enabled = true
579 vcs.svn.proxy.enabled = true
580
580
581 ; host to connect to running SVN subsystem
581 ; host to connect to running SVN subsystem
582 vcs.svn.proxy.host = http://svn:8090
582 vcs.svn.proxy.host = http://svn:8090
583
583
584 ; Enable or disable the config file generation.
584 ; Enable or disable the config file generation.
585 svn.proxy.generate_config = true
585 svn.proxy.generate_config = true
586
586
587 ; Generate config file with `SVNListParentPath` set to `On`.
587 ; Generate config file with `SVNListParentPath` set to `On`.
588 svn.proxy.list_parent_path = true
588 svn.proxy.list_parent_path = true
589
589
590 ; Set location and file name of generated config file.
590 ; Set location and file name of generated config file.
591 svn.proxy.config_file_path = /etc/rhodecode/conf/svn/mod_dav_svn.conf
591 svn.proxy.config_file_path = /etc/rhodecode/conf/svn/mod_dav_svn.conf
592
592
593 ; alternative mod_dav config template. This needs to be a valid mako template
593 ; alternative mod_dav config template. This needs to be a valid mako template
594 ; Example template can be found in the source code:
594 ; Example template can be found in the source code:
595 ; rhodecode/apps/svn_support/templates/mod-dav-svn.conf.mako
595 ; rhodecode/apps/svn_support/templates/mod-dav-svn.conf.mako
596 #svn.proxy.config_template = ~/.rccontrol/enterprise-1/custom_svn_conf.mako
596 #svn.proxy.config_template = ~/.rccontrol/enterprise-1/custom_svn_conf.mako
597
597
598 ; Used as a prefix to the `Location` block in the generated config file.
598 ; Used as a prefix to the `Location` block in the generated config file.
599 ; In most cases it should be set to `/`.
599 ; In most cases it should be set to `/`.
600 svn.proxy.location_root = /
600 svn.proxy.location_root = /
601
601
602 ; Command to reload the mod dav svn configuration on change.
602 ; Command to reload the mod dav svn configuration on change.
603 ; Example: `/etc/init.d/apache2 reload` or /home/USER/apache_reload.sh
603 ; Example: `/etc/init.d/apache2 reload` or /home/USER/apache_reload.sh
604 ; Make sure the user who runs the RhodeCode process is allowed to reload Apache
604 ; Make sure the user who runs the RhodeCode process is allowed to reload Apache
605 #svn.proxy.reload_cmd = /etc/init.d/apache2 reload
605 #svn.proxy.reload_cmd = /etc/init.d/apache2 reload
606
606
607 ; If the timeout expires before the reload command finishes, the command will
607 ; If the timeout expires before the reload command finishes, the command will
608 ; be killed. Setting it to zero means no timeout. Defaults to 10 seconds.
608 ; be killed. Setting it to zero means no timeout. Defaults to 10 seconds.
609 #svn.proxy.reload_timeout = 10
609 #svn.proxy.reload_timeout = 10
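; For orientation only: a minimal sketch of the kind of <Location> block the
; generated mod_dav_svn config contains, assuming the defaults above (the real
; output is produced from the mako template and will differ in detail):
;   <Location /my-repo-group>
;     DAV svn
;     SVNParentPath /path/to/repo_store/my-repo-group
;     SVNListParentPath On
;   </Location>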
610
610
611 ; ####################
611 ; ####################
612 ; SSH Support Settings
612 ; SSH Support Settings
613 ; ####################
613 ; ####################
614
614
615 ; Defines if a custom authorized_keys file should be created and written on
615 ; Defines if a custom authorized_keys file should be created and written on
616 ; any change of user SSH keys. Setting this to false also disables the
616 ; any change of user SSH keys. Setting this to false also disables the
617 ; possibility for users to add SSH keys from the web interface. Super admins
617 ; possibility for users to add SSH keys from the web interface. Super admins
618 ; can still manage SSH keys.
618 ; can still manage SSH keys.
619 ssh.generate_authorized_keyfile = true
619 ssh.generate_authorized_keyfile = true
620
620
621 ; Options for ssh, default is `no-pty,no-port-forwarding,no-X11-forwarding,no-agent-forwarding`
621 ; Options for ssh, default is `no-pty,no-port-forwarding,no-X11-forwarding,no-agent-forwarding`
622 # ssh.authorized_keys_ssh_opts =
622 # ssh.authorized_keys_ssh_opts =
623
623
624 ; Path to the authorized_keys file where the generated entries are placed.
624 ; Path to the authorized_keys file where the generated entries are placed.
625 ; It is possible to have multiple key files specified in `sshd_config` e.g.
625 ; It is possible to have multiple key files specified in `sshd_config` e.g.
626 ; AuthorizedKeysFile %h/.ssh/authorized_keys %h/.ssh/authorized_keys_rhodecode
626 ; AuthorizedKeysFile %h/.ssh/authorized_keys %h/.ssh/authorized_keys_rhodecode
627 ssh.authorized_keys_file_path = /etc/rhodecode/conf/ssh/authorized_keys_rhodecode
627 ssh.authorized_keys_file_path = /etc/rhodecode/conf/ssh/authorized_keys_rhodecode
628
628
629 ; Command to execute the SSH wrapper. The binary is available in the
629 ; Command to execute the SSH wrapper. The binary is available in the
630 ; RhodeCode installation directory.
630 ; RhodeCode installation directory.
631 ; e.g /usr/local/bin/rhodecode_bin/bin/rc-ssh-wrapper
631 ; e.g /usr/local/bin/rhodecode_bin/bin/rc-ssh-wrapper
632 ssh.wrapper_cmd = /usr/local/bin/rhodecode_bin/bin/rc-ssh-wrapper
632 ssh.wrapper_cmd = /usr/local/bin/rhodecode_bin/bin/rc-ssh-wrapper
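; For illustration, an entry in the generated authorized_keys file follows the
; standard OpenSSH "options,command=... key-type key comment" layout, roughly
; (the wrapper arguments shown here are an assumption, not taken from this file):
;   no-pty,no-port-forwarding,no-X11-forwarding,no-agent-forwarding,command="/usr/local/bin/rhodecode_bin/bin/rc-ssh-wrapper <wrapper-args>" ssh-ed25519 AAAA... user@host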
633
633
634 ; Allow shell when executing the ssh-wrapper command
634 ; Allow shell when executing the ssh-wrapper command
635 ssh.wrapper_cmd_allow_shell = false
635 ssh.wrapper_cmd_allow_shell = false
636
636
637 ; Enables logging, and detailed output sent back to the client during SSH
637 ; Enables logging, and detailed output sent back to the client during SSH
638 ; operations. Useful for debugging, shouldn't be used in production.
638 ; operations. Useful for debugging, shouldn't be used in production.
639 ssh.enable_debug_logging = false
639 ssh.enable_debug_logging = false
640
640
641 ; Paths to the binary executables; by default these are just the binary names,
641 ; Paths to the binary executables; by default these are just the binary names,
642 ; but they can be overridden to point to custom binaries
642 ; but they can be overridden to point to custom binaries
643 ssh.executable.hg = /usr/local/bin/rhodecode_bin/vcs_bin/hg
643 ssh.executable.hg = /usr/local/bin/rhodecode_bin/vcs_bin/hg
644 ssh.executable.git = /usr/local/bin/rhodecode_bin/vcs_bin/git
644 ssh.executable.git = /usr/local/bin/rhodecode_bin/vcs_bin/git
645 ssh.executable.svn = /usr/local/bin/rhodecode_bin/vcs_bin/svnserve
645 ssh.executable.svn = /usr/local/bin/rhodecode_bin/vcs_bin/svnserve
646
646
647 ; Enables SSH key generator web interface. Disabling this still allows users
647 ; Enables SSH key generator web interface. Disabling this still allows users
648 ; to add their own keys.
648 ; to add their own keys.
649 ssh.enable_ui_key_generator = true
649 ssh.enable_ui_key_generator = true
650
650
651 ; Statsd client config; this is used to send metrics to statsd.
651 ; Statsd client config; this is used to send metrics to statsd.
652 ; We recommend setting up statsd_exporter and scraping the metrics with Prometheus
652 ; We recommend setting up statsd_exporter and scraping the metrics with Prometheus
653 #statsd.enabled = false
653 #statsd.enabled = false
654 #statsd.statsd_host = 0.0.0.0
654 #statsd.statsd_host = 0.0.0.0
655 #statsd.statsd_port = 8125
655 #statsd.statsd_port = 8125
656 #statsd.statsd_prefix =
656 #statsd.statsd_prefix =
657 #statsd.statsd_ipv6 = false
657 #statsd.statsd_ipv6 = false
658
658
659 ; Logging is configured automatically at server startup. Set this to false
659 ; Logging is configured automatically at server startup. Set this to false
660 ; to use the custom logging config below.
660 ; to use the custom logging config below.
661 ; RC_LOGGING_FORMATTER
661 ; RC_LOGGING_FORMATTER
662 ; RC_LOGGING_LEVEL
662 ; RC_LOGGING_LEVEL
663 ; these env variables can control the logging settings when autoconfigure is used
663 ; these env variables can control the logging settings when autoconfigure is used
664
664
665 #logging.autoconfigure = true
665 #logging.autoconfigure = true
666
666
667 ; specify your own custom logging config file to configure logging
667 ; specify your own custom logging config file to configure logging
668 #logging.logging_conf_file = /path/to/custom_logging.ini
668 #logging.logging_conf_file = /path/to/custom_logging.ini
669
669
670 ; Dummy marker to add new entries after.
670 ; Dummy marker to add new entries after.
671 ; Add any custom entries below. Please don't remove this marker.
671 ; Add any custom entries below. Please don't remove this marker.
672 custom.conf = 1
672 custom.conf = 1
673
673
674
674
675 ; #####################
675 ; #####################
676 ; LOGGING CONFIGURATION
676 ; LOGGING CONFIGURATION
677 ; #####################
677 ; #####################
678
678
679 [loggers]
679 [loggers]
680 keys = root, sqlalchemy, beaker, celery, rhodecode, ssh_wrapper
680 keys = root, sqlalchemy, beaker, celery, rhodecode, ssh_wrapper
681
681
682 [handlers]
682 [handlers]
683 keys = console, console_sql
683 keys = console, console_sql
684
684
685 [formatters]
685 [formatters]
686 keys = generic, json, color_formatter, color_formatter_sql
686 keys = generic, json, color_formatter, color_formatter_sql
687
687
688 ; #######
688 ; #######
689 ; LOGGERS
689 ; LOGGERS
690 ; #######
690 ; #######
691 [logger_root]
691 [logger_root]
692 level = NOTSET
692 level = NOTSET
693 handlers = console
693 handlers = console
694
694
695 [logger_sqlalchemy]
695 [logger_sqlalchemy]
696 level = INFO
696 level = INFO
697 handlers = console_sql
697 handlers = console_sql
698 qualname = sqlalchemy.engine
698 qualname = sqlalchemy.engine
699 propagate = 0
699 propagate = 0
700
700
701 [logger_beaker]
701 [logger_beaker]
702 level = DEBUG
702 level = DEBUG
703 handlers =
703 handlers =
704 qualname = beaker.container
704 qualname = beaker.container
705 propagate = 1
705 propagate = 1
706
706
707 [logger_rhodecode]
707 [logger_rhodecode]
708 level = DEBUG
708 level = DEBUG
709 handlers =
709 handlers =
710 qualname = rhodecode
710 qualname = rhodecode
711 propagate = 1
711 propagate = 1
712
712
713 [logger_ssh_wrapper]
713 [logger_ssh_wrapper]
714 level = DEBUG
714 level = DEBUG
715 handlers =
715 handlers =
716 qualname = ssh_wrapper
716 qualname = ssh_wrapper
717 propagate = 1
717 propagate = 1
718
718
719 [logger_celery]
719 [logger_celery]
720 level = DEBUG
720 level = DEBUG
721 handlers =
721 handlers =
722 qualname = celery
722 qualname = celery
723
723
724
724
725 ; ########
725 ; ########
726 ; HANDLERS
726 ; HANDLERS
727 ; ########
727 ; ########
728
728
729 [handler_console]
729 [handler_console]
730 class = StreamHandler
730 class = StreamHandler
731 args = (sys.stderr, )
731 args = (sys.stderr, )
732 level = INFO
732 level = INFO
733 ; To enable JSON formatted logs replace 'generic/color_formatter' with 'json'
733 ; To enable JSON formatted logs replace 'generic/color_formatter' with 'json'
734 ; This allows sending properly formatted logs to grafana loki or elasticsearch
734 ; This allows sending properly formatted logs to grafana loki or elasticsearch
735 formatter = generic
735 formatter = generic
736
736
737 [handler_console_sql]
737 [handler_console_sql]
738 ; "level = DEBUG" logs SQL queries and results.
738 ; "level = DEBUG" logs SQL queries and results.
739 ; "level = INFO" logs SQL queries.
739 ; "level = INFO" logs SQL queries.
740 ; "level = WARN" logs neither. (Recommended for production systems.)
740 ; "level = WARN" logs neither. (Recommended for production systems.)
741 class = StreamHandler
741 class = StreamHandler
742 args = (sys.stderr, )
742 args = (sys.stderr, )
743 level = WARN
743 level = WARN
744 ; To enable JSON formatted logs replace 'generic/color_formatter_sql' with 'json'
744 ; To enable JSON formatted logs replace 'generic/color_formatter_sql' with 'json'
745 ; This allows sending properly formatted logs to grafana loki or elasticsearch
745 ; This allows sending properly formatted logs to grafana loki or elasticsearch
746 formatter = generic
746 formatter = generic
747
747
748 ; ##########
748 ; ##########
749 ; FORMATTERS
749 ; FORMATTERS
750 ; ##########
750 ; ##########
751
751
752 [formatter_generic]
752 [formatter_generic]
753 class = rhodecode.lib.logging_formatter.ExceptionAwareFormatter
753 class = rhodecode.lib.logging_formatter.ExceptionAwareFormatter
754 format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s
754 format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s
755 datefmt = %Y-%m-%d %H:%M:%S
755 datefmt = %Y-%m-%d %H:%M:%S
756
756
757 [formatter_color_formatter]
757 [formatter_color_formatter]
758 class = rhodecode.lib.logging_formatter.ColorFormatter
758 class = rhodecode.lib.logging_formatter.ColorFormatter
759 format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s
759 format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s
760 datefmt = %Y-%m-%d %H:%M:%S
760 datefmt = %Y-%m-%d %H:%M:%S
761
761
762 [formatter_color_formatter_sql]
762 [formatter_color_formatter_sql]
763 class = rhodecode.lib.logging_formatter.ColorFormatterSql
763 class = rhodecode.lib.logging_formatter.ColorFormatterSql
764 format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s
764 format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s
765 datefmt = %Y-%m-%d %H:%M:%S
765 datefmt = %Y-%m-%d %H:%M:%S
766
766
767 [formatter_json]
767 [formatter_json]
768 format = %(timestamp)s %(levelname)s %(name)s %(message)s %(req_id)s
768 format = %(timestamp)s %(levelname)s %(name)s %(message)s %(req_id)s
769 class = rhodecode.lib._vendor.jsonlogger.JsonFormatter
769 class = rhodecode.lib._vendor.jsonlogger.JsonFormatter
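; With the json formatter selected on a handler, each record is emitted as a single
; JSON object whose keys mirror the format string above, roughly (illustrative values only):
; {"timestamp": "2023-01-01 12:00:00,123", "levelname": "INFO", "name": "rhodecode", "message": "request finished", "req_id": "00000000-0000-0000-0000-000000000000"}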
@@ -1,424 +1,423 b''
1 # Copyright (C) 2011-2023 RhodeCode GmbH
1 # Copyright (C) 2011-2023 RhodeCode GmbH
2 #
2 #
3 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
4 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
5 # (only), as published by the Free Software Foundation.
5 # (only), as published by the Free Software Foundation.
6 #
6 #
7 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
10 # GNU General Public License for more details.
11 #
11 #
12 # You should have received a copy of the GNU Affero General Public License
12 # You should have received a copy of the GNU Affero General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 #
14 #
15 # This program is dual-licensed. If you wish to learn more about the
15 # This program is dual-licensed. If you wish to learn more about the
16 # RhodeCode Enterprise Edition, including its added features, Support services,
16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18
18
19 import logging
19 import logging
20 import itertools
20 import itertools
21 import base64
21 import base64
22
22
23 from rhodecode.api import (
23 from rhodecode.api import (
24 jsonrpc_method, JSONRPCError, JSONRPCForbidden, find_methods)
24 jsonrpc_method, JSONRPCError, JSONRPCForbidden, find_methods)
25
25
26 from rhodecode.api.utils import (
26 from rhodecode.api.utils import (
27 Optional, OAttr, has_superadmin_permission, get_user_or_error)
27 Optional, OAttr, has_superadmin_permission, get_user_or_error)
28 from rhodecode.lib.utils import repo2db_mapper
28 from rhodecode.lib.utils import repo2db_mapper, get_rhodecode_repo_store_path
29 from rhodecode.lib import system_info
29 from rhodecode.lib import system_info
30 from rhodecode.lib import user_sessions
30 from rhodecode.lib import user_sessions
31 from rhodecode.lib import exc_tracking
31 from rhodecode.lib import exc_tracking
32 from rhodecode.lib.ext_json import json
32 from rhodecode.lib.ext_json import json
33 from rhodecode.lib.utils2 import safe_int
33 from rhodecode.lib.utils2 import safe_int
34 from rhodecode.model.db import UserIpMap
34 from rhodecode.model.db import UserIpMap
35 from rhodecode.model.scm import ScmModel
35 from rhodecode.model.scm import ScmModel
36 from rhodecode.model.settings import VcsSettingsModel
37 from rhodecode.apps.file_store import utils
36 from rhodecode.apps.file_store import utils
38 from rhodecode.apps.file_store.exceptions import FileNotAllowedException, \
37 from rhodecode.apps.file_store.exceptions import FileNotAllowedException, \
39 FileOverSizeException
38 FileOverSizeException
40
39
41 log = logging.getLogger(__name__)
40 log = logging.getLogger(__name__)
42
41
43
42
44 @jsonrpc_method()
43 @jsonrpc_method()
45 def get_server_info(request, apiuser):
44 def get_server_info(request, apiuser):
46 """
45 """
47 Returns the |RCE| server information.
46 Returns the |RCE| server information.
48
47
49 This includes the running version of |RCE| and all installed
48 This includes the running version of |RCE| and all installed
50 packages. This command takes the following options:
49 packages. This command takes the following options:
51
50
52 :param apiuser: This is filled automatically from the |authtoken|.
51 :param apiuser: This is filled automatically from the |authtoken|.
53 :type apiuser: AuthUser
52 :type apiuser: AuthUser
54
53
55 Example output:
54 Example output:
56
55
57 .. code-block:: bash
56 .. code-block:: bash
58
57
59 id : <id_given_in_input>
58 id : <id_given_in_input>
60 result : {
59 result : {
61 'modules': [<module name>,...]
60 'modules': [<module name>,...]
62 'py_version': <python version>,
61 'py_version': <python version>,
63 'platform': <platform type>,
62 'platform': <platform type>,
64 'rhodecode_version': <rhodecode version>
63 'rhodecode_version': <rhodecode version>
65 }
64 }
66 error : null
65 error : null
67 """
66 """
68
67
69 if not has_superadmin_permission(apiuser):
68 if not has_superadmin_permission(apiuser):
70 raise JSONRPCForbidden()
69 raise JSONRPCForbidden()
71
70
72 server_info = ScmModel().get_server_info(request.environ)
71 server_info = ScmModel().get_server_info(request.environ)
73 # rhodecode-index requires those
72 # rhodecode-index requires those
74
73
75 server_info['index_storage'] = server_info['search']['value']['location']
74 server_info['index_storage'] = server_info['search']['value']['location']
76 server_info['storage'] = server_info['storage']['value']['path']
75 server_info['storage'] = server_info['storage']['value']['path']
77
76
78 return server_info
77 return server_info
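
# For reference, a minimal sketch of how this method is typically invoked over
# the JSON-RPC API (endpoint path and token are illustrative assumptions, not
# taken from this module):
#
#   import requests
#   payload = {"id": 1, "auth_token": "<secret>", "method": "get_server_info", "args": {}}
#   response = requests.post("https://rhodecode.example.com/_admin/api", json=payload)
#   print(response.json()["result"]["rhodecode_version"])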
79
78
80
79
81 @jsonrpc_method()
80 @jsonrpc_method()
82 def get_repo_store(request, apiuser):
81 def get_repo_store(request, apiuser):
83 """
82 """
84 Returns the |RCE| repository storage information.
83 Returns the |RCE| repository storage information.
85
84
86 :param apiuser: This is filled automatically from the |authtoken|.
85 :param apiuser: This is filled automatically from the |authtoken|.
87 :type apiuser: AuthUser
86 :type apiuser: AuthUser
88
87
89 Example output:
88 Example output:
90
89
91 .. code-block:: bash
90 .. code-block:: bash
92
91
93 id : <id_given_in_input>
92 id : <id_given_in_input>
94 result : {
93 result : {
95 'path': '<path_to_repository_store>'
94 'path': '<path_to_repository_store>'
99 }
98 }
100 error : null
99 error : null
101 """
100 """
102
101
103 if not has_superadmin_permission(apiuser):
102 if not has_superadmin_permission(apiuser):
104 raise JSONRPCForbidden()
103 raise JSONRPCForbidden()
105
104
106 path = VcsSettingsModel().get_repos_location()
105 path = get_rhodecode_repo_store_path()
107 return {"path": path}
106 return {"path": path}
108
107
109
108
110 @jsonrpc_method()
109 @jsonrpc_method()
111 def get_ip(request, apiuser, userid=Optional(OAttr('apiuser'))):
110 def get_ip(request, apiuser, userid=Optional(OAttr('apiuser'))):
112 """
111 """
113 Displays the IP Address as seen from the |RCE| server.
112 Displays the IP Address as seen from the |RCE| server.
114
113
115 * This command displays the IP Address, as well as all the defined IP
114 * This command displays the IP Address, as well as all the defined IP
116 addresses for the specified user. If the ``userid`` is not set, the
115 addresses for the specified user. If the ``userid`` is not set, the
117 data returned is for the user calling the method.
116 data returned is for the user calling the method.
118
117
119 This command can only be run using an |authtoken| with admin rights to
118 This command can only be run using an |authtoken| with admin rights to
120 the specified repository.
119 the specified repository.
121
120
122 This command takes the following options:
121 This command takes the following options:
123
122
124 :param apiuser: This is filled automatically from |authtoken|.
123 :param apiuser: This is filled automatically from |authtoken|.
125 :type apiuser: AuthUser
124 :type apiuser: AuthUser
126 :param userid: Sets the userid for which associated IP Address data
125 :param userid: Sets the userid for which associated IP Address data
127 is returned.
126 is returned.
128 :type userid: Optional(str or int)
127 :type userid: Optional(str or int)
129
128
130 Example output:
129 Example output:
131
130
132 .. code-block:: bash
131 .. code-block:: bash
133
132
134 id : <id_given_in_input>
133 id : <id_given_in_input>
135 result : {
134 result : {
136 "server_ip_addr": "<ip_from_clien>",
135 "server_ip_addr": "<ip_from_clien>",
137 "user_ips": [
136 "user_ips": [
138 {
137 {
139 "ip_addr": "<ip_with_mask>",
138 "ip_addr": "<ip_with_mask>",
140 "ip_range": ["<start_ip>", "<end_ip>"],
139 "ip_range": ["<start_ip>", "<end_ip>"],
141 },
140 },
142 ...
141 ...
143 ]
142 ]
144 }
143 }
145
144
146 """
145 """
147 if not has_superadmin_permission(apiuser):
146 if not has_superadmin_permission(apiuser):
148 raise JSONRPCForbidden()
147 raise JSONRPCForbidden()
149
148
150 userid = Optional.extract(userid, evaluate_locals=locals())
149 userid = Optional.extract(userid, evaluate_locals=locals())
151 userid = getattr(userid, 'user_id', userid)
150 userid = getattr(userid, 'user_id', userid)
152
151
153 user = get_user_or_error(userid)
152 user = get_user_or_error(userid)
154 ips = UserIpMap.query().filter(UserIpMap.user == user).all()
153 ips = UserIpMap.query().filter(UserIpMap.user == user).all()
155 return {
154 return {
156 'server_ip_addr': request.rpc_ip_addr,
155 'server_ip_addr': request.rpc_ip_addr,
157 'user_ips': ips
156 'user_ips': ips
158 }
157 }
159
158
160
159
161 @jsonrpc_method()
160 @jsonrpc_method()
162 def rescan_repos(request, apiuser, remove_obsolete=Optional(False)):
161 def rescan_repos(request, apiuser, remove_obsolete=Optional(False)):
163 """
162 """
164 Triggers a rescan of the specified repositories.
163 Triggers a rescan of the specified repositories.
165
164
166 * If the ``remove_obsolete`` option is set, it also deletes repositories
165 * If the ``remove_obsolete`` option is set, it also deletes repositories
167 that are found in the database but not on the file system, so-called
166 that are found in the database but not on the file system, so-called
168 "clean zombies".
167 "clean zombies".
169
168
170 This command can only be run using an |authtoken| with admin rights to
169 This command can only be run using an |authtoken| with admin rights to
171 the specified repository.
170 the specified repository.
172
171
173 This command takes the following options:
172 This command takes the following options:
174
173
175 :param apiuser: This is filled automatically from the |authtoken|.
174 :param apiuser: This is filled automatically from the |authtoken|.
176 :type apiuser: AuthUser
175 :type apiuser: AuthUser
177 :param remove_obsolete: Deletes repositories from the database that
176 :param remove_obsolete: Deletes repositories from the database that
178 are not found on the filesystem.
177 are not found on the filesystem.
179 :type remove_obsolete: Optional(``True`` | ``False``)
178 :type remove_obsolete: Optional(``True`` | ``False``)
180
179
181 Example output:
180 Example output:
182
181
183 .. code-block:: bash
182 .. code-block:: bash
184
183
185 id : <id_given_in_input>
184 id : <id_given_in_input>
186 result : {
185 result : {
187 'added': [<added repository name>,...]
186 'added': [<added repository name>,...]
188 'removed': [<removed repository name>,...]
187 'removed': [<removed repository name>,...]
189 }
188 }
190 error : null
189 error : null
191
190
192 Example error output:
191 Example error output:
193
192
194 .. code-block:: bash
193 .. code-block:: bash
195
194
196 id : <id_given_in_input>
195 id : <id_given_in_input>
197 result : null
196 result : null
198 error : {
197 error : {
199 'Error occurred during rescan repositories action'
198 'Error occurred during rescan repositories action'
200 }
199 }
201
200
202 """
201 """
203 if not has_superadmin_permission(apiuser):
202 if not has_superadmin_permission(apiuser):
204 raise JSONRPCForbidden()
203 raise JSONRPCForbidden()
205
204
206 try:
205 try:
207 rm_obsolete = Optional.extract(remove_obsolete)
206 rm_obsolete = Optional.extract(remove_obsolete)
208 added, removed = repo2db_mapper(ScmModel().repo_scan(),
207 added, removed = repo2db_mapper(ScmModel().repo_scan(),
209 remove_obsolete=rm_obsolete, force_hooks_rebuild=True)
208 remove_obsolete=rm_obsolete, force_hooks_rebuild=True)
210 return {'added': added, 'removed': removed}
209 return {'added': added, 'removed': removed}
211 except Exception:
210 except Exception:
212 log.exception('Failed to run repo rescan')
211 log.exception('Failed to run repo rescan')
213 raise JSONRPCError(
212 raise JSONRPCError(
214 'Error occurred during rescan repositories action'
213 'Error occurred during rescan repositories action'
215 )
214 )
216
215
217
216
218 @jsonrpc_method()
217 @jsonrpc_method()
219 def cleanup_sessions(request, apiuser, older_then=Optional(60)):
218 def cleanup_sessions(request, apiuser, older_then=Optional(60)):
220 """
219 """
221 Triggers a session cleanup action.
220 Triggers a session cleanup action.
222
221
223 If the ``older_then`` option is set, only sessions that haven't been
222 If the ``older_then`` option is set, only sessions that haven't been
224 accessed in the given number of days will be removed.
223 accessed in the given number of days will be removed.
225
224
226 This command can only be run using an |authtoken| with admin rights to
225 This command can only be run using an |authtoken| with admin rights to
227 the specified repository.
226 the specified repository.
228
227
229 This command takes the following options:
228 This command takes the following options:
230
229
231 :param apiuser: This is filled automatically from the |authtoken|.
230 :param apiuser: This is filled automatically from the |authtoken|.
232 :type apiuser: AuthUser
231 :type apiuser: AuthUser
233 :param older_then: Deletes sessions that haven't been accessed
232 :param older_then: Deletes sessions that haven't been accessed
234 in the given number of days.
233 in the given number of days.
235 :type older_then: Optional(int)
234 :type older_then: Optional(int)
236
235
237 Example output:
236 Example output:
238
237
239 .. code-block:: bash
238 .. code-block:: bash
240
239
241 id : <id_given_in_input>
240 id : <id_given_in_input>
242 result: {
241 result: {
243 "backend": "<type of backend>",
242 "backend": "<type of backend>",
244 "sessions_removed": <number_of_removed_sessions>
243 "sessions_removed": <number_of_removed_sessions>
245 }
244 }
246 error : null
245 error : null
247
246
248 Example error output:
247 Example error output:
249
248
250 .. code-block:: bash
249 .. code-block:: bash
251
250
252 id : <id_given_in_input>
251 id : <id_given_in_input>
253 result : null
252 result : null
254 error : {
253 error : {
255 'Error occurred during session cleanup'
254 'Error occurred during session cleanup'
256 }
255 }
257
256
258 """
257 """
259 if not has_superadmin_permission(apiuser):
258 if not has_superadmin_permission(apiuser):
260 raise JSONRPCForbidden()
259 raise JSONRPCForbidden()
261
260
262 older_then = safe_int(Optional.extract(older_then)) or 60
261 older_then = safe_int(Optional.extract(older_then)) or 60
263 older_than_seconds = 60 * 60 * 24 * older_then
262 older_than_seconds = 60 * 60 * 24 * older_then
264
263
265 config = system_info.rhodecode_config().get_value()['value']['config']
264 config = system_info.rhodecode_config().get_value()['value']['config']
266 session_model = user_sessions.get_session_handler(
265 session_model = user_sessions.get_session_handler(
267 config.get('beaker.session.type', 'memory'))(config)
266 config.get('beaker.session.type', 'memory'))(config)
268
267
269 backend = session_model.SESSION_TYPE
268 backend = session_model.SESSION_TYPE
270 try:
269 try:
271 cleaned = session_model.clean_sessions(
270 cleaned = session_model.clean_sessions(
272 older_than_seconds=older_than_seconds)
271 older_than_seconds=older_than_seconds)
273 return {'sessions_removed': cleaned, 'backend': backend}
272 return {'sessions_removed': cleaned, 'backend': backend}
274 except user_sessions.CleanupCommand as msg:
273 except user_sessions.CleanupCommand as msg:
275 return {'cleanup_command': str(msg), 'backend': backend}
274 return {'cleanup_command': str(msg), 'backend': backend}
276 except Exception as e:
275 except Exception as e:
277 log.exception('Failed session cleanup')
276 log.exception('Failed session cleanup')
278 raise JSONRPCError(
277 raise JSONRPCError(
279 'Error occurred during session cleanup'
278 'Error occurred during session cleanup'
280 )
279 )
281
280
282
281
283 @jsonrpc_method()
282 @jsonrpc_method()
284 def get_method(request, apiuser, pattern=Optional('*')):
283 def get_method(request, apiuser, pattern=Optional('*')):
285 """
284 """
286 Returns a list of all available API methods. By default the match pattern
285 Returns a list of all available API methods. By default the match pattern
287 is "*", but any other pattern can be specified, e.g. *comment* will return
286 is "*", but any other pattern can be specified, e.g. *comment* will return
288 all methods with "comment" in their name. If just a single method is matched,
287 all methods with "comment" in their name. If just a single method is matched,
289 the returned data will also include the method specification
288 the returned data will also include the method specification
290
289
291 This command can only be run using an |authtoken| with admin rights to
290 This command can only be run using an |authtoken| with admin rights to
292 the specified repository.
291 the specified repository.
293
292
294 This command takes the following options:
293 This command takes the following options:
295
294
296 :param apiuser: This is filled automatically from the |authtoken|.
295 :param apiuser: This is filled automatically from the |authtoken|.
297 :type apiuser: AuthUser
296 :type apiuser: AuthUser
298 :param pattern: pattern to match method names against
297 :param pattern: pattern to match method names against
299 :type pattern: Optional("*")
298 :type pattern: Optional("*")
300
299
301 Example output:
300 Example output:
302
301
303 .. code-block:: bash
302 .. code-block:: bash
304
303
305 id : <id_given_in_input>
304 id : <id_given_in_input>
306 "result": [
305 "result": [
307 "changeset_comment",
306 "changeset_comment",
308 "comment_pull_request",
307 "comment_pull_request",
309 "comment_commit"
308 "comment_commit"
310 ]
309 ]
311 error : null
310 error : null
312
311
313 .. code-block:: bash
312 .. code-block:: bash
314
313
315 id : <id_given_in_input>
314 id : <id_given_in_input>
316 "result": [
315 "result": [
317 "comment_commit",
316 "comment_commit",
318 {
317 {
319 "apiuser": "<RequiredType>",
318 "apiuser": "<RequiredType>",
320 "comment_type": "<Optional:u'note'>",
319 "comment_type": "<Optional:u'note'>",
321 "commit_id": "<RequiredType>",
320 "commit_id": "<RequiredType>",
322 "message": "<RequiredType>",
321 "message": "<RequiredType>",
323 "repoid": "<RequiredType>",
322 "repoid": "<RequiredType>",
324 "request": "<RequiredType>",
323 "request": "<RequiredType>",
325 "resolves_comment_id": "<Optional:None>",
324 "resolves_comment_id": "<Optional:None>",
326 "status": "<Optional:None>",
325 "status": "<Optional:None>",
327 "userid": "<Optional:<OptionalAttr:apiuser>>"
326 "userid": "<Optional:<OptionalAttr:apiuser>>"
328 }
327 }
329 ]
328 ]
330 error : null
329 error : null
331 """
330 """
332 from rhodecode.config.patches import inspect_getargspec
331 from rhodecode.config.patches import inspect_getargspec
333 inspect = inspect_getargspec()
332 inspect = inspect_getargspec()
334
333
335 if not has_superadmin_permission(apiuser):
334 if not has_superadmin_permission(apiuser):
336 raise JSONRPCForbidden()
335 raise JSONRPCForbidden()
337
336
338 pattern = Optional.extract(pattern)
337 pattern = Optional.extract(pattern)
339
338
340 matches = find_methods(request.registry.jsonrpc_methods, pattern)
339 matches = find_methods(request.registry.jsonrpc_methods, pattern)
341
340
342 args_desc = []
341 args_desc = []
343 matches_keys = list(matches.keys())
342 matches_keys = list(matches.keys())
344 if len(matches_keys) == 1:
343 if len(matches_keys) == 1:
345 func = matches[matches_keys[0]]
344 func = matches[matches_keys[0]]
346
345
347 argspec = inspect.getargspec(func)
346 argspec = inspect.getargspec(func)
348 arglist = argspec[0]
347 arglist = argspec[0]
349 defaults = list(map(repr, argspec[3] or []))
348 defaults = list(map(repr, argspec[3] or []))
350
349
351 default_empty = '<RequiredType>'
350 default_empty = '<RequiredType>'
352
351
353 # kw arguments required by this method
352 # kw arguments required by this method
354 func_kwargs = dict(itertools.zip_longest(
353 func_kwargs = dict(itertools.zip_longest(
355 reversed(arglist), reversed(defaults), fillvalue=default_empty))
354 reversed(arglist), reversed(defaults), fillvalue=default_empty))
356 args_desc.append(func_kwargs)
355 args_desc.append(func_kwargs)
357
356
358 return matches_keys + args_desc
357 return matches_keys + args_desc
359
358
360
359
361 @jsonrpc_method()
360 @jsonrpc_method()
362 def store_exception(request, apiuser, exc_data_json, prefix=Optional('rhodecode')):
361 def store_exception(request, apiuser, exc_data_json, prefix=Optional('rhodecode')):
363 """
362 """
364 Stores sent exception inside the built-in exception tracker in |RCE| server.
363 Stores sent exception inside the built-in exception tracker in |RCE| server.
365
364
366 This command can only be run using an |authtoken| with admin rights to
365 This command can only be run using an |authtoken| with admin rights to
367 the specified repository.
366 the specified repository.
368
367
369 This command takes the following options:
368 This command takes the following options:
370
369
371 :param apiuser: This is filled automatically from the |authtoken|.
370 :param apiuser: This is filled automatically from the |authtoken|.
372 :type apiuser: AuthUser
371 :type apiuser: AuthUser
373
372
374 :param exc_data_json: JSON data with exception e.g
373 :param exc_data_json: JSON data with exception e.g
375 {"exc_traceback": "Value `1` is not allowed", "exc_type_name": "ValueError"}
374 {"exc_traceback": "Value `1` is not allowed", "exc_type_name": "ValueError"}
376 :type exc_data_json: JSON data
375 :type exc_data_json: JSON data
377
376
378 :param prefix: prefix for error type, e.g 'rhodecode', 'vcsserver', 'rhodecode-tools'
377 :param prefix: prefix for error type, e.g 'rhodecode', 'vcsserver', 'rhodecode-tools'
379 :type prefix: Optional("rhodecode")
378 :type prefix: Optional("rhodecode")
380
379
381 Example output:
380 Example output:
382
381
383 .. code-block:: bash
382 .. code-block:: bash
384
383
385 id : <id_given_in_input>
384 id : <id_given_in_input>
386 "result": {
385 "result": {
387 "exc_id": 139718459226384,
386 "exc_id": 139718459226384,
388 "exc_url": "http://localhost:8080/_admin/settings/exceptions/139718459226384"
387 "exc_url": "http://localhost:8080/_admin/settings/exceptions/139718459226384"
389 }
388 }
390 error : null
389 error : null
391 """
390 """
392 if not has_superadmin_permission(apiuser):
391 if not has_superadmin_permission(apiuser):
393 raise JSONRPCForbidden()
392 raise JSONRPCForbidden()
394
393
395 prefix = Optional.extract(prefix)
394 prefix = Optional.extract(prefix)
396 exc_id = exc_tracking.generate_id()
395 exc_id = exc_tracking.generate_id()
397
396
398 try:
397 try:
399 exc_data = json.loads(exc_data_json)
398 exc_data = json.loads(exc_data_json)
400 except Exception:
399 except Exception:
401 log.error('Failed to parse JSON: %r', exc_data_json)
400 log.error('Failed to parse JSON: %r', exc_data_json)
402 raise JSONRPCError('Failed to parse JSON data from exc_data_json field. '
401 raise JSONRPCError('Failed to parse JSON data from exc_data_json field. '
403 'Please make sure it contains a valid JSON.')
402 'Please make sure it contains a valid JSON.')
404
403
405 try:
404 try:
406 exc_traceback = exc_data['exc_traceback']
405 exc_traceback = exc_data['exc_traceback']
407 exc_type_name = exc_data['exc_type_name']
406 exc_type_name = exc_data['exc_type_name']
408 exc_value = ''
407 exc_value = ''
409 except KeyError as err:
408 except KeyError as err:
410 raise JSONRPCError(
409 raise JSONRPCError(
411 f'Missing exc_traceback, or exc_type_name '
410 f'Missing exc_traceback, or exc_type_name '
412 f'in exc_data_json field. Missing: {err}')
411 f'in exc_data_json field. Missing: {err}')
413
412
414 class ExcType:
413 class ExcType:
415 __name__ = exc_type_name
414 __name__ = exc_type_name
416
415
417 exc_info = (ExcType(), exc_value, exc_traceback)
416 exc_info = (ExcType(), exc_value, exc_traceback)
418
417
419 exc_tracking._store_exception(
418 exc_tracking._store_exception(
420 exc_id=exc_id, exc_info=exc_info, prefix=prefix)
419 exc_id=exc_id, exc_info=exc_info, prefix=prefix)
421
420
422 exc_url = request.route_url(
421 exc_url = request.route_url(
423 'admin_settings_exception_tracker_show', exception_id=exc_id)
422 'admin_settings_exception_tracker_show', exception_id=exc_id)
424 return {'exc_id': exc_id, 'exc_url': exc_url}
423 return {'exc_id': exc_id, 'exc_url': exc_url}
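
# For reference, a sketch of how a client could build the exc_data_json payload
# expected by store_exception above (the required field names come from this
# method; the exception itself is illustrative):
#
#   import json, traceback
#   try:
#       raise ValueError('Value `1` is not allowed')
#   except ValueError as exc:
#       exc_data_json = json.dumps({
#           'exc_traceback': traceback.format_exc(),
#           'exc_type_name': type(exc).__name__,
#       })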
@@ -1,714 +1,711 b''
1 # Copyright (C) 2010-2023 RhodeCode GmbH
1 # Copyright (C) 2010-2023 RhodeCode GmbH
2 #
2 #
3 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
4 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
5 # (only), as published by the Free Software Foundation.
5 # (only), as published by the Free Software Foundation.
6 #
6 #
7 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
10 # GNU General Public License for more details.
11 #
11 #
12 # You should have received a copy of the GNU Affero General Public License
12 # You should have received a copy of the GNU Affero General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 #
14 #
15 # This program is dual-licensed. If you wish to learn more about the
15 # This program is dual-licensed. If you wish to learn more about the
16 # RhodeCode Enterprise Edition, including its added features, Support services,
16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18
18
19
19
20 import logging
20 import logging
21 import collections
21 import collections
22
22
23 import datetime
23 import datetime
24 import formencode
24 import formencode
25 import formencode.htmlfill
25 import formencode.htmlfill
26
26
27 import rhodecode
27 import rhodecode
28
28
29 from pyramid.httpexceptions import HTTPFound, HTTPNotFound
29 from pyramid.httpexceptions import HTTPFound, HTTPNotFound
30 from pyramid.renderers import render
30 from pyramid.renderers import render
31 from pyramid.response import Response
31 from pyramid.response import Response
32
32
33 from rhodecode.apps._base import BaseAppView
33 from rhodecode.apps._base import BaseAppView
34 from rhodecode.apps._base.navigation import navigation_list
34 from rhodecode.apps._base.navigation import navigation_list
35 from rhodecode.apps.svn_support.config_keys import generate_config
35 from rhodecode.apps.svn_support.config_keys import generate_config
36 from rhodecode.lib import helpers as h
36 from rhodecode.lib import helpers as h
37 from rhodecode.lib.auth import (
37 from rhodecode.lib.auth import (
38 LoginRequired, HasPermissionAllDecorator, CSRFRequired)
38 LoginRequired, HasPermissionAllDecorator, CSRFRequired)
39 from rhodecode.lib.celerylib import tasks, run_task
39 from rhodecode.lib.celerylib import tasks, run_task
40 from rhodecode.lib.str_utils import safe_str
40 from rhodecode.lib.str_utils import safe_str
41 from rhodecode.lib.utils import repo2db_mapper
41 from rhodecode.lib.utils import repo2db_mapper, get_rhodecode_repo_store_path
42 from rhodecode.lib.utils2 import str2bool, AttributeDict
42 from rhodecode.lib.utils2 import str2bool, AttributeDict
43 from rhodecode.lib.index import searcher_from_config
43 from rhodecode.lib.index import searcher_from_config
44
44
45 from rhodecode.model.db import RhodeCodeUi, Repository
45 from rhodecode.model.db import RhodeCodeUi, Repository
46 from rhodecode.model.forms import (ApplicationSettingsForm,
46 from rhodecode.model.forms import (ApplicationSettingsForm,
47 ApplicationUiSettingsForm, ApplicationVisualisationForm,
47 ApplicationUiSettingsForm, ApplicationVisualisationForm,
48 LabsSettingsForm, IssueTrackerPatternsForm)
48 LabsSettingsForm, IssueTrackerPatternsForm)
49 from rhodecode.model.permission import PermissionModel
49 from rhodecode.model.permission import PermissionModel
50 from rhodecode.model.repo_group import RepoGroupModel
50 from rhodecode.model.repo_group import RepoGroupModel
51
51
52 from rhodecode.model.scm import ScmModel
52 from rhodecode.model.scm import ScmModel
53 from rhodecode.model.notification import EmailNotificationModel
53 from rhodecode.model.notification import EmailNotificationModel
54 from rhodecode.model.meta import Session
54 from rhodecode.model.meta import Session
55 from rhodecode.model.settings import (
55 from rhodecode.model.settings import (
56 IssueTrackerSettingsModel, VcsSettingsModel, SettingNotFound,
56 IssueTrackerSettingsModel, VcsSettingsModel, SettingNotFound,
57 SettingsModel)
57 SettingsModel)
58
58
59
59
60 log = logging.getLogger(__name__)
60 log = logging.getLogger(__name__)
61
61
62
62
63 class AdminSettingsView(BaseAppView):
63 class AdminSettingsView(BaseAppView):
64
64
65 def load_default_context(self):
65 def load_default_context(self):
66 c = self._get_local_tmpl_context()
66 c = self._get_local_tmpl_context()
67 c.labs_active = str2bool(
67 c.labs_active = str2bool(
68 rhodecode.CONFIG.get('labs_settings_active', 'true'))
68 rhodecode.CONFIG.get('labs_settings_active', 'true'))
69 c.navlist = navigation_list(self.request)
69 c.navlist = navigation_list(self.request)
70 return c
70 return c
71
71
72 @classmethod
72 @classmethod
73 def _get_ui_settings(cls):
73 def _get_ui_settings(cls):
74 ret = RhodeCodeUi.query().all()
74 ret = RhodeCodeUi.query().all()
75
75
76 if not ret:
76 if not ret:
77 raise Exception('Could not get application ui settings !')
77 raise Exception('Could not get application ui settings !')
78 settings = {}
78 settings = {}
79 for each in ret:
79 for each in ret:
80 k = each.ui_key
80 k = each.ui_key
81 v = each.ui_value
81 v = each.ui_value
82 if k == '/':
82 if k == '/':
83 k = 'root_path'
83 k = 'root_path'
84
84
85 if k in ['push_ssl', 'publish', 'enabled']:
85 if k in ['push_ssl', 'publish', 'enabled']:
86 v = str2bool(v)
86 v = str2bool(v)
87
87
88 if k.find('.') != -1:
88 if k.find('.') != -1:
89 k = k.replace('.', '_')
89 k = k.replace('.', '_')
90
90
91 if each.ui_section in ['hooks', 'extensions']:
91 if each.ui_section in ['hooks', 'extensions']:
92 v = each.ui_active
92 v = each.ui_active
93
93
94 settings[each.ui_section + '_' + k] = v
94 settings[each.ui_section + '_' + k] = v
95 return settings
95 return settings
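
# Worked example of the mapping above (illustrative values): a row with
# ui_section='web', ui_key='push_ssl', ui_value='true' ends up as
# settings['web_push_ssl'] = True, while the ui_key '/' is exposed as
# settings['paths_root_path']; dots in keys are replaced with underscores.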
96
96
97 @classmethod
97 @classmethod
98 def _form_defaults(cls):
98 def _form_defaults(cls):
99 defaults = SettingsModel().get_all_settings()
99 defaults = SettingsModel().get_all_settings()
100 defaults.update(cls._get_ui_settings())
100 defaults.update(cls._get_ui_settings())
101
101
102 defaults.update({
102 defaults.update({
103 'new_svn_branch': '',
103 'new_svn_branch': '',
104 'new_svn_tag': '',
104 'new_svn_tag': '',
105 })
105 })
106 return defaults
106 return defaults
107
107
108 @LoginRequired()
108 @LoginRequired()
109 @HasPermissionAllDecorator('hg.admin')
109 @HasPermissionAllDecorator('hg.admin')
110 def settings_vcs(self):
110 def settings_vcs(self):
111 c = self.load_default_context()
111 c = self.load_default_context()
112 c.active = 'vcs'
112 c.active = 'vcs'
113 model = VcsSettingsModel()
113 model = VcsSettingsModel()
114 c.svn_branch_patterns = model.get_global_svn_branch_patterns()
114 c.svn_branch_patterns = model.get_global_svn_branch_patterns()
115 c.svn_tag_patterns = model.get_global_svn_tag_patterns()
115 c.svn_tag_patterns = model.get_global_svn_tag_patterns()
116
116
117 settings = self.request.registry.settings
117 settings = self.request.registry.settings
118 c.svn_proxy_generate_config = settings[generate_config]
118 c.svn_proxy_generate_config = settings[generate_config]
119
119
120 defaults = self._form_defaults()
120 defaults = self._form_defaults()
121
121
122 model.create_largeobjects_dirs_if_needed(defaults['paths_root_path'])
122 model.create_largeobjects_dirs_if_needed(defaults['paths_root_path'])
123
123
124 data = render('rhodecode:templates/admin/settings/settings.mako',
124 data = render('rhodecode:templates/admin/settings/settings.mako',
125 self._get_template_context(c), self.request)
125 self._get_template_context(c), self.request)
126 html = formencode.htmlfill.render(
126 html = formencode.htmlfill.render(
127 data,
127 data,
128 defaults=defaults,
128 defaults=defaults,
129 encoding="UTF-8",
129 encoding="UTF-8",
130 force_defaults=False
130 force_defaults=False
131 )
131 )
132 return Response(html)
132 return Response(html)
133
133
134 @LoginRequired()
134 @LoginRequired()
135 @HasPermissionAllDecorator('hg.admin')
135 @HasPermissionAllDecorator('hg.admin')
136 @CSRFRequired()
136 @CSRFRequired()
137 def settings_vcs_update(self):
137 def settings_vcs_update(self):
138 _ = self.request.translate
138 _ = self.request.translate
139 c = self.load_default_context()
139 c = self.load_default_context()
140 c.active = 'vcs'
140 c.active = 'vcs'
141
141
142 model = VcsSettingsModel()
142 model = VcsSettingsModel()
143 c.svn_branch_patterns = model.get_global_svn_branch_patterns()
143 c.svn_branch_patterns = model.get_global_svn_branch_patterns()
144 c.svn_tag_patterns = model.get_global_svn_tag_patterns()
144 c.svn_tag_patterns = model.get_global_svn_tag_patterns()
145
145
146 settings = self.request.registry.settings
146 settings = self.request.registry.settings
147 c.svn_proxy_generate_config = settings[generate_config]
147 c.svn_proxy_generate_config = settings[generate_config]
148
148
149 application_form = ApplicationUiSettingsForm(self.request.translate)()
149 application_form = ApplicationUiSettingsForm(self.request.translate)()
150
150
151 try:
151 try:
152 form_result = application_form.to_python(dict(self.request.POST))
152 form_result = application_form.to_python(dict(self.request.POST))
153 except formencode.Invalid as errors:
153 except formencode.Invalid as errors:
154 h.flash(
154 h.flash(
155 _("Some form inputs contain invalid data."),
155 _("Some form inputs contain invalid data."),
156 category='error')
156 category='error')
157 data = render('rhodecode:templates/admin/settings/settings.mako',
157 data = render('rhodecode:templates/admin/settings/settings.mako',
158 self._get_template_context(c), self.request)
158 self._get_template_context(c), self.request)
159 html = formencode.htmlfill.render(
159 html = formencode.htmlfill.render(
160 data,
160 data,
161 defaults=errors.value,
161 defaults=errors.value,
162 errors=errors.unpack_errors() or {},
162 errors=errors.unpack_errors() or {},
163 prefix_error=False,
163 prefix_error=False,
164 encoding="UTF-8",
164 encoding="UTF-8",
165 force_defaults=False
165 force_defaults=False
166 )
166 )
167 return Response(html)
167 return Response(html)
168
168
169 try:
169 try:
170 if c.visual.allow_repo_location_change:
171 model.update_global_path_setting(form_result['paths_root_path'])
172
173 model.update_global_ssl_setting(form_result['web_push_ssl'])
170 model.update_global_ssl_setting(form_result['web_push_ssl'])
174 model.update_global_hook_settings(form_result)
171 model.update_global_hook_settings(form_result)
175
172
176 model.create_or_update_global_svn_settings(form_result)
173 model.create_or_update_global_svn_settings(form_result)
177 model.create_or_update_global_hg_settings(form_result)
174 model.create_or_update_global_hg_settings(form_result)
178 model.create_or_update_global_git_settings(form_result)
175 model.create_or_update_global_git_settings(form_result)
179 model.create_or_update_global_pr_settings(form_result)
176 model.create_or_update_global_pr_settings(form_result)
180 except Exception:
177 except Exception:
181 log.exception("Exception while updating settings")
178 log.exception("Exception while updating settings")
182 h.flash(_('Error occurred during updating '
179 h.flash(_('Error occurred during updating '
183 'application settings'), category='error')
180 'application settings'), category='error')
184 else:
181 else:
185 Session().commit()
182 Session().commit()
186 h.flash(_('Updated VCS settings'), category='success')
183 h.flash(_('Updated VCS settings'), category='success')
187 raise HTTPFound(h.route_path('admin_settings_vcs'))
184 raise HTTPFound(h.route_path('admin_settings_vcs'))
188
185
189 data = render('rhodecode:templates/admin/settings/settings.mako',
186 data = render('rhodecode:templates/admin/settings/settings.mako',
190 self._get_template_context(c), self.request)
187 self._get_template_context(c), self.request)
191 html = formencode.htmlfill.render(
188 html = formencode.htmlfill.render(
192 data,
189 data,
193 defaults=self._form_defaults(),
190 defaults=self._form_defaults(),
194 encoding="UTF-8",
191 encoding="UTF-8",
195 force_defaults=False
192 force_defaults=False
196 )
193 )
197 return Response(html)
194 return Response(html)
198
195
199 @LoginRequired()
196 @LoginRequired()
200 @HasPermissionAllDecorator('hg.admin')
197 @HasPermissionAllDecorator('hg.admin')
201 @CSRFRequired()
198 @CSRFRequired()
202 def settings_vcs_delete_svn_pattern(self):
199 def settings_vcs_delete_svn_pattern(self):
203 delete_pattern_id = self.request.POST.get('delete_svn_pattern')
200 delete_pattern_id = self.request.POST.get('delete_svn_pattern')
204 model = VcsSettingsModel()
201 model = VcsSettingsModel()
205 try:
202 try:
206 model.delete_global_svn_pattern(delete_pattern_id)
203 model.delete_global_svn_pattern(delete_pattern_id)
207 except SettingNotFound:
204 except SettingNotFound:
208 log.exception(
205 log.exception(
209 'Failed to delete svn_pattern with id %s', delete_pattern_id)
206 'Failed to delete svn_pattern with id %s', delete_pattern_id)
210 raise HTTPNotFound()
207 raise HTTPNotFound()
211
208
212 Session().commit()
209 Session().commit()
213 return True
210 return True
214
211
215 @LoginRequired()
212 @LoginRequired()
216 @HasPermissionAllDecorator('hg.admin')
213 @HasPermissionAllDecorator('hg.admin')
217 def settings_mapping(self):
214 def settings_mapping(self):
218 c = self.load_default_context()
215 c = self.load_default_context()
219 c.active = 'mapping'
216 c.active = 'mapping'
220 c.storage_path = VcsSettingsModel().get_repos_location()
217 c.storage_path = get_rhodecode_repo_store_path()
221 data = render('rhodecode:templates/admin/settings/settings.mako',
218 data = render('rhodecode:templates/admin/settings/settings.mako',
222 self._get_template_context(c), self.request)
219 self._get_template_context(c), self.request)
223 html = formencode.htmlfill.render(
220 html = formencode.htmlfill.render(
224 data,
221 data,
225 defaults=self._form_defaults(),
222 defaults=self._form_defaults(),
226 encoding="UTF-8",
223 encoding="UTF-8",
227 force_defaults=False
224 force_defaults=False
228 )
225 )
229 return Response(html)
226 return Response(html)
230
227
231 @LoginRequired()
228 @LoginRequired()
232 @HasPermissionAllDecorator('hg.admin')
229 @HasPermissionAllDecorator('hg.admin')
233 @CSRFRequired()
230 @CSRFRequired()
234 def settings_mapping_update(self):
231 def settings_mapping_update(self):
235 _ = self.request.translate
232 _ = self.request.translate
236 c = self.load_default_context()
233 c = self.load_default_context()
237 c.active = 'mapping'
234 c.active = 'mapping'
238 rm_obsolete = self.request.POST.get('destroy', False)
235 rm_obsolete = self.request.POST.get('destroy', False)
239 invalidate_cache = self.request.POST.get('invalidate', False)
236 invalidate_cache = self.request.POST.get('invalidate', False)
240 log.debug('rescanning repo location with destroy obsolete=%s', rm_obsolete)
237 log.debug('rescanning repo location with destroy obsolete=%s', rm_obsolete)
241
238
242 if invalidate_cache:
239 if invalidate_cache:
243 log.debug('invalidating all repositories cache')
240 log.debug('invalidating all repositories cache')
244 for repo in Repository.get_all():
241 for repo in Repository.get_all():
245 ScmModel().mark_for_invalidation(repo.repo_name, delete=True)
242 ScmModel().mark_for_invalidation(repo.repo_name, delete=True)
246
243
247 filesystem_repos = ScmModel().repo_scan()
244 filesystem_repos = ScmModel().repo_scan()
248 added, removed = repo2db_mapper(filesystem_repos, rm_obsolete, force_hooks_rebuild=True)
245 added, removed = repo2db_mapper(filesystem_repos, rm_obsolete, force_hooks_rebuild=True)
249 PermissionModel().trigger_permission_flush()
246 PermissionModel().trigger_permission_flush()
250
247
251 def _repr(rm_repo):
248 def _repr(rm_repo):
252 return ', '.join(map(safe_str, rm_repo)) or '-'
249 return ', '.join(map(safe_str, rm_repo)) or '-'
253
250
254 h.flash(_('Repositories successfully '
251 h.flash(_('Repositories successfully '
255 'rescanned added: %s ; removed: %s') %
252 'rescanned added: %s ; removed: %s') %
256 (_repr(added), _repr(removed)),
253 (_repr(added), _repr(removed)),
257 category='success')
254 category='success')
258 raise HTTPFound(h.route_path('admin_settings_mapping'))
255 raise HTTPFound(h.route_path('admin_settings_mapping'))
259
256
260 @LoginRequired()
257 @LoginRequired()
261 @HasPermissionAllDecorator('hg.admin')
258 @HasPermissionAllDecorator('hg.admin')
262 def settings_global(self):
259 def settings_global(self):
263 c = self.load_default_context()
260 c = self.load_default_context()
264 c.active = 'global'
261 c.active = 'global'
265 c.personal_repo_group_default_pattern = RepoGroupModel()\
262 c.personal_repo_group_default_pattern = RepoGroupModel()\
266 .get_personal_group_name_pattern()
263 .get_personal_group_name_pattern()
267
264
268 data = render('rhodecode:templates/admin/settings/settings.mako',
265 data = render('rhodecode:templates/admin/settings/settings.mako',
269 self._get_template_context(c), self.request)
266 self._get_template_context(c), self.request)
270 html = formencode.htmlfill.render(
267 html = formencode.htmlfill.render(
271 data,
268 data,
272 defaults=self._form_defaults(),
269 defaults=self._form_defaults(),
273 encoding="UTF-8",
270 encoding="UTF-8",
274 force_defaults=False
271 force_defaults=False
275 )
272 )
276 return Response(html)
273 return Response(html)
277
274
278 @LoginRequired()
275 @LoginRequired()
279 @HasPermissionAllDecorator('hg.admin')
276 @HasPermissionAllDecorator('hg.admin')
280 @CSRFRequired()
277 @CSRFRequired()
281 def settings_global_update(self):
278 def settings_global_update(self):
282 _ = self.request.translate
279 _ = self.request.translate
283 c = self.load_default_context()
280 c = self.load_default_context()
284 c.active = 'global'
281 c.active = 'global'
285 c.personal_repo_group_default_pattern = RepoGroupModel()\
282 c.personal_repo_group_default_pattern = RepoGroupModel()\
286 .get_personal_group_name_pattern()
283 .get_personal_group_name_pattern()
287 application_form = ApplicationSettingsForm(self.request.translate)()
284 application_form = ApplicationSettingsForm(self.request.translate)()
288 try:
285 try:
289 form_result = application_form.to_python(dict(self.request.POST))
286 form_result = application_form.to_python(dict(self.request.POST))
290 except formencode.Invalid as errors:
287 except formencode.Invalid as errors:
291 h.flash(
288 h.flash(
292 _("Some form inputs contain invalid data."),
289 _("Some form inputs contain invalid data."),
293 category='error')
290 category='error')
294 data = render('rhodecode:templates/admin/settings/settings.mako',
291 data = render('rhodecode:templates/admin/settings/settings.mako',
295 self._get_template_context(c), self.request)
292 self._get_template_context(c), self.request)
296 html = formencode.htmlfill.render(
293 html = formencode.htmlfill.render(
297 data,
294 data,
298 defaults=errors.value,
295 defaults=errors.value,
299 errors=errors.unpack_errors() or {},
296 errors=errors.unpack_errors() or {},
300 prefix_error=False,
297 prefix_error=False,
301 encoding="UTF-8",
298 encoding="UTF-8",
302 force_defaults=False
299 force_defaults=False
303 )
300 )
304 return Response(html)
301 return Response(html)
305
302
306 settings = [
303 settings = [
307 ('title', 'rhodecode_title', 'unicode'),
304 ('title', 'rhodecode_title', 'unicode'),
308 ('realm', 'rhodecode_realm', 'unicode'),
305 ('realm', 'rhodecode_realm', 'unicode'),
309 ('pre_code', 'rhodecode_pre_code', 'unicode'),
306 ('pre_code', 'rhodecode_pre_code', 'unicode'),
310 ('post_code', 'rhodecode_post_code', 'unicode'),
307 ('post_code', 'rhodecode_post_code', 'unicode'),
311 ('captcha_public_key', 'rhodecode_captcha_public_key', 'unicode'),
308 ('captcha_public_key', 'rhodecode_captcha_public_key', 'unicode'),
312 ('captcha_private_key', 'rhodecode_captcha_private_key', 'unicode'),
309 ('captcha_private_key', 'rhodecode_captcha_private_key', 'unicode'),
313 ('create_personal_repo_group', 'rhodecode_create_personal_repo_group', 'bool'),
310 ('create_personal_repo_group', 'rhodecode_create_personal_repo_group', 'bool'),
314 ('personal_repo_group_pattern', 'rhodecode_personal_repo_group_pattern', 'unicode'),
311 ('personal_repo_group_pattern', 'rhodecode_personal_repo_group_pattern', 'unicode'),
315 ]
312 ]
316
313
317 try:
314 try:
318 for setting, form_key, type_ in settings:
315 for setting, form_key, type_ in settings:
319 sett = SettingsModel().create_or_update_setting(
316 sett = SettingsModel().create_or_update_setting(
320 setting, form_result[form_key], type_)
317 setting, form_result[form_key], type_)
321 Session().add(sett)
318 Session().add(sett)
322
319
323 Session().commit()
320 Session().commit()
324 SettingsModel().invalidate_settings_cache()
321 SettingsModel().invalidate_settings_cache()
325 h.flash(_('Updated application settings'), category='success')
322 h.flash(_('Updated application settings'), category='success')
326 except Exception:
323 except Exception:
327 log.exception("Exception while updating application settings")
324 log.exception("Exception while updating application settings")
328 h.flash(
325 h.flash(
329 _('Error occurred while updating application settings'),
326 _('Error occurred while updating application settings'),
330 category='error')
327 category='error')
331
328
332 raise HTTPFound(h.route_path('admin_settings_global'))
329 raise HTTPFound(h.route_path('admin_settings_global'))
333
330
334 @LoginRequired()
331 @LoginRequired()
335 @HasPermissionAllDecorator('hg.admin')
332 @HasPermissionAllDecorator('hg.admin')
336 def settings_visual(self):
333 def settings_visual(self):
337 c = self.load_default_context()
334 c = self.load_default_context()
338 c.active = 'visual'
335 c.active = 'visual'
339
336
340 data = render('rhodecode:templates/admin/settings/settings.mako',
337 data = render('rhodecode:templates/admin/settings/settings.mako',
341 self._get_template_context(c), self.request)
338 self._get_template_context(c), self.request)
342 html = formencode.htmlfill.render(
339 html = formencode.htmlfill.render(
343 data,
340 data,
344 defaults=self._form_defaults(),
341 defaults=self._form_defaults(),
345 encoding="UTF-8",
342 encoding="UTF-8",
346 force_defaults=False
343 force_defaults=False
347 )
344 )
348 return Response(html)
345 return Response(html)
349
346
350 @LoginRequired()
347 @LoginRequired()
351 @HasPermissionAllDecorator('hg.admin')
348 @HasPermissionAllDecorator('hg.admin')
352 @CSRFRequired()
349 @CSRFRequired()
353 def settings_visual_update(self):
350 def settings_visual_update(self):
354 _ = self.request.translate
351 _ = self.request.translate
355 c = self.load_default_context()
352 c = self.load_default_context()
356 c.active = 'visual'
353 c.active = 'visual'
357 application_form = ApplicationVisualisationForm(self.request.translate)()
354 application_form = ApplicationVisualisationForm(self.request.translate)()
358 try:
355 try:
359 form_result = application_form.to_python(dict(self.request.POST))
356 form_result = application_form.to_python(dict(self.request.POST))
360 except formencode.Invalid as errors:
357 except formencode.Invalid as errors:
361 h.flash(
358 h.flash(
362 _("Some form inputs contain invalid data."),
359 _("Some form inputs contain invalid data."),
363 category='error')
360 category='error')
364 data = render('rhodecode:templates/admin/settings/settings.mako',
361 data = render('rhodecode:templates/admin/settings/settings.mako',
365 self._get_template_context(c), self.request)
362 self._get_template_context(c), self.request)
366 html = formencode.htmlfill.render(
363 html = formencode.htmlfill.render(
367 data,
364 data,
368 defaults=errors.value,
365 defaults=errors.value,
369 errors=errors.unpack_errors() or {},
366 errors=errors.unpack_errors() or {},
370 prefix_error=False,
367 prefix_error=False,
371 encoding="UTF-8",
368 encoding="UTF-8",
372 force_defaults=False
369 force_defaults=False
373 )
370 )
374 return Response(html)
371 return Response(html)
375
372
376 try:
373 try:
377 settings = [
374 settings = [
378 ('show_public_icon', 'rhodecode_show_public_icon', 'bool'),
375 ('show_public_icon', 'rhodecode_show_public_icon', 'bool'),
379 ('show_private_icon', 'rhodecode_show_private_icon', 'bool'),
376 ('show_private_icon', 'rhodecode_show_private_icon', 'bool'),
380 ('stylify_metatags', 'rhodecode_stylify_metatags', 'bool'),
377 ('stylify_metatags', 'rhodecode_stylify_metatags', 'bool'),
381 ('repository_fields', 'rhodecode_repository_fields', 'bool'),
378 ('repository_fields', 'rhodecode_repository_fields', 'bool'),
382 ('dashboard_items', 'rhodecode_dashboard_items', 'int'),
379 ('dashboard_items', 'rhodecode_dashboard_items', 'int'),
383 ('admin_grid_items', 'rhodecode_admin_grid_items', 'int'),
380 ('admin_grid_items', 'rhodecode_admin_grid_items', 'int'),
384 ('show_version', 'rhodecode_show_version', 'bool'),
381 ('show_version', 'rhodecode_show_version', 'bool'),
385 ('use_gravatar', 'rhodecode_use_gravatar', 'bool'),
382 ('use_gravatar', 'rhodecode_use_gravatar', 'bool'),
386 ('markup_renderer', 'rhodecode_markup_renderer', 'unicode'),
383 ('markup_renderer', 'rhodecode_markup_renderer', 'unicode'),
387 ('gravatar_url', 'rhodecode_gravatar_url', 'unicode'),
384 ('gravatar_url', 'rhodecode_gravatar_url', 'unicode'),
388 ('clone_uri_tmpl', 'rhodecode_clone_uri_tmpl', 'unicode'),
385 ('clone_uri_tmpl', 'rhodecode_clone_uri_tmpl', 'unicode'),
389 ('clone_uri_id_tmpl', 'rhodecode_clone_uri_id_tmpl', 'unicode'),
386 ('clone_uri_id_tmpl', 'rhodecode_clone_uri_id_tmpl', 'unicode'),
390 ('clone_uri_ssh_tmpl', 'rhodecode_clone_uri_ssh_tmpl', 'unicode'),
387 ('clone_uri_ssh_tmpl', 'rhodecode_clone_uri_ssh_tmpl', 'unicode'),
391 ('support_url', 'rhodecode_support_url', 'unicode'),
388 ('support_url', 'rhodecode_support_url', 'unicode'),
392 ('show_revision_number', 'rhodecode_show_revision_number', 'bool'),
389 ('show_revision_number', 'rhodecode_show_revision_number', 'bool'),
393 ('show_sha_length', 'rhodecode_show_sha_length', 'int'),
390 ('show_sha_length', 'rhodecode_show_sha_length', 'int'),
394 ]
391 ]
395 for setting, form_key, type_ in settings:
392 for setting, form_key, type_ in settings:
396 sett = SettingsModel().create_or_update_setting(
393 sett = SettingsModel().create_or_update_setting(
397 setting, form_result[form_key], type_)
394 setting, form_result[form_key], type_)
398 Session().add(sett)
395 Session().add(sett)
399
396
400 Session().commit()
397 Session().commit()
401 SettingsModel().invalidate_settings_cache()
398 SettingsModel().invalidate_settings_cache()
402 h.flash(_('Updated visualisation settings'), category='success')
399 h.flash(_('Updated visualisation settings'), category='success')
403 except Exception:
400 except Exception:
404 log.exception("Exception updating visualization settings")
401 log.exception("Exception updating visualization settings")
405 h.flash(_('Error occurred while updating '
402 h.flash(_('Error occurred while updating '
406 'visualisation settings'),
403 'visualisation settings'),
407 category='error')
404 category='error')
408
405
409 raise HTTPFound(h.route_path('admin_settings_visual'))
406 raise HTTPFound(h.route_path('admin_settings_visual'))
410
407
411 @LoginRequired()
408 @LoginRequired()
412 @HasPermissionAllDecorator('hg.admin')
409 @HasPermissionAllDecorator('hg.admin')
413 def settings_issuetracker(self):
410 def settings_issuetracker(self):
414 c = self.load_default_context()
411 c = self.load_default_context()
415 c.active = 'issuetracker'
412 c.active = 'issuetracker'
416 defaults = c.rc_config
413 defaults = c.rc_config
417
414
418 entry_key = 'rhodecode_issuetracker_pat_'
415 entry_key = 'rhodecode_issuetracker_pat_'
419
416
420 c.issuetracker_entries = {}
417 c.issuetracker_entries = {}
421 for k, v in defaults.items():
418 for k, v in defaults.items():
422 if k.startswith(entry_key):
419 if k.startswith(entry_key):
423 uid = k[len(entry_key):]
420 uid = k[len(entry_key):]
424 c.issuetracker_entries[uid] = None
421 c.issuetracker_entries[uid] = None
425
422
426 for uid in c.issuetracker_entries:
423 for uid in c.issuetracker_entries:
427 c.issuetracker_entries[uid] = AttributeDict({
424 c.issuetracker_entries[uid] = AttributeDict({
428 'pat': defaults.get('rhodecode_issuetracker_pat_' + uid),
425 'pat': defaults.get('rhodecode_issuetracker_pat_' + uid),
429 'url': defaults.get('rhodecode_issuetracker_url_' + uid),
426 'url': defaults.get('rhodecode_issuetracker_url_' + uid),
430 'pref': defaults.get('rhodecode_issuetracker_pref_' + uid),
427 'pref': defaults.get('rhodecode_issuetracker_pref_' + uid),
431 'desc': defaults.get('rhodecode_issuetracker_desc_' + uid),
428 'desc': defaults.get('rhodecode_issuetracker_desc_' + uid),
432 })
429 })
433
430
434 return self._get_template_context(c)
431 return self._get_template_context(c)
435
432
436 @LoginRequired()
433 @LoginRequired()
437 @HasPermissionAllDecorator('hg.admin')
434 @HasPermissionAllDecorator('hg.admin')
438 @CSRFRequired()
435 @CSRFRequired()
439 def settings_issuetracker_test(self):
436 def settings_issuetracker_test(self):
440 error_container = []
437 error_container = []
441
438
442 urlified_commit = h.urlify_commit_message(
439 urlified_commit = h.urlify_commit_message(
443 self.request.POST.get('test_text', ''),
440 self.request.POST.get('test_text', ''),
444 'repo_group/test_repo1', error_container=error_container)
441 'repo_group/test_repo1', error_container=error_container)
445 if error_container:
442 if error_container:
446 def converter(inp):
443 def converter(inp):
447 return h.html_escape(inp)
444 return h.html_escape(inp)
448
445
449 return 'ERRORS: ' + '\n'.join(map(converter, error_container))
446 return 'ERRORS: ' + '\n'.join(map(converter, error_container))
450
447
451 return urlified_commit
448 return urlified_commit
452
449
453 @LoginRequired()
450 @LoginRequired()
454 @HasPermissionAllDecorator('hg.admin')
451 @HasPermissionAllDecorator('hg.admin')
455 @CSRFRequired()
452 @CSRFRequired()
456 def settings_issuetracker_update(self):
453 def settings_issuetracker_update(self):
457 _ = self.request.translate
454 _ = self.request.translate
458 self.load_default_context()
455 self.load_default_context()
459 settings_model = IssueTrackerSettingsModel()
456 settings_model = IssueTrackerSettingsModel()
460
457
461 try:
458 try:
462 form = IssueTrackerPatternsForm(self.request.translate)()
459 form = IssueTrackerPatternsForm(self.request.translate)()
463 data = form.to_python(self.request.POST)
460 data = form.to_python(self.request.POST)
464 except formencode.Invalid as errors:
461 except formencode.Invalid as errors:
465 log.exception('Failed to add new pattern')
462 log.exception('Failed to add new pattern')
466 error = errors
463 error = errors
467 h.flash(_(f'Invalid issue tracker pattern: {error}'),
464 h.flash(_(f'Invalid issue tracker pattern: {error}'),
468 category='error')
465 category='error')
469 raise HTTPFound(h.route_path('admin_settings_issuetracker'))
466 raise HTTPFound(h.route_path('admin_settings_issuetracker'))
470
467
471 if data:
468 if data:
472 for uid in data.get('delete_patterns', []):
469 for uid in data.get('delete_patterns', []):
473 settings_model.delete_entries(uid)
470 settings_model.delete_entries(uid)
474
471
475 for pattern in data.get('patterns', []):
472 for pattern in data.get('patterns', []):
476 for setting, value, type_ in pattern:
473 for setting, value, type_ in pattern:
477 sett = settings_model.create_or_update_setting(
474 sett = settings_model.create_or_update_setting(
478 setting, value, type_)
475 setting, value, type_)
479 Session().add(sett)
476 Session().add(sett)
480
477
481 Session().commit()
478 Session().commit()
482
479
483 SettingsModel().invalidate_settings_cache()
480 SettingsModel().invalidate_settings_cache()
484 h.flash(_('Updated issue tracker entries'), category='success')
481 h.flash(_('Updated issue tracker entries'), category='success')
485 raise HTTPFound(h.route_path('admin_settings_issuetracker'))
482 raise HTTPFound(h.route_path('admin_settings_issuetracker'))
486
483
487 @LoginRequired()
484 @LoginRequired()
488 @HasPermissionAllDecorator('hg.admin')
485 @HasPermissionAllDecorator('hg.admin')
489 @CSRFRequired()
486 @CSRFRequired()
490 def settings_issuetracker_delete(self):
487 def settings_issuetracker_delete(self):
491 _ = self.request.translate
488 _ = self.request.translate
492 self.load_default_context()
489 self.load_default_context()
493 uid = self.request.POST.get('uid')
490 uid = self.request.POST.get('uid')
494 try:
491 try:
495 IssueTrackerSettingsModel().delete_entries(uid)
492 IssueTrackerSettingsModel().delete_entries(uid)
496 except Exception:
493 except Exception:
497 log.exception('Failed to delete issue tracker setting %s', uid)
494 log.exception('Failed to delete issue tracker setting %s', uid)
498 raise HTTPNotFound()
495 raise HTTPNotFound()
499
496
500 SettingsModel().invalidate_settings_cache()
497 SettingsModel().invalidate_settings_cache()
501 h.flash(_('Removed issue tracker entry.'), category='success')
498 h.flash(_('Removed issue tracker entry.'), category='success')
502
499
503 return {'deleted': uid}
500 return {'deleted': uid}
504
501
505 @LoginRequired()
502 @LoginRequired()
506 @HasPermissionAllDecorator('hg.admin')
503 @HasPermissionAllDecorator('hg.admin')
507 def settings_email(self):
504 def settings_email(self):
508 c = self.load_default_context()
505 c = self.load_default_context()
509 c.active = 'email'
506 c.active = 'email'
510 c.rhodecode_ini = rhodecode.CONFIG
507 c.rhodecode_ini = rhodecode.CONFIG
511
508
512 data = render('rhodecode:templates/admin/settings/settings.mako',
509 data = render('rhodecode:templates/admin/settings/settings.mako',
513 self._get_template_context(c), self.request)
510 self._get_template_context(c), self.request)
514 html = formencode.htmlfill.render(
511 html = formencode.htmlfill.render(
515 data,
512 data,
516 defaults=self._form_defaults(),
513 defaults=self._form_defaults(),
517 encoding="UTF-8",
514 encoding="UTF-8",
518 force_defaults=False
515 force_defaults=False
519 )
516 )
520 return Response(html)
517 return Response(html)
521
518
522 @LoginRequired()
519 @LoginRequired()
523 @HasPermissionAllDecorator('hg.admin')
520 @HasPermissionAllDecorator('hg.admin')
524 @CSRFRequired()
521 @CSRFRequired()
525 def settings_email_update(self):
522 def settings_email_update(self):
526 _ = self.request.translate
523 _ = self.request.translate
527 c = self.load_default_context()
524 c = self.load_default_context()
528 c.active = 'email'
525 c.active = 'email'
529
526
530 test_email = self.request.POST.get('test_email')
527 test_email = self.request.POST.get('test_email')
531
528
532 if not test_email:
529 if not test_email:
533 h.flash(_('Please enter email address'), category='error')
530 h.flash(_('Please enter email address'), category='error')
534 raise HTTPFound(h.route_path('admin_settings_email'))
531 raise HTTPFound(h.route_path('admin_settings_email'))
535
532
536 email_kwargs = {
533 email_kwargs = {
537 'date': datetime.datetime.now(),
534 'date': datetime.datetime.now(),
538 'user': self._rhodecode_db_user
535 'user': self._rhodecode_db_user
539 }
536 }
540
537
541 (subject, email_body, email_body_plaintext) = EmailNotificationModel().render_email(
538 (subject, email_body, email_body_plaintext) = EmailNotificationModel().render_email(
542 EmailNotificationModel.TYPE_EMAIL_TEST, **email_kwargs)
539 EmailNotificationModel.TYPE_EMAIL_TEST, **email_kwargs)
543
540
544 recipients = [test_email] if test_email else None
541 recipients = [test_email] if test_email else None
545
542
546 run_task(tasks.send_email, recipients, subject,
543 run_task(tasks.send_email, recipients, subject,
547 email_body_plaintext, email_body)
544 email_body_plaintext, email_body)
548
545
549 h.flash(_('Send email task created'), category='success')
546 h.flash(_('Send email task created'), category='success')
550 raise HTTPFound(h.route_path('admin_settings_email'))
547 raise HTTPFound(h.route_path('admin_settings_email'))
551
548
552 @LoginRequired()
549 @LoginRequired()
553 @HasPermissionAllDecorator('hg.admin')
550 @HasPermissionAllDecorator('hg.admin')
554 def settings_hooks(self):
551 def settings_hooks(self):
555 c = self.load_default_context()
552 c = self.load_default_context()
556 c.active = 'hooks'
553 c.active = 'hooks'
557
554
558 model = SettingsModel()
555 model = SettingsModel()
559 c.hooks = model.get_builtin_hooks()
556 c.hooks = model.get_builtin_hooks()
560 c.custom_hooks = model.get_custom_hooks()
557 c.custom_hooks = model.get_custom_hooks()
561
558
562 data = render('rhodecode:templates/admin/settings/settings.mako',
559 data = render('rhodecode:templates/admin/settings/settings.mako',
563 self._get_template_context(c), self.request)
560 self._get_template_context(c), self.request)
564 html = formencode.htmlfill.render(
561 html = formencode.htmlfill.render(
565 data,
562 data,
566 defaults=self._form_defaults(),
563 defaults=self._form_defaults(),
567 encoding="UTF-8",
564 encoding="UTF-8",
568 force_defaults=False
565 force_defaults=False
569 )
566 )
570 return Response(html)
567 return Response(html)
571
568
572 @LoginRequired()
569 @LoginRequired()
573 @HasPermissionAllDecorator('hg.admin')
570 @HasPermissionAllDecorator('hg.admin')
574 @CSRFRequired()
571 @CSRFRequired()
575 def settings_hooks_update(self):
572 def settings_hooks_update(self):
576 _ = self.request.translate
573 _ = self.request.translate
577 c = self.load_default_context()
574 c = self.load_default_context()
578 c.active = 'hooks'
575 c.active = 'hooks'
579 if c.visual.allow_custom_hooks_settings:
576 if c.visual.allow_custom_hooks_settings:
580 ui_key = self.request.POST.get('new_hook_ui_key')
577 ui_key = self.request.POST.get('new_hook_ui_key')
581 ui_value = self.request.POST.get('new_hook_ui_value')
578 ui_value = self.request.POST.get('new_hook_ui_value')
582
579
583 hook_id = self.request.POST.get('hook_id')
580 hook_id = self.request.POST.get('hook_id')
584 new_hook = False
581 new_hook = False
585
582
586 model = SettingsModel()
583 model = SettingsModel()
587 try:
584 try:
588 if ui_value and ui_key:
585 if ui_value and ui_key:
589 model.create_or_update_hook(ui_key, ui_value)
586 model.create_or_update_hook(ui_key, ui_value)
590 h.flash(_('Added new hook'), category='success')
587 h.flash(_('Added new hook'), category='success')
591 new_hook = True
588 new_hook = True
592 elif hook_id:
589 elif hook_id:
593 RhodeCodeUi.delete(hook_id)
590 RhodeCodeUi.delete(hook_id)
594 Session().commit()
591 Session().commit()
595
592
596 # check for edits
593 # check for edits
597 update = False
594 update = False
598 _d = self.request.POST.dict_of_lists()
595 _d = self.request.POST.dict_of_lists()
599 for k, v in zip(_d.get('hook_ui_key', []),
596 for k, v in zip(_d.get('hook_ui_key', []),
600 _d.get('hook_ui_value_new', [])):
597 _d.get('hook_ui_value_new', [])):
601 model.create_or_update_hook(k, v)
598 model.create_or_update_hook(k, v)
602 update = True
599 update = True
603
600
604 if update and not new_hook:
601 if update and not new_hook:
605 h.flash(_('Updated hooks'), category='success')
602 h.flash(_('Updated hooks'), category='success')
606 Session().commit()
603 Session().commit()
607 except Exception:
604 except Exception:
608 log.exception("Exception during hook creation")
605 log.exception("Exception during hook creation")
609 h.flash(_('Error occurred during hook creation'),
606 h.flash(_('Error occurred during hook creation'),
610 category='error')
607 category='error')
611
608
612 raise HTTPFound(h.route_path('admin_settings_hooks'))
609 raise HTTPFound(h.route_path('admin_settings_hooks'))
613
610
614 @LoginRequired()
611 @LoginRequired()
615 @HasPermissionAllDecorator('hg.admin')
612 @HasPermissionAllDecorator('hg.admin')
616 def settings_search(self):
613 def settings_search(self):
617 c = self.load_default_context()
614 c = self.load_default_context()
618 c.active = 'search'
615 c.active = 'search'
619
616
620 c.searcher = searcher_from_config(self.request.registry.settings)
617 c.searcher = searcher_from_config(self.request.registry.settings)
621 c.statistics = c.searcher.statistics(self.request.translate)
618 c.statistics = c.searcher.statistics(self.request.translate)
622
619
623 return self._get_template_context(c)
620 return self._get_template_context(c)
624
621
625 @LoginRequired()
622 @LoginRequired()
626 @HasPermissionAllDecorator('hg.admin')
623 @HasPermissionAllDecorator('hg.admin')
627 def settings_labs(self):
624 def settings_labs(self):
628 c = self.load_default_context()
625 c = self.load_default_context()
629 if not c.labs_active:
626 if not c.labs_active:
630 raise HTTPFound(h.route_path('admin_settings'))
627 raise HTTPFound(h.route_path('admin_settings'))
631
628
632 c.active = 'labs'
629 c.active = 'labs'
633 c.lab_settings = _LAB_SETTINGS
630 c.lab_settings = _LAB_SETTINGS
634
631
635 data = render('rhodecode:templates/admin/settings/settings.mako',
632 data = render('rhodecode:templates/admin/settings/settings.mako',
636 self._get_template_context(c), self.request)
633 self._get_template_context(c), self.request)
637 html = formencode.htmlfill.render(
634 html = formencode.htmlfill.render(
638 data,
635 data,
639 defaults=self._form_defaults(),
636 defaults=self._form_defaults(),
640 encoding="UTF-8",
637 encoding="UTF-8",
641 force_defaults=False
638 force_defaults=False
642 )
639 )
643 return Response(html)
640 return Response(html)
644
641
645 @LoginRequired()
642 @LoginRequired()
646 @HasPermissionAllDecorator('hg.admin')
643 @HasPermissionAllDecorator('hg.admin')
647 @CSRFRequired()
644 @CSRFRequired()
648 def settings_labs_update(self):
645 def settings_labs_update(self):
649 _ = self.request.translate
646 _ = self.request.translate
650 c = self.load_default_context()
647 c = self.load_default_context()
651 c.active = 'labs'
648 c.active = 'labs'
652
649
653 application_form = LabsSettingsForm(self.request.translate)()
650 application_form = LabsSettingsForm(self.request.translate)()
654 try:
651 try:
655 form_result = application_form.to_python(dict(self.request.POST))
652 form_result = application_form.to_python(dict(self.request.POST))
656 except formencode.Invalid as errors:
653 except formencode.Invalid as errors:
657 h.flash(
654 h.flash(
658 _("Some form inputs contain invalid data."),
655 _("Some form inputs contain invalid data."),
659 category='error')
656 category='error')
660 data = render('rhodecode:templates/admin/settings/settings.mako',
657 data = render('rhodecode:templates/admin/settings/settings.mako',
661 self._get_template_context(c), self.request)
658 self._get_template_context(c), self.request)
662 html = formencode.htmlfill.render(
659 html = formencode.htmlfill.render(
663 data,
660 data,
664 defaults=errors.value,
661 defaults=errors.value,
665 errors=errors.unpack_errors() or {},
662 errors=errors.unpack_errors() or {},
666 prefix_error=False,
663 prefix_error=False,
667 encoding="UTF-8",
664 encoding="UTF-8",
668 force_defaults=False
665 force_defaults=False
669 )
666 )
670 return Response(html)
667 return Response(html)
671
668
672 try:
669 try:
673 session = Session()
670 session = Session()
674 for setting in _LAB_SETTINGS:
671 for setting in _LAB_SETTINGS:
675 setting_name = setting.key[len('rhodecode_'):]
672 setting_name = setting.key[len('rhodecode_'):]
676 sett = SettingsModel().create_or_update_setting(
673 sett = SettingsModel().create_or_update_setting(
677 setting_name, form_result[setting.key], setting.type)
674 setting_name, form_result[setting.key], setting.type)
678 session.add(sett)
675 session.add(sett)
679
676
680 except Exception:
677 except Exception:
681 log.exception('Exception while updating lab settings')
678 log.exception('Exception while updating lab settings')
682 h.flash(_('Error occurred while updating labs settings'),
679 h.flash(_('Error occurred while updating labs settings'),
683 category='error')
680 category='error')
684 else:
681 else:
685 Session().commit()
682 Session().commit()
686 SettingsModel().invalidate_settings_cache()
683 SettingsModel().invalidate_settings_cache()
687 h.flash(_('Updated Labs settings'), category='success')
684 h.flash(_('Updated Labs settings'), category='success')
688 raise HTTPFound(h.route_path('admin_settings_labs'))
685 raise HTTPFound(h.route_path('admin_settings_labs'))
689
686
690 data = render('rhodecode:templates/admin/settings/settings.mako',
687 data = render('rhodecode:templates/admin/settings/settings.mako',
691 self._get_template_context(c), self.request)
688 self._get_template_context(c), self.request)
692 html = formencode.htmlfill.render(
689 html = formencode.htmlfill.render(
693 data,
690 data,
694 defaults=self._form_defaults(),
691 defaults=self._form_defaults(),
695 encoding="UTF-8",
692 encoding="UTF-8",
696 force_defaults=False
693 force_defaults=False
697 )
694 )
698 return Response(html)
695 return Response(html)
699
696
700
697
701 # :param key: name of the setting including the 'rhodecode_' prefix
698 # :param key: name of the setting including the 'rhodecode_' prefix
702 # :param type: the RhodeCodeSetting type to use.
699 # :param type: the RhodeCodeSetting type to use.
703 # :param group: the i18ned group in which we should display this setting
700 # :param group: the i18ned group in which we should display this setting
704 # :param label: the i18ned label we should display for this setting
701 # :param label: the i18ned label we should display for this setting
705 # :param help: the i18ned help we should display for this setting
702 # :param help: the i18ned help we should display for this setting
706 LabSetting = collections.namedtuple(
703 LabSetting = collections.namedtuple(
707 'LabSetting', ('key', 'type', 'group', 'label', 'help'))
704 'LabSetting', ('key', 'type', 'group', 'label', 'help'))
708
705
709
706
710 # This list has to be kept in sync with the form
707 # This list has to be kept in sync with the form
711 # rhodecode.model.forms.LabsSettingsForm.
708 # rhodecode.model.forms.LabsSettingsForm.
712 _LAB_SETTINGS = [
709 _LAB_SETTINGS = [
713
710
714 ]
711 ]
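The settings_issuetracker view above rebuilds per-tracker entries from flat keys of the form 'rhodecode_issuetracker_<field>_<uid>'. A minimal standalone sketch of that grouping idea; the helper name and sample values below are illustrative and not part of RhodeCode:

# Hypothetical helper -- not RhodeCode code -- mirroring the uid grouping above.
def group_issuetracker_entries(flat_settings, prefix='rhodecode_issuetracker_'):
    """Group flat '<prefix><field>_<uid>' keys into {uid: {field: value}}."""
    fields = ('pat', 'url', 'pref', 'desc')
    entries = {}
    for field in fields:
        field_prefix = f'{prefix}{field}_'
        for key, value in flat_settings.items():
            if key.startswith(field_prefix):
                uid = key[len(field_prefix):]
                entries.setdefault(uid, {})[field] = value
    return entries

sample = {
    'rhodecode_issuetracker_pat_abc123': r'#(\d+)',
    'rhodecode_issuetracker_url_abc123': 'https://tracker.example.com/issue/${id}',
    'rhodecode_issuetracker_pref_abc123': 'TRK',
    'rhodecode_issuetracker_desc_abc123': 'Example tracker',
}
print(group_issuetracker_entries(sample))
# -> {'abc123': {'pat': ..., 'url': ..., 'pref': 'TRK', 'desc': 'Example tracker'}}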
@@ -1,97 +1,97 @@
1 # Copyright (C) 2016-2023 RhodeCode GmbH
1 # Copyright (C) 2016-2023 RhodeCode GmbH
2 #
2 #
3 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
4 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
5 # (only), as published by the Free Software Foundation.
5 # (only), as published by the Free Software Foundation.
6 #
6 #
7 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
10 # GNU General Public License for more details.
11 #
11 #
12 # You should have received a copy of the GNU Affero General Public License
12 # You should have received a copy of the GNU Affero General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 #
14 #
15 # This program is dual-licensed. If you wish to learn more about the
15 # This program is dual-licensed. If you wish to learn more about the
16 # RhodeCode Enterprise Edition, including its added features, Support services,
16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18
18
19 import codecs
19 import codecs
20 import logging
20 import logging
21 import os
21 import os
22 from pyramid.renderers import render
22 from pyramid.renderers import render
23
23
24 from rhodecode.events import trigger
24 from rhodecode.events import trigger
25 from rhodecode.lib.utils import get_rhodecode_realm, get_rhodecode_base_path
25 from rhodecode.lib.utils import get_rhodecode_realm, get_rhodecode_repo_store_path
26 from rhodecode.lib.utils2 import str2bool
26 from rhodecode.lib.utils2 import str2bool
27 from rhodecode.model.db import RepoGroup
27 from rhodecode.model.db import RepoGroup
28
28
29 from . import config_keys
29 from . import config_keys
30 from .events import ModDavSvnConfigChange
30 from .events import ModDavSvnConfigChange
31
31
32
32
33 log = logging.getLogger(__name__)
33 log = logging.getLogger(__name__)
34
34
35
35
36 def write_mod_dav_svn_config(settings):
36 def write_mod_dav_svn_config(settings):
37 use_ssl = str2bool(settings['force_https'])
37 use_ssl = str2bool(settings['force_https'])
38 file_path = settings[config_keys.config_file_path]
38 file_path = settings[config_keys.config_file_path]
39 config = _render_mod_dav_svn_config(
39 config = _render_mod_dav_svn_config(
40 use_ssl=use_ssl,
40 use_ssl=use_ssl,
41 parent_path_root=get_rhodecode_base_path(),
41 parent_path_root=get_rhodecode_repo_store_path(),
42 list_parent_path=settings[config_keys.list_parent_path],
42 list_parent_path=settings[config_keys.list_parent_path],
43 location_root=settings[config_keys.location_root],
43 location_root=settings[config_keys.location_root],
44 repo_groups=RepoGroup.get_all_repo_groups(),
44 repo_groups=RepoGroup.get_all_repo_groups(),
45 realm=get_rhodecode_realm(), template=settings[config_keys.template])
45 realm=get_rhodecode_realm(), template=settings[config_keys.template])
46 _write_mod_dav_svn_config(config, file_path)
46 _write_mod_dav_svn_config(config, file_path)
47 return file_path
47 return file_path
48
48
49
49
50 def generate_mod_dav_svn_config(registry):
50 def generate_mod_dav_svn_config(registry):
51 """
51 """
52 Generate the configuration file for use with subversion's mod_dav_svn
52 Generate the configuration file for use with subversion's mod_dav_svn
53 module. The configuration has to contain a <Location> block for each
53 module. The configuration has to contain a <Location> block for each
54 available repository group because the mod_dav_svn module does not support
54 available repository group because the mod_dav_svn module does not support
55 repositories organized in sub folders.
55 repositories organized in sub folders.
56 """
56 """
57 settings = registry.settings
57 settings = registry.settings
58 file_path = write_mod_dav_svn_config(settings)
58 file_path = write_mod_dav_svn_config(settings)
59
59
60 # Trigger an event on mod dav svn configuration change.
60 # Trigger an event on mod dav svn configuration change.
61 trigger(ModDavSvnConfigChange(), registry)
61 trigger(ModDavSvnConfigChange(), registry)
62 return file_path
62 return file_path
63
63
64
64
65 def _render_mod_dav_svn_config(
65 def _render_mod_dav_svn_config(
66 parent_path_root, list_parent_path, location_root, repo_groups, realm,
66 parent_path_root, list_parent_path, location_root, repo_groups, realm,
67 use_ssl, template):
67 use_ssl, template):
68 """
68 """
69 Render mod_dav_svn configuration to string.
69 Render mod_dav_svn configuration to string.
70 """
70 """
71 repo_group_paths = []
71 repo_group_paths = []
72 for repo_group in repo_groups:
72 for repo_group in repo_groups:
73 group_path = repo_group.full_path_splitted
73 group_path = repo_group.full_path_splitted
74 location = os.path.join(location_root, *group_path)
74 location = os.path.join(location_root, *group_path)
75 parent_path = os.path.join(parent_path_root, *group_path)
75 parent_path = os.path.join(parent_path_root, *group_path)
76 repo_group_paths.append((location, parent_path))
76 repo_group_paths.append((location, parent_path))
77
77
78 context = {
78 context = {
79 'location_root': location_root,
79 'location_root': location_root,
80 'parent_path_root': parent_path_root,
80 'parent_path_root': parent_path_root,
81 'repo_group_paths': repo_group_paths,
81 'repo_group_paths': repo_group_paths,
82 'svn_list_parent_path': list_parent_path,
82 'svn_list_parent_path': list_parent_path,
83 'rhodecode_realm': realm,
83 'rhodecode_realm': realm,
84 'use_https': use_ssl,
84 'use_https': use_ssl,
85 }
85 }
86 template = template or \
86 template = template or \
87 'rhodecode:apps/svn_support/templates/mod-dav-svn.conf.mako'
87 'rhodecode:apps/svn_support/templates/mod-dav-svn.conf.mako'
88 # Render the configuration template to string.
88 # Render the configuration template to string.
89 return render(template, context)
89 return render(template, context)
90
90
91
91
92 def _write_mod_dav_svn_config(config, filepath):
92 def _write_mod_dav_svn_config(config, filepath):
93 """
93 """
94 Write mod_dav_svn config to file.
94 Write mod_dav_svn config to file.
95 """
95 """
96 with codecs.open(filepath, 'w', encoding='utf-8') as f:
96 with codecs.open(filepath, 'w', encoding='utf-8') as f:
97 f.write(config)
97 f.write(config)
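The _render_mod_dav_svn_config helper above pairs an Apache <Location> path with a filesystem parent path for every repository group. A standalone sketch of that pairing step; the function name, roots and group paths are assumed examples, not the actual RepoGroup model:

# Illustrative only: assumed roots and group paths, not the RepoGroup model.
import os

def build_repo_group_paths(group_paths, location_root='/',
                           parent_path_root='/var/opt/rhodecode_repo_store'):
    """Pair an Apache <Location> path with the on-disk parent path per group."""
    pairs = []
    for group_path in group_paths:            # e.g. ['team', 'backend']
        location = os.path.join(location_root, *group_path)
        parent_path = os.path.join(parent_path_root, *group_path)
        pairs.append((location, parent_path))
    return pairs

print(build_repo_group_paths([['team'], ['team', 'backend']]))
# -> [('/team', '/var/opt/rhodecode_repo_store/team'),
#     ('/team/backend', '/var/opt/rhodecode_repo_store/team/backend')]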
@@ -1,199 +1,201 @@
1 # Copyright (C) 2010-2023 RhodeCode GmbH
1 # Copyright (C) 2010-2023 RhodeCode GmbH
2 #
2 #
3 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
4 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
5 # (only), as published by the Free Software Foundation.
5 # (only), as published by the Free Software Foundation.
6 #
6 #
7 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
10 # GNU General Public License for more details.
11 #
11 #
12 # You should have received a copy of the GNU Affero General Public License
12 # You should have received a copy of the GNU Affero General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 #
14 #
15 # This program is dual-licensed. If you wish to learn more about the
15 # This program is dual-licensed. If you wish to learn more about the
16 # RhodeCode Enterprise Edition, including its added features, Support services,
16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18
18
19 import os
19 import os
20 import tempfile
20 import tempfile
21 import logging
21 import logging
22
22
23 from pyramid.settings import asbool
23 from pyramid.settings import asbool
24
24
25 from rhodecode.config.settings_maker import SettingsMaker
25 from rhodecode.config.settings_maker import SettingsMaker
26 from rhodecode.config import utils as config_utils
26 from rhodecode.config import utils as config_utils
27
27
28 log = logging.getLogger(__name__)
28 log = logging.getLogger(__name__)
29
29
30
30
31 def sanitize_settings_and_apply_defaults(global_config, settings):
31 def sanitize_settings_and_apply_defaults(global_config, settings):
32 """
32 """
33 Applies settings defaults and does all type conversion.
33 Applies settings defaults and does all type conversion.
34
34
35 We would move all settings parsing and preparation into this place, so that
35 We would move all settings parsing and preparation into this place, so that
36 we have only one place left which deals with this part. The remaining parts
36 we have only one place left which deals with this part. The remaining parts
37 of the application would start to rely fully on well-prepared settings.
37 of the application would start to rely fully on well-prepared settings.
38
38
39 This piece would later be split up per topic to avoid a big fat monster
39 This piece would later be split up per topic to avoid a big fat monster
40 function.
40 function.
41 """
41 """
42 jn = os.path.join
42 jn = os.path.join
43
43
44 global_settings_maker = SettingsMaker(global_config)
44 global_settings_maker = SettingsMaker(global_config)
45 global_settings_maker.make_setting('debug', default=False, parser='bool')
45 global_settings_maker.make_setting('debug', default=False, parser='bool')
46 debug_enabled = asbool(global_config.get('debug'))
46 debug_enabled = asbool(global_config.get('debug'))
47
47
48 settings_maker = SettingsMaker(settings)
48 settings_maker = SettingsMaker(settings)
49
49
50 settings_maker.make_setting(
50 settings_maker.make_setting(
51 'logging.autoconfigure',
51 'logging.autoconfigure',
52 default=False,
52 default=False,
53 parser='bool')
53 parser='bool')
54
54
55 logging_conf = jn(os.path.dirname(global_config.get('__file__')), 'logging.ini')
55 logging_conf = jn(os.path.dirname(global_config.get('__file__')), 'logging.ini')
56 settings_maker.enable_logging(logging_conf, level='INFO' if debug_enabled else 'DEBUG')
56 settings_maker.enable_logging(logging_conf, level='INFO' if debug_enabled else 'DEBUG')
57
57
58 # Default includes, which a user may override
58 # Default includes, which a user may override
59 pyramid_includes = settings_maker.make_setting('pyramid.includes', [], parser='list:newline')
59 pyramid_includes = settings_maker.make_setting('pyramid.includes', [], parser='list:newline')
60 log.debug(
60 log.debug(
61 "Using the following pyramid.includes: %s",
61 "Using the following pyramid.includes: %s",
62 pyramid_includes)
62 pyramid_includes)
63
63
64 settings_maker.make_setting('rhodecode.edition', 'Community Edition')
64 settings_maker.make_setting('rhodecode.edition', 'Community Edition')
65 settings_maker.make_setting('rhodecode.edition_id', 'CE')
65 settings_maker.make_setting('rhodecode.edition_id', 'CE')
66
66
67 if 'mako.default_filters' not in settings:
67 if 'mako.default_filters' not in settings:
68 # set custom default filters if we don't have it defined
68 # set custom default filters if we don't have it defined
69 settings['mako.imports'] = 'from rhodecode.lib.base import h_filter'
69 settings['mako.imports'] = 'from rhodecode.lib.base import h_filter'
70 settings['mako.default_filters'] = 'h_filter'
70 settings['mako.default_filters'] = 'h_filter'
71
71
72 if 'mako.directories' not in settings:
72 if 'mako.directories' not in settings:
73 mako_directories = settings.setdefault('mako.directories', [
73 mako_directories = settings.setdefault('mako.directories', [
74 # Base templates of the original application
74 # Base templates of the original application
75 'rhodecode:templates',
75 'rhodecode:templates',
76 ])
76 ])
77 log.debug(
77 log.debug(
78 "Using the following Mako template directories: %s",
78 "Using the following Mako template directories: %s",
79 mako_directories)
79 mako_directories)
80
80
81 # NOTE(marcink): fix redis requirement for schema of connection since 3.X
81 # NOTE(marcink): fix redis requirement for schema of connection since 3.X
82 if 'beaker.session.type' in settings and settings['beaker.session.type'] == 'ext:redis':
82 if 'beaker.session.type' in settings and settings['beaker.session.type'] == 'ext:redis':
83 raw_url = settings['beaker.session.url']
83 raw_url = settings['beaker.session.url']
84 if not raw_url.startswith(('redis://', 'rediss://', 'unix://')):
84 if not raw_url.startswith(('redis://', 'rediss://', 'unix://')):
85 settings['beaker.session.url'] = 'redis://' + raw_url
85 settings['beaker.session.url'] = 'redis://' + raw_url
86
86
87 settings_maker.make_setting('__file__', global_config.get('__file__'))
87 settings_maker.make_setting('__file__', global_config.get('__file__'))
88
88
89 # TODO: johbo: Re-think this, usually the call to config.include
89 # TODO: johbo: Re-think this, usually the call to config.include
90 # should allow to pass in a prefix.
90 # should allow to pass in a prefix.
91 settings_maker.make_setting('rhodecode.api.url', '/_admin/api')
91 settings_maker.make_setting('rhodecode.api.url', '/_admin/api')
92
92
93 # Sanitize generic settings.
93 # Sanitize generic settings.
94 settings_maker.make_setting('default_encoding', 'UTF-8', parser='list')
94 settings_maker.make_setting('default_encoding', 'UTF-8', parser='list')
95 settings_maker.make_setting('gzip_responses', False, parser='bool')
95 settings_maker.make_setting('gzip_responses', False, parser='bool')
96 settings_maker.make_setting('startup.import_repos', 'false', parser='bool')
96 settings_maker.make_setting('startup.import_repos', 'false', parser='bool')
97
97
98 # statsd
98 # statsd
99 settings_maker.make_setting('statsd.enabled', False, parser='bool')
99 settings_maker.make_setting('statsd.enabled', False, parser='bool')
100 settings_maker.make_setting('statsd.statsd_host', 'statsd-exporter', parser='string')
100 settings_maker.make_setting('statsd.statsd_host', 'statsd-exporter', parser='string')
101 settings_maker.make_setting('statsd.statsd_port', 9125, parser='int')
101 settings_maker.make_setting('statsd.statsd_port', 9125, parser='int')
102 settings_maker.make_setting('statsd.statsd_prefix', '')
102 settings_maker.make_setting('statsd.statsd_prefix', '')
103 settings_maker.make_setting('statsd.statsd_ipv6', False, parser='bool')
103 settings_maker.make_setting('statsd.statsd_ipv6', False, parser='bool')
104
104
105 settings_maker.make_setting('vcs.svn.compatible_version', '')
105 settings_maker.make_setting('vcs.svn.compatible_version', '')
106 settings_maker.make_setting('vcs.svn.proxy.enabled', True, parser='bool')
106 settings_maker.make_setting('vcs.svn.proxy.enabled', True, parser='bool')
107 settings_maker.make_setting('vcs.svn.proxy.host', 'http://svn:8090', parser='string')
107 settings_maker.make_setting('vcs.svn.proxy.host', 'http://svn:8090', parser='string')
108 settings_maker.make_setting('vcs.hooks.protocol', 'http')
108 settings_maker.make_setting('vcs.hooks.protocol', 'http')
109 settings_maker.make_setting('vcs.hooks.host', '*')
109 settings_maker.make_setting('vcs.hooks.host', '*')
110 settings_maker.make_setting('vcs.scm_app_implementation', 'http')
110 settings_maker.make_setting('vcs.scm_app_implementation', 'http')
111 settings_maker.make_setting('vcs.server', '')
111 settings_maker.make_setting('vcs.server', '')
112 settings_maker.make_setting('vcs.server.protocol', 'http')
112 settings_maker.make_setting('vcs.server.protocol', 'http')
113 settings_maker.make_setting('vcs.server.enable', 'true', parser='bool')
113 settings_maker.make_setting('vcs.server.enable', 'true', parser='bool')
114 settings_maker.make_setting('vcs.hooks.direct_calls', 'false', parser='bool')
114 settings_maker.make_setting('vcs.hooks.direct_calls', 'false', parser='bool')
115 settings_maker.make_setting('vcs.start_server', 'false', parser='bool')
115 settings_maker.make_setting('vcs.start_server', 'false', parser='bool')
116 settings_maker.make_setting('vcs.backends', 'hg, git, svn', parser='list')
116 settings_maker.make_setting('vcs.backends', 'hg, git, svn', parser='list')
117 settings_maker.make_setting('vcs.connection_timeout', 3600, parser='int')
117 settings_maker.make_setting('vcs.connection_timeout', 3600, parser='int')
118
118
119 settings_maker.make_setting('vcs.methods.cache', True, parser='bool')
119 settings_maker.make_setting('vcs.methods.cache', True, parser='bool')
120
120
121 # repo_store path
122 settings_maker.make_setting('repo_store.path', '/var/opt/rhodecode_repo_store')
121 # Support legacy values of vcs.scm_app_implementation. Legacy
123 # Support legacy values of vcs.scm_app_implementation. Legacy
122 # configurations may use 'rhodecode.lib.middleware.utils.scm_app_http', or
124 # configurations may use 'rhodecode.lib.middleware.utils.scm_app_http', or
123 # 'vcsserver.scm_app' (disabled since 4.13); both are now mapped to 'http'.
125 # 'vcsserver.scm_app' (disabled since 4.13); both are now mapped to 'http'.
124 scm_app_impl = settings['vcs.scm_app_implementation']
126 scm_app_impl = settings['vcs.scm_app_implementation']
125 if scm_app_impl in ['rhodecode.lib.middleware.utils.scm_app_http', 'vcsserver.scm_app']:
127 if scm_app_impl in ['rhodecode.lib.middleware.utils.scm_app_http', 'vcsserver.scm_app']:
126 settings['vcs.scm_app_implementation'] = 'http'
128 settings['vcs.scm_app_implementation'] = 'http'
127
129
128 settings_maker.make_setting('appenlight', False, parser='bool')
130 settings_maker.make_setting('appenlight', False, parser='bool')
129
131
130 temp_store = tempfile.gettempdir()
132 temp_store = tempfile.gettempdir()
131 tmp_cache_dir = jn(temp_store, 'rc_cache')
133 tmp_cache_dir = jn(temp_store, 'rc_cache')
132
134
133 # save default cache dir and use it for all backends later.
135 # save default cache dir and use it for all backends later.
134 default_cache_dir = settings_maker.make_setting(
136 default_cache_dir = settings_maker.make_setting(
135 'cache_dir',
137 'cache_dir',
136 default=tmp_cache_dir, default_when_empty=True,
138 default=tmp_cache_dir, default_when_empty=True,
137 parser='dir:ensured')
139 parser='dir:ensured')
138
140
139 # exception store cache
141 # exception store cache
140 settings_maker.make_setting(
142 settings_maker.make_setting(
141 'exception_tracker.store_path',
143 'exception_tracker.store_path',
142 default=jn(default_cache_dir, 'exc_store'), default_when_empty=True,
144 default=jn(default_cache_dir, 'exc_store'), default_when_empty=True,
143 parser='dir:ensured'
145 parser='dir:ensured'
144 )
146 )
145
147
146 settings_maker.make_setting(
148 settings_maker.make_setting(
147 'celerybeat-schedule.path',
149 'celerybeat-schedule.path',
148 default=jn(default_cache_dir, 'celerybeat_schedule', 'celerybeat-schedule.db'), default_when_empty=True,
150 default=jn(default_cache_dir, 'celerybeat_schedule', 'celerybeat-schedule.db'), default_when_empty=True,
149 parser='file:ensured'
151 parser='file:ensured'
150 )
152 )
151
153
152 settings_maker.make_setting('exception_tracker.send_email', False, parser='bool')
154 settings_maker.make_setting('exception_tracker.send_email', False, parser='bool')
153 settings_maker.make_setting('exception_tracker.email_prefix', '[RHODECODE ERROR]', default_when_empty=True)
155 settings_maker.make_setting('exception_tracker.email_prefix', '[RHODECODE ERROR]', default_when_empty=True)
154
156
155 # sessions, ensure file since no-value is memory
157 # sessions, ensure file since no-value is memory
156 settings_maker.make_setting('beaker.session.type', 'file')
158 settings_maker.make_setting('beaker.session.type', 'file')
157 settings_maker.make_setting('beaker.session.data_dir', jn(default_cache_dir, 'session_data'))
159 settings_maker.make_setting('beaker.session.data_dir', jn(default_cache_dir, 'session_data'))
158
160
159 # cache_general
161 # cache_general
160 settings_maker.make_setting('rc_cache.cache_general.backend', 'dogpile.cache.rc.file_namespace')
162 settings_maker.make_setting('rc_cache.cache_general.backend', 'dogpile.cache.rc.file_namespace')
161 settings_maker.make_setting('rc_cache.cache_general.expiration_time', 60 * 60 * 12, parser='int')
163 settings_maker.make_setting('rc_cache.cache_general.expiration_time', 60 * 60 * 12, parser='int')
162 settings_maker.make_setting('rc_cache.cache_general.arguments.filename', jn(default_cache_dir, 'rhodecode_cache_general.db'))
164 settings_maker.make_setting('rc_cache.cache_general.arguments.filename', jn(default_cache_dir, 'rhodecode_cache_general.db'))
163
165
164 # cache_perms
166 # cache_perms
165 settings_maker.make_setting('rc_cache.cache_perms.backend', 'dogpile.cache.rc.file_namespace')
167 settings_maker.make_setting('rc_cache.cache_perms.backend', 'dogpile.cache.rc.file_namespace')
166 settings_maker.make_setting('rc_cache.cache_perms.expiration_time', 60 * 60, parser='int')
168 settings_maker.make_setting('rc_cache.cache_perms.expiration_time', 60 * 60, parser='int')
167 settings_maker.make_setting('rc_cache.cache_perms.arguments.filename', jn(default_cache_dir, 'rhodecode_cache_perms_db'))
169 settings_maker.make_setting('rc_cache.cache_perms.arguments.filename', jn(default_cache_dir, 'rhodecode_cache_perms_db'))
168
170
169 # cache_repo
171 # cache_repo
170 settings_maker.make_setting('rc_cache.cache_repo.backend', 'dogpile.cache.rc.file_namespace')
172 settings_maker.make_setting('rc_cache.cache_repo.backend', 'dogpile.cache.rc.file_namespace')
171 settings_maker.make_setting('rc_cache.cache_repo.expiration_time', 60 * 60 * 24 * 30, parser='int')
173 settings_maker.make_setting('rc_cache.cache_repo.expiration_time', 60 * 60 * 24 * 30, parser='int')
172 settings_maker.make_setting('rc_cache.cache_repo.arguments.filename', jn(default_cache_dir, 'rhodecode_cache_repo_db'))
174 settings_maker.make_setting('rc_cache.cache_repo.arguments.filename', jn(default_cache_dir, 'rhodecode_cache_repo_db'))
173
175
174 # cache_license
176 # cache_license
175 settings_maker.make_setting('rc_cache.cache_license.backend', 'dogpile.cache.rc.file_namespace')
177 settings_maker.make_setting('rc_cache.cache_license.backend', 'dogpile.cache.rc.file_namespace')
176 settings_maker.make_setting('rc_cache.cache_license.expiration_time', 60 * 5, parser='int')
178 settings_maker.make_setting('rc_cache.cache_license.expiration_time', 60 * 5, parser='int')
177 settings_maker.make_setting('rc_cache.cache_license.arguments.filename', jn(default_cache_dir, 'rhodecode_cache_license_db'))
179 settings_maker.make_setting('rc_cache.cache_license.arguments.filename', jn(default_cache_dir, 'rhodecode_cache_license_db'))
178
180
179 # cache_repo_longterm memory, 96H
181 # cache_repo_longterm memory, 96H
180 settings_maker.make_setting('rc_cache.cache_repo_longterm.backend', 'dogpile.cache.rc.memory_lru')
182 settings_maker.make_setting('rc_cache.cache_repo_longterm.backend', 'dogpile.cache.rc.memory_lru')
181 settings_maker.make_setting('rc_cache.cache_repo_longterm.expiration_time', 345600, parser='int')
183 settings_maker.make_setting('rc_cache.cache_repo_longterm.expiration_time', 345600, parser='int')
182 settings_maker.make_setting('rc_cache.cache_repo_longterm.max_size', 10000, parser='int')
184 settings_maker.make_setting('rc_cache.cache_repo_longterm.max_size', 10000, parser='int')
183
185
184 # sql_cache_short
186 # sql_cache_short
185 settings_maker.make_setting('rc_cache.sql_cache_short.backend', 'dogpile.cache.rc.memory_lru')
187 settings_maker.make_setting('rc_cache.sql_cache_short.backend', 'dogpile.cache.rc.memory_lru')
186 settings_maker.make_setting('rc_cache.sql_cache_short.expiration_time', 30, parser='int')
188 settings_maker.make_setting('rc_cache.sql_cache_short.expiration_time', 30, parser='int')
187 settings_maker.make_setting('rc_cache.sql_cache_short.max_size', 10000, parser='int')
189 settings_maker.make_setting('rc_cache.sql_cache_short.max_size', 10000, parser='int')
188
190
189 # archive_cache
191 # archive_cache
190 settings_maker.make_setting('archive_cache.store_dir', jn(default_cache_dir, 'archive_cache'), default_when_empty=True,)
192 settings_maker.make_setting('archive_cache.store_dir', jn(default_cache_dir, 'archive_cache'), default_when_empty=True,)
191 settings_maker.make_setting('archive_cache.cache_size_gb', 10, parser='float')
193 settings_maker.make_setting('archive_cache.cache_size_gb', 10, parser='float')
192 settings_maker.make_setting('archive_cache.cache_shards', 10, parser='int')
194 settings_maker.make_setting('archive_cache.cache_shards', 10, parser='int')
193
195
194 settings_maker.env_expand()
196 settings_maker.env_expand()
195
197
196 # configure instance id
198 # configure instance id
197 config_utils.set_instance_id(settings)
199 config_utils.set_instance_id(settings)
198
200
199 return settings
201 return settings
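sanitize_settings_and_apply_defaults above leans on SettingsMaker's "default value plus parser" convention to coerce ini strings into booleans, integers and lists. The helper below is not the real SettingsMaker API, only a self-contained sketch of that convention under those assumptions:

# Not the real SettingsMaker API -- a self-contained sketch of "default + parser".
def make_setting(settings, key, default, parser=None):
    """Fill in a default, then coerce the value according to a named parser."""
    value = settings.get(key, default)
    if value in (None, ''):
        value = default
    if parser == 'bool' and not isinstance(value, bool):
        value = str(value).lower() in ('true', '1', 'yes', 'on')
    elif parser == 'int':
        value = int(value)
    elif parser == 'list':
        value = [item.strip() for item in str(value).split(',') if item.strip()]
    settings[key] = value
    return value

conf = {'vcs.backends': 'hg, git, svn', 'startup.import_repos': 'false'}
make_setting(conf, 'vcs.backends', 'hg, git, svn', parser='list')
make_setting(conf, 'startup.import_repos', 'false', parser='bool')
make_setting(conf, 'vcs.connection_timeout', 3600, parser='int')
print(conf)
# -> {'vcs.backends': ['hg', 'git', 'svn'], 'startup.import_repos': False,
#     'vcs.connection_timeout': 3600}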
@@ -1,88 +1,87 @@
1 # Copyright (C) 2010-2023 RhodeCode GmbH
1 # Copyright (C) 2010-2023 RhodeCode GmbH
2 #
2 #
3 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
4 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
5 # (only), as published by the Free Software Foundation.
5 # (only), as published by the Free Software Foundation.
6 #
6 #
7 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
10 # GNU General Public License for more details.
11 #
11 #
12 # You should have received a copy of the GNU Affero General Public License
12 # You should have received a copy of the GNU Affero General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 #
14 #
15 # This program is dual-licensed. If you wish to learn more about the
15 # This program is dual-licensed. If you wish to learn more about the
16 # RhodeCode Enterprise Edition, including its added features, Support services,
16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18
18
19 import os
19 import os
20 import logging
20 import logging
21 import rhodecode
21 import rhodecode
22 import collections
22 import collections
23
23
24 from rhodecode.config import utils
24 from rhodecode.config import utils
25
25
26 from rhodecode.lib.utils import load_rcextensions
26 from rhodecode.lib.utils import load_rcextensions
27 from rhodecode.lib.utils2 import str2bool
27 from rhodecode.lib.utils2 import str2bool
28 from rhodecode.lib.vcs import connect_vcs
28 from rhodecode.lib.vcs import connect_vcs
29
29
30 log = logging.getLogger(__name__)
30 log = logging.getLogger(__name__)
31
31
32
32
33 def load_pyramid_environment(global_config, settings):
33 def load_pyramid_environment(global_config, settings):
34 # Some parts of the code expect a merge of global and app settings.
34 # Some parts of the code expect a merge of global and app settings.
35 settings_merged = global_config.copy()
35 settings_merged = global_config.copy()
36 settings_merged.update(settings)
36 settings_merged.update(settings)
37
37
38 # TODO(marcink): probably not required anymore
38 # TODO(marcink): probably not required anymore
39 # configure channelstream,
39 # configure channelstream,
40 settings_merged['channelstream_config'] = {
40 settings_merged['channelstream_config'] = {
41 'enabled': str2bool(settings_merged.get('channelstream.enabled', False)),
41 'enabled': str2bool(settings_merged.get('channelstream.enabled', False)),
42 'server': settings_merged.get('channelstream.server'),
42 'server': settings_merged.get('channelstream.server'),
43 'secret': settings_merged.get('channelstream.secret')
43 'secret': settings_merged.get('channelstream.secret')
44 }
44 }
45
45
46 # If this is a test run we prepare the test environment like
46 # If this is a test run we prepare the test environment like
47 # creating a test database, test search index and test repositories.
47 # creating a test database, test search index and test repositories.
48 # This has to be done before the database connection is initialized.
48 # This has to be done before the database connection is initialized.
49 if rhodecode.is_test:
49 if rhodecode.is_test:
50 rhodecode.disable_error_handler = True
50 rhodecode.disable_error_handler = True
51 from rhodecode import authentication
51 from rhodecode import authentication
52 authentication.plugin_default_auth_ttl = 0
52 authentication.plugin_default_auth_ttl = 0
53
53
54 utils.initialize_test_environment(settings_merged)
54 utils.initialize_test_environment(settings_merged)
55
55
56 # Initialize the database connection.
56 # Initialize the database connection.
57 utils.initialize_database(settings_merged)
57 utils.initialize_database(settings_merged)
58
58
59 load_rcextensions(root_path=settings_merged['here'])
59 load_rcextensions(root_path=settings_merged['here'])
60
60
61 # Limit backends to `vcs.backends` from configuration, and preserve the order
61 # Limit backends to `vcs.backends` from configuration, and preserve the order
62 for alias in rhodecode.BACKENDS.keys():
62 for alias in rhodecode.BACKENDS.keys():
63 if alias not in settings['vcs.backends']:
63 if alias not in settings['vcs.backends']:
64 del rhodecode.BACKENDS[alias]
64 del rhodecode.BACKENDS[alias]
65
65
66 _sorted_backend = sorted(rhodecode.BACKENDS.items(),
66 _sorted_backend = sorted(rhodecode.BACKENDS.items(),
67 key=lambda item: settings['vcs.backends'].index(item[0]))
67 key=lambda item: settings['vcs.backends'].index(item[0]))
68 rhodecode.BACKENDS = collections.OrderedDict(_sorted_backend)
68 rhodecode.BACKENDS = collections.OrderedDict(_sorted_backend)
69
69
70 log.info('Enabled VCS backends: %s', rhodecode.BACKENDS.keys())
70 log.info('Enabled VCS backends: %s', rhodecode.BACKENDS.keys())
71
71
72 # initialize vcs client and optionally run the server if enabled
72 # initialize vcs client and optionally run the server if enabled
73 vcs_server_uri = settings['vcs.server']
73 vcs_server_uri = settings['vcs.server']
74 vcs_server_enabled = settings['vcs.server.enable']
74 vcs_server_enabled = settings['vcs.server.enable']
75
75
76 utils.configure_vcs(settings)
76 utils.configure_vcs(settings)
77
77
78 # Store the settings to make them available to other modules.
78 # Store the settings to make them available to other modules.
79
79
80 rhodecode.PYRAMID_SETTINGS = settings_merged
80 rhodecode.PYRAMID_SETTINGS = settings_merged
81 rhodecode.CONFIG = settings_merged
81 rhodecode.CONFIG = settings_merged
82 rhodecode.CONFIG['default_user_id'] = utils.get_default_user_id()
82 rhodecode.CONFIG['default_user_id'] = utils.get_default_user_id()
83 rhodecode.CONFIG['default_base_path'] = utils.get_default_base_path()
84
83
85 if vcs_server_enabled:
84 if vcs_server_enabled:
86 connect_vcs(vcs_server_uri, utils.get_vcs_server_protocol(settings))
85 connect_vcs(vcs_server_uri, utils.get_vcs_server_protocol(settings))
87 else:
86 else:
88 log.warning('vcs-server not enabled, vcs connection unavailable')
87 log.warning('vcs-server not enabled, vcs connection unavailable')
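
load_pyramid_environment() above first drops every backend alias not listed in vcs.backends and then re-orders rhodecode.BACKENDS to match the configured order. The same idea in isolation, with placeholder values standing in for the real repository classes:

import collections

# placeholder registry; the real values are backend repository classes
BACKENDS = collections.OrderedDict(hg='HgRepo', git='GitRepo', svn='SvnRepo')
enabled = ['git', 'hg']  # e.g. settings['vcs.backends']

# drop disabled aliases (iterate over a copy so deletion is safe)
for alias in list(BACKENDS):
    if alias not in enabled:
        del BACKENDS[alias]

# preserve the order given in the configuration
BACKENDS = collections.OrderedDict(
    sorted(BACKENDS.items(), key=lambda item: enabled.index(item[0])))

print(list(BACKENDS))  # ['git', 'hg']
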
@@ -1,116 +1,104 b''
1 # Copyright (C) 2010-2023 RhodeCode GmbH
1 # Copyright (C) 2010-2023 RhodeCode GmbH
2 #
2 #
3 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
4 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
5 # (only), as published by the Free Software Foundation.
5 # (only), as published by the Free Software Foundation.
6 #
6 #
7 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
10 # GNU General Public License for more details.
11 #
11 #
12 # You should have received a copy of the GNU Affero General Public License
12 # You should have received a copy of the GNU Affero General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 #
14 #
15 # This program is dual-licensed. If you wish to learn more about the
15 # This program is dual-licensed. If you wish to learn more about the
16 # RhodeCode Enterprise Edition, including its added features, Support services,
16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18
18
19 import os
19 import os
20 import platform
20 import platform
21
21
22
22
23 def configure_vcs(config):
23 def configure_vcs(config):
24 """
24 """
25 Patch VCS config with some RhodeCode specific stuff
25 Patch VCS config with some RhodeCode specific stuff
26 """
26 """
27 from rhodecode.lib.vcs import conf
27 from rhodecode.lib.vcs import conf
28 import rhodecode.lib.vcs.conf.settings
28 import rhodecode.lib.vcs.conf.settings
29
29
30 conf.settings.BACKENDS = {
30 conf.settings.BACKENDS = {
31 'hg': 'rhodecode.lib.vcs.backends.hg.MercurialRepository',
31 'hg': 'rhodecode.lib.vcs.backends.hg.MercurialRepository',
32 'git': 'rhodecode.lib.vcs.backends.git.GitRepository',
32 'git': 'rhodecode.lib.vcs.backends.git.GitRepository',
33 'svn': 'rhodecode.lib.vcs.backends.svn.SubversionRepository',
33 'svn': 'rhodecode.lib.vcs.backends.svn.SubversionRepository',
34 }
34 }
35
35
36 conf.settings.HOOKS_PROTOCOL = config['vcs.hooks.protocol']
36 conf.settings.HOOKS_PROTOCOL = config['vcs.hooks.protocol']
37 conf.settings.HOOKS_HOST = config['vcs.hooks.host']
37 conf.settings.HOOKS_HOST = config['vcs.hooks.host']
38 conf.settings.DEFAULT_ENCODINGS = config['default_encoding']
38 conf.settings.DEFAULT_ENCODINGS = config['default_encoding']
39 conf.settings.ALIASES[:] = config['vcs.backends']
39 conf.settings.ALIASES[:] = config['vcs.backends']
40 conf.settings.SVN_COMPATIBLE_VERSION = config['vcs.svn.compatible_version']
40 conf.settings.SVN_COMPATIBLE_VERSION = config['vcs.svn.compatible_version']
41
41
42
42
43 def initialize_database(config):
43 def initialize_database(config):
44 from rhodecode.lib.utils2 import engine_from_config, get_encryption_key
44 from rhodecode.lib.utils2 import engine_from_config, get_encryption_key
45 from rhodecode.model import init_model
45 from rhodecode.model import init_model
46 engine = engine_from_config(config, 'sqlalchemy.db1.')
46 engine = engine_from_config(config, 'sqlalchemy.db1.')
47 init_model(engine, encryption_key=get_encryption_key(config))
47 init_model(engine, encryption_key=get_encryption_key(config))
48
48
49
49
50 def initialize_test_environment(settings, test_env=None):
50 def initialize_test_environment(settings, test_env=None):
51 if test_env is None:
51 if test_env is None:
52 test_env = not int(os.environ.get('RC_NO_TMP_PATH', 0))
52 test_env = not int(os.environ.get('RC_NO_TMP_PATH', 0))
53
53
54 from rhodecode.lib.utils import (
54 from rhodecode.lib.utils import (
55 create_test_directory, create_test_database, create_test_repositories,
55 create_test_directory, create_test_database, create_test_repositories,
56 create_test_index)
56 create_test_index)
57 from rhodecode.tests import TESTS_TMP_PATH
57 from rhodecode.tests import TESTS_TMP_PATH
58 from rhodecode.lib.vcs.backends.hg import largefiles_store
58 from rhodecode.lib.vcs.backends.hg import largefiles_store
59 from rhodecode.lib.vcs.backends.git import lfs_store
59 from rhodecode.lib.vcs.backends.git import lfs_store
60
60
61 # test repos
61 # test repos
62 if test_env:
62 if test_env:
63 create_test_directory(TESTS_TMP_PATH)
63 create_test_directory(TESTS_TMP_PATH)
64 # large object stores
64 # large object stores
65 create_test_directory(largefiles_store(TESTS_TMP_PATH))
65 create_test_directory(largefiles_store(TESTS_TMP_PATH))
66 create_test_directory(lfs_store(TESTS_TMP_PATH))
66 create_test_directory(lfs_store(TESTS_TMP_PATH))
67
67
68 create_test_database(TESTS_TMP_PATH, settings)
68 create_test_database(TESTS_TMP_PATH, settings)
69 create_test_repositories(TESTS_TMP_PATH, settings)
69 create_test_repositories(TESTS_TMP_PATH, settings)
70 create_test_index(TESTS_TMP_PATH, settings)
70 create_test_index(TESTS_TMP_PATH, settings)
71
71
72
72
73 def get_vcs_server_protocol(config):
73 def get_vcs_server_protocol(config):
74 return config['vcs.server.protocol']
74 return config['vcs.server.protocol']
75
75
76
76
77 def set_instance_id(config):
77 def set_instance_id(config):
78 """
78 """
79 Sets a dynamic generated config['instance_id'] if missing or '*'
79 Sets a dynamic generated config['instance_id'] if missing or '*'
80 E.g instance_id = *cluster-1 or instance_id = *
80 E.g instance_id = *cluster-1 or instance_id = *
81 """
81 """
82
82
83 config['instance_id'] = config.get('instance_id') or ''
83 config['instance_id'] = config.get('instance_id') or ''
84 instance_id = config['instance_id']
84 instance_id = config['instance_id']
85 if instance_id.startswith('*') or not instance_id:
85 if instance_id.startswith('*') or not instance_id:
86 prefix = instance_id.lstrip('*')
86 prefix = instance_id.lstrip('*')
87 _platform_id = platform.uname()[1] or 'instance'
87 _platform_id = platform.uname()[1] or 'instance'
88 config['instance_id'] = '{prefix}uname:{platform}-pid:{pid}'.format(
88 config['instance_id'] = '{prefix}uname:{platform}-pid:{pid}'.format(
89 prefix=prefix,
89 prefix=prefix,
90 platform=_platform_id,
90 platform=_platform_id,
91 pid=os.getpid())
91 pid=os.getpid())
92
92
93
93
94 def get_default_user_id():
94 def get_default_user_id():
95 DEFAULT_USER = 'default'
95 DEFAULT_USER = 'default'
96 from sqlalchemy import text
96 from sqlalchemy import text
97 from rhodecode.model import meta
97 from rhodecode.model import meta
98
98
99 engine = meta.get_engine()
99 engine = meta.get_engine()
100 with meta.SA_Session(engine) as session:
100 with meta.SA_Session(engine) as session:
101 result = session.execute(text("SELECT user_id from users where username = :uname"), {'uname': DEFAULT_USER})
101 result = session.execute(text("SELECT user_id from users where username = :uname"), {'uname': DEFAULT_USER})
102 user_id = result.first()[0]
102 user_id = result.first()[0]
103
103
104 return user_id
104 return user_id
105
106
107 def get_default_base_path():
108 from sqlalchemy import text
109 from rhodecode.model import meta
110
111 engine = meta.get_engine()
112 with meta.SA_Session(engine) as session:
113 result = session.execute(text("SELECT ui_value from rhodecode_ui where ui_key = '/'"))
114 base_path = result.first()[0]
115
116 return base_path
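
The set_instance_id() helper above expands a missing or '*'-prefixed instance_id into a host- and PID-based identifier. A standalone run of the same expansion (hostname and PID will differ per machine):

import os
import platform

config = {'instance_id': '*cluster-1'}

instance_id = config.get('instance_id') or ''
if instance_id.startswith('*') or not instance_id:
    prefix = instance_id.lstrip('*')                  # 'cluster-1'
    _platform_id = platform.uname()[1] or 'instance'  # hostname
    config['instance_id'] = '{prefix}uname:{platform}-pid:{pid}'.format(
        prefix=prefix, platform=_platform_id, pid=os.getpid())

print(config['instance_id'])  # e.g. 'cluster-1uname:myhost-pid:12345'
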
@@ -1,609 +1,607 b''
1 # Copyright (C) 2010-2023 RhodeCode GmbH
1 # Copyright (C) 2010-2023 RhodeCode GmbH
2 #
2 #
3 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
4 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
5 # (only), as published by the Free Software Foundation.
5 # (only), as published by the Free Software Foundation.
6 #
6 #
7 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
10 # GNU General Public License for more details.
11 #
11 #
12 # You should have received a copy of the GNU Affero General Public License
12 # You should have received a copy of the GNU Affero General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 #
14 #
15 # This program is dual-licensed. If you wish to learn more about the
15 # This program is dual-licensed. If you wish to learn more about the
16 # RhodeCode Enterprise Edition, including its added features, Support services,
16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18
18
19 """
19 """
20 The base Controller API
20 The base Controller API
21 Provides the BaseController class for subclassing and usage in different
21 Provides the BaseController class for subclassing and usage in different
22 controllers.
22 controllers.
23 """
23 """
24
24
25 import logging
25 import logging
26 import socket
26 import socket
27 import base64
27 import base64
28
28
29 import markupsafe
29 import markupsafe
30 import ipaddress
30 import ipaddress
31
31
32 import paste.httpheaders
32 import paste.httpheaders
33 from paste.auth.basic import AuthBasicAuthenticator
33 from paste.auth.basic import AuthBasicAuthenticator
34 from paste.httpexceptions import HTTPUnauthorized, HTTPForbidden, get_exception
34 from paste.httpexceptions import HTTPUnauthorized, HTTPForbidden, get_exception
35
35
36 import rhodecode
36 import rhodecode
37 from rhodecode.authentication.base import VCS_TYPE
37 from rhodecode.authentication.base import VCS_TYPE
38 from rhodecode.lib import auth, utils2
38 from rhodecode.lib import auth, utils2
39 from rhodecode.lib import helpers as h
39 from rhodecode.lib import helpers as h
40 from rhodecode.lib.auth import AuthUser, CookieStoreWrapper
40 from rhodecode.lib.auth import AuthUser, CookieStoreWrapper
41 from rhodecode.lib.exceptions import UserCreationError
41 from rhodecode.lib.exceptions import UserCreationError
42 from rhodecode.lib.utils import (password_changed, get_enabled_hook_classes)
42 from rhodecode.lib.utils import (password_changed, get_enabled_hook_classes)
43 from rhodecode.lib.utils2 import AttributeDict
43 from rhodecode.lib.utils2 import AttributeDict
44 from rhodecode.lib.str_utils import ascii_bytes, safe_int, safe_str
44 from rhodecode.lib.str_utils import ascii_bytes, safe_int, safe_str
45 from rhodecode.lib.type_utils import aslist, str2bool
45 from rhodecode.lib.type_utils import aslist, str2bool
46 from rhodecode.lib.hash_utils import sha1
46 from rhodecode.lib.hash_utils import sha1
47 from rhodecode.model.db import Repository, User, ChangesetComment, UserBookmark
47 from rhodecode.model.db import Repository, User, ChangesetComment, UserBookmark
48 from rhodecode.model.notification import NotificationModel
48 from rhodecode.model.notification import NotificationModel
49 from rhodecode.model.settings import VcsSettingsModel, SettingsModel
49 from rhodecode.model.settings import VcsSettingsModel, SettingsModel
50
50
51 log = logging.getLogger(__name__)
51 log = logging.getLogger(__name__)
52
52
53
53
54 def _filter_proxy(ip):
54 def _filter_proxy(ip):
55 """
55 """
56 IP addresses passed in HEADERS can be in a special format of multiple
56 IP addresses passed in HEADERS can be in a special format of multiple
57 comma-separated IPs. Those IPs are passed along by the various proxies in
57 comma-separated IPs. Those IPs are passed along by the various proxies in
58 the chain of request processing, the left-most being the original client.
58 the chain of request processing, the left-most being the original client.
59 We only care about that first IP, which came from the original client.
59 We only care about that first IP, which came from the original client.
60
60
61 :param ip: ip string from headers
61 :param ip: ip string from headers
62 """
62 """
63 if ',' in ip:
63 if ',' in ip:
64 _ips = ip.split(',')
64 _ips = ip.split(',')
65 _first_ip = _ips[0].strip()
65 _first_ip = _ips[0].strip()
66 log.debug('Got multiple IPs %s, using %s', ','.join(_ips), _first_ip)
66 log.debug('Got multiple IPs %s, using %s', ','.join(_ips), _first_ip)
67 return _first_ip
67 return _first_ip
68 return ip
68 return ip
69
69
70
70
71 def _filter_port(ip):
71 def _filter_port(ip):
72 """
72 """
73 Removes a port from an IP address; there are 4 main cases to handle here:
73 Removes a port from an IP address; there are 4 main cases to handle here:
74 - ipv4 eg. 127.0.0.1
74 - ipv4 eg. 127.0.0.1
75 - ipv6 eg. ::1
75 - ipv6 eg. ::1
76 - ipv4+port eg. 127.0.0.1:8080
76 - ipv4+port eg. 127.0.0.1:8080
77 - ipv6+port eg. [::1]:8080
77 - ipv6+port eg. [::1]:8080
78
78
79 :param ip:
79 :param ip:
80 """
80 """
81 def is_ipv6(ip_addr):
81 def is_ipv6(ip_addr):
82 if hasattr(socket, 'inet_pton'):
82 if hasattr(socket, 'inet_pton'):
83 try:
83 try:
84 socket.inet_pton(socket.AF_INET6, ip_addr)
84 socket.inet_pton(socket.AF_INET6, ip_addr)
85 except socket.error:
85 except socket.error:
86 return False
86 return False
87 else:
87 else:
88 # fallback to ipaddress
88 # fallback to ipaddress
89 try:
89 try:
90 ipaddress.IPv6Address(safe_str(ip_addr))
90 ipaddress.IPv6Address(safe_str(ip_addr))
91 except Exception:
91 except Exception:
92 return False
92 return False
93 return True
93 return True
94
94
95 if ':' not in ip: # must be ipv4 pure ip
95 if ':' not in ip: # must be ipv4 pure ip
96 return ip
96 return ip
97
97
98 if '[' in ip and ']' in ip: # ipv6 with port
98 if '[' in ip and ']' in ip: # ipv6 with port
99 return ip.split(']')[0][1:].lower()
99 return ip.split(']')[0][1:].lower()
100
100
101 # must be ipv6 or ipv4 with port
101 # must be ipv6 or ipv4 with port
102 if is_ipv6(ip):
102 if is_ipv6(ip):
103 return ip
103 return ip
104 else:
104 else:
105 ip, _port = ip.split(':')[:2] # means ipv4+port
105 ip, _port = ip.split(':')[:2] # means ipv4+port
106 return ip
106 return ip
107
107
108
108
109 def get_ip_addr(environ):
109 def get_ip_addr(environ):
110 proxy_key = 'HTTP_X_REAL_IP'
110 proxy_key = 'HTTP_X_REAL_IP'
111 proxy_key2 = 'HTTP_X_FORWARDED_FOR'
111 proxy_key2 = 'HTTP_X_FORWARDED_FOR'
112 def_key = 'REMOTE_ADDR'
112 def_key = 'REMOTE_ADDR'
113
113
114 def ip_filters(ip_):
114 def ip_filters(ip_):
115 return _filter_port(_filter_proxy(ip_))
115 return _filter_port(_filter_proxy(ip_))
116
116
117 ip = environ.get(proxy_key)
117 ip = environ.get(proxy_key)
118 if ip:
118 if ip:
119 return ip_filters(ip)
119 return ip_filters(ip)
120
120
121 ip = environ.get(proxy_key2)
121 ip = environ.get(proxy_key2)
122 if ip:
122 if ip:
123 return ip_filters(ip)
123 return ip_filters(ip)
124
124
125 ip = environ.get(def_key, '0.0.0.0')
125 ip = environ.get(def_key, '0.0.0.0')
126 return ip_filters(ip)
126 return ip_filters(ip)
127
127
128
128
129 def get_server_ip_addr(environ, log_errors=True):
129 def get_server_ip_addr(environ, log_errors=True):
130 hostname = environ.get('SERVER_NAME')
130 hostname = environ.get('SERVER_NAME')
131 try:
131 try:
132 return socket.gethostbyname(hostname)
132 return socket.gethostbyname(hostname)
133 except Exception as e:
133 except Exception as e:
134 if log_errors:
134 if log_errors:
135 # in some cases this lookup is not possible, and we don't want to
135 # in some cases this lookup is not possible, and we don't want to
136 # make it an exception in logs
136 # make it an exception in logs
137 log.exception('Could not retrieve server ip address: %s', e)
137 log.exception('Could not retrieve server ip address: %s', e)
138 return hostname
138 return hostname
139
139
140
140
141 def get_server_port(environ):
141 def get_server_port(environ):
142 return environ.get('SERVER_PORT')
142 return environ.get('SERVER_PORT')
143
143
144
144
145
145
146 def get_user_agent(environ):
146 def get_user_agent(environ):
147 return environ.get('HTTP_USER_AGENT')
147 return environ.get('HTTP_USER_AGENT')
148
148
149
149
150 def vcs_operation_context(
150 def vcs_operation_context(
151 environ, repo_name, username, action, scm, check_locking=True,
151 environ, repo_name, username, action, scm, check_locking=True,
152 is_shadow_repo=False, check_branch_perms=False, detect_force_push=False):
152 is_shadow_repo=False, check_branch_perms=False, detect_force_push=False):
153 """
153 """
154 Generate the context for a vcs operation, e.g. push or pull.
154 Generate the context for a vcs operation, e.g. push or pull.
155
155
156 This context is passed over the layers so that hooks triggered by the
156 This context is passed over the layers so that hooks triggered by the
157 vcs operation know details like the user, the user's IP address etc.
157 vcs operation know details like the user, the user's IP address etc.
158
158
159 :param check_locking: Allows switching off the computation of the locking
159 :param check_locking: Allows switching off the computation of the locking
160 data. This mainly serves the need of the simplevcs middleware to be
160 data. This mainly serves the need of the simplevcs middleware to be
161 able to disable this for certain operations.
161 able to disable this for certain operations.
162
162
163 """
163 """
164 # Tri-state value: False: unlock, None: nothing, True: lock
164 # Tri-state value: False: unlock, None: nothing, True: lock
165 make_lock = None
165 make_lock = None
166 locked_by = [None, None, None]
166 locked_by = [None, None, None]
167 is_anonymous = username == User.DEFAULT_USER
167 is_anonymous = username == User.DEFAULT_USER
168 user = User.get_by_username(username)
168 user = User.get_by_username(username)
169 if not is_anonymous and check_locking:
169 if not is_anonymous and check_locking:
170 log.debug('Checking locking on repository "%s"', repo_name)
170 log.debug('Checking locking on repository "%s"', repo_name)
171 repo = Repository.get_by_repo_name(repo_name)
171 repo = Repository.get_by_repo_name(repo_name)
172 make_lock, __, locked_by = repo.get_locking_state(
172 make_lock, __, locked_by = repo.get_locking_state(
173 action, user.user_id)
173 action, user.user_id)
174 user_id = user.user_id
174 user_id = user.user_id
175 settings_model = VcsSettingsModel(repo=repo_name)
175 settings_model = VcsSettingsModel(repo=repo_name)
176 ui_settings = settings_model.get_ui_settings()
176 ui_settings = settings_model.get_ui_settings()
177
177
178 # NOTE(marcink): This should be also in sync with
178 # NOTE(marcink): This should be also in sync with
179 # rhodecode/apps/ssh_support/lib/backends/base.py:update_environment scm_data
179 # rhodecode/apps/ssh_support/lib/backends/base.py:update_environment scm_data
180 store = [x for x in ui_settings if x.key == '/']
180 store = [x for x in ui_settings if x.key == '/']
181 repo_store = ''
181 repo_store = ''
182 if store:
182 if store:
183 repo_store = store[0].value
183 repo_store = store[0].value
184
184
185 scm_data = {
185 scm_data = {
186 'ip': get_ip_addr(environ),
186 'ip': get_ip_addr(environ),
187 'username': username,
187 'username': username,
188 'user_id': user_id,
188 'user_id': user_id,
189 'action': action,
189 'action': action,
190 'repository': repo_name,
190 'repository': repo_name,
191 'scm': scm,
191 'scm': scm,
192 'config': rhodecode.CONFIG['__file__'],
192 'config': rhodecode.CONFIG['__file__'],
193 'repo_store': repo_store,
193 'repo_store': repo_store,
194 'make_lock': make_lock,
194 'make_lock': make_lock,
195 'locked_by': locked_by,
195 'locked_by': locked_by,
196 'server_url': utils2.get_server_url(environ),
196 'server_url': utils2.get_server_url(environ),
197 'user_agent': get_user_agent(environ),
197 'user_agent': get_user_agent(environ),
198 'hooks': get_enabled_hook_classes(ui_settings),
198 'hooks': get_enabled_hook_classes(ui_settings),
199 'is_shadow_repo': is_shadow_repo,
199 'is_shadow_repo': is_shadow_repo,
200 'detect_force_push': detect_force_push,
200 'detect_force_push': detect_force_push,
201 'check_branch_perms': check_branch_perms,
201 'check_branch_perms': check_branch_perms,
202 }
202 }
203 return scm_data
203 return scm_data
204
204
205
205
206 class BasicAuth(AuthBasicAuthenticator):
206 class BasicAuth(AuthBasicAuthenticator):
207
207
208 def __init__(self, realm, authfunc, registry, auth_http_code=None,
208 def __init__(self, realm, authfunc, registry, auth_http_code=None,
209 initial_call_detection=False, acl_repo_name=None, rc_realm=''):
209 initial_call_detection=False, acl_repo_name=None, rc_realm=''):
210 super().__init__(realm=realm, authfunc=authfunc)
210 super().__init__(realm=realm, authfunc=authfunc)
211 self.realm = realm
211 self.realm = realm
212 self.rc_realm = rc_realm
212 self.rc_realm = rc_realm
213 self.initial_call = initial_call_detection
213 self.initial_call = initial_call_detection
214 self.authfunc = authfunc
214 self.authfunc = authfunc
215 self.registry = registry
215 self.registry = registry
216 self.acl_repo_name = acl_repo_name
216 self.acl_repo_name = acl_repo_name
217 self._rc_auth_http_code = auth_http_code
217 self._rc_auth_http_code = auth_http_code
218
218
219 def _get_response_from_code(self, http_code, fallback):
219 def _get_response_from_code(self, http_code, fallback):
220 try:
220 try:
221 return get_exception(safe_int(http_code))
221 return get_exception(safe_int(http_code))
222 except Exception:
222 except Exception:
223 log.exception('Failed to fetch response class for code %s, using fallback: %s', http_code, fallback)
223 log.exception('Failed to fetch response class for code %s, using fallback: %s', http_code, fallback)
224 return fallback
224 return fallback
225
225
226 def get_rc_realm(self):
226 def get_rc_realm(self):
227 return safe_str(self.rc_realm)
227 return safe_str(self.rc_realm)
228
228
229 def build_authentication(self):
229 def build_authentication(self):
230 header = [('WWW-Authenticate', f'Basic realm="{self.realm}"')]
230 header = [('WWW-Authenticate', f'Basic realm="{self.realm}"')]
231
231
232 # NOTE: the initial_call detection seems to be not working/not needed with the latest Mercurial;
232 # NOTE: the initial_call detection seems to be not working/not needed with the latest Mercurial;
233 # investigate if we still need it.
233 # investigate if we still need it.
234 if self._rc_auth_http_code and not self.initial_call:
234 if self._rc_auth_http_code and not self.initial_call:
235 # return alternative HTTP code if alternative http return code
235 # return alternative HTTP code if alternative http return code
236 # is specified in RhodeCode config, but ONLY if it's not the
236 # is specified in RhodeCode config, but ONLY if it's not the
237 # FIRST call
237 # FIRST call
238 custom_response_klass = self._get_response_from_code(self._rc_auth_http_code, fallback=HTTPUnauthorized)
238 custom_response_klass = self._get_response_from_code(self._rc_auth_http_code, fallback=HTTPUnauthorized)
239 log.debug('Using custom response class: %s', custom_response_klass)
239 log.debug('Using custom response class: %s', custom_response_klass)
240 return custom_response_klass(headers=header)
240 return custom_response_klass(headers=header)
241 return HTTPUnauthorized(headers=header)
241 return HTTPUnauthorized(headers=header)
242
242
243 def authenticate(self, environ):
243 def authenticate(self, environ):
244 authorization = paste.httpheaders.AUTHORIZATION(environ)
244 authorization = paste.httpheaders.AUTHORIZATION(environ)
245 if not authorization:
245 if not authorization:
246 return self.build_authentication()
246 return self.build_authentication()
247 (auth_meth, auth_creds_b64) = authorization.split(' ', 1)
247 (auth_meth, auth_creds_b64) = authorization.split(' ', 1)
248 if 'basic' != auth_meth.lower():
248 if 'basic' != auth_meth.lower():
249 return self.build_authentication()
249 return self.build_authentication()
250
250
251 credentials = safe_str(base64.b64decode(auth_creds_b64.strip()))
251 credentials = safe_str(base64.b64decode(auth_creds_b64.strip()))
252 _parts = credentials.split(':', 1)
252 _parts = credentials.split(':', 1)
253 if len(_parts) == 2:
253 if len(_parts) == 2:
254 username, password = _parts
254 username, password = _parts
255 auth_data = self.authfunc(
255 auth_data = self.authfunc(
256 username, password, environ, VCS_TYPE,
256 username, password, environ, VCS_TYPE,
257 registry=self.registry, acl_repo_name=self.acl_repo_name)
257 registry=self.registry, acl_repo_name=self.acl_repo_name)
258 if auth_data:
258 if auth_data:
259 return {'username': username, 'auth_data': auth_data}
259 return {'username': username, 'auth_data': auth_data}
260 if username and password:
260 if username and password:
261 # we mark that we actually executed authentication once, at
261 # we mark that we actually executed authentication once, at
262 # that point we can use the alternative auth code
262 # that point we can use the alternative auth code
263 self.initial_call = False
263 self.initial_call = False
264
264
265 return self.build_authentication()
265 return self.build_authentication()
266
266
267 __call__ = authenticate
267 __call__ = authenticate
268
268
269
269
270 def calculate_version_hash(config):
270 def calculate_version_hash(config):
271 return sha1(
271 return sha1(
272 config.get(b'beaker.session.secret', b'') + ascii_bytes(rhodecode.__version__)
272 config.get(b'beaker.session.secret', b'') + ascii_bytes(rhodecode.__version__)
273 )[:8]
273 )[:8]
274
274
275
275
276 def get_current_lang(request):
276 def get_current_lang(request):
277 return getattr(request, '_LOCALE_', request.locale_name)
277 return getattr(request, '_LOCALE_', request.locale_name)
278
278
279
279
280 def attach_context_attributes(context, request, user_id=None, is_api=None):
280 def attach_context_attributes(context, request, user_id=None, is_api=None):
281 """
281 """
282 Attach variables into template context called `c`.
282 Attach variables into template context called `c`.
283 """
283 """
284 config = request.registry.settings
284 config = request.registry.settings
285
285
286 rc_config = SettingsModel().get_all_settings(cache=True, from_request=False)
286 rc_config = SettingsModel().get_all_settings(cache=True, from_request=False)
287 context.rc_config = rc_config
287 context.rc_config = rc_config
288 context.rhodecode_version = rhodecode.__version__
288 context.rhodecode_version = rhodecode.__version__
289 context.rhodecode_edition = config.get('rhodecode.edition')
289 context.rhodecode_edition = config.get('rhodecode.edition')
290 context.rhodecode_edition_id = config.get('rhodecode.edition_id')
290 context.rhodecode_edition_id = config.get('rhodecode.edition_id')
291 # unique secret + version does not leak the version but keeps consistency
291 # unique secret + version does not leak the version but keeps consistency
292 context.rhodecode_version_hash = calculate_version_hash(config)
292 context.rhodecode_version_hash = calculate_version_hash(config)
293
293
294 # Default language set for the incoming request
294 # Default language set for the incoming request
295 context.language = get_current_lang(request)
295 context.language = get_current_lang(request)
296
296
297 # Visual options
297 # Visual options
298 context.visual = AttributeDict({})
298 context.visual = AttributeDict({})
299
299
300 # DB stored Visual Items
300 # DB stored Visual Items
301 context.visual.show_public_icon = str2bool(
301 context.visual.show_public_icon = str2bool(
302 rc_config.get('rhodecode_show_public_icon'))
302 rc_config.get('rhodecode_show_public_icon'))
303 context.visual.show_private_icon = str2bool(
303 context.visual.show_private_icon = str2bool(
304 rc_config.get('rhodecode_show_private_icon'))
304 rc_config.get('rhodecode_show_private_icon'))
305 context.visual.stylify_metatags = str2bool(
305 context.visual.stylify_metatags = str2bool(
306 rc_config.get('rhodecode_stylify_metatags'))
306 rc_config.get('rhodecode_stylify_metatags'))
307 context.visual.dashboard_items = safe_int(
307 context.visual.dashboard_items = safe_int(
308 rc_config.get('rhodecode_dashboard_items', 100))
308 rc_config.get('rhodecode_dashboard_items', 100))
309 context.visual.admin_grid_items = safe_int(
309 context.visual.admin_grid_items = safe_int(
310 rc_config.get('rhodecode_admin_grid_items', 100))
310 rc_config.get('rhodecode_admin_grid_items', 100))
311 context.visual.show_revision_number = str2bool(
311 context.visual.show_revision_number = str2bool(
312 rc_config.get('rhodecode_show_revision_number', True))
312 rc_config.get('rhodecode_show_revision_number', True))
313 context.visual.show_sha_length = safe_int(
313 context.visual.show_sha_length = safe_int(
314 rc_config.get('rhodecode_show_sha_length', 100))
314 rc_config.get('rhodecode_show_sha_length', 100))
315 context.visual.repository_fields = str2bool(
315 context.visual.repository_fields = str2bool(
316 rc_config.get('rhodecode_repository_fields'))
316 rc_config.get('rhodecode_repository_fields'))
317 context.visual.show_version = str2bool(
317 context.visual.show_version = str2bool(
318 rc_config.get('rhodecode_show_version'))
318 rc_config.get('rhodecode_show_version'))
319 context.visual.use_gravatar = str2bool(
319 context.visual.use_gravatar = str2bool(
320 rc_config.get('rhodecode_use_gravatar'))
320 rc_config.get('rhodecode_use_gravatar'))
321 context.visual.gravatar_url = rc_config.get('rhodecode_gravatar_url')
321 context.visual.gravatar_url = rc_config.get('rhodecode_gravatar_url')
322 context.visual.default_renderer = rc_config.get(
322 context.visual.default_renderer = rc_config.get(
323 'rhodecode_markup_renderer', 'rst')
323 'rhodecode_markup_renderer', 'rst')
324 context.visual.comment_types = ChangesetComment.COMMENT_TYPES
324 context.visual.comment_types = ChangesetComment.COMMENT_TYPES
325 context.visual.rhodecode_support_url = \
325 context.visual.rhodecode_support_url = \
326 rc_config.get('rhodecode_support_url') or h.route_url('rhodecode_support')
326 rc_config.get('rhodecode_support_url') or h.route_url('rhodecode_support')
327
327
328 context.visual.affected_files_cut_off = 60
328 context.visual.affected_files_cut_off = 60
329
329
330 context.pre_code = rc_config.get('rhodecode_pre_code')
330 context.pre_code = rc_config.get('rhodecode_pre_code')
331 context.post_code = rc_config.get('rhodecode_post_code')
331 context.post_code = rc_config.get('rhodecode_post_code')
332 context.rhodecode_name = rc_config.get('rhodecode_title')
332 context.rhodecode_name = rc_config.get('rhodecode_title')
333 context.default_encodings = aslist(config.get('default_encoding'), sep=',')
333 context.default_encodings = aslist(config.get('default_encoding'), sep=',')
334 # if a default_encoding is specified in the request, it takes
334 # if a default_encoding is specified in the request, it takes
335 # priority
335 # priority
336 if request.GET.get('default_encoding'):
336 if request.GET.get('default_encoding'):
337 context.default_encodings.insert(0, request.GET.get('default_encoding'))
337 context.default_encodings.insert(0, request.GET.get('default_encoding'))
338 context.clone_uri_tmpl = rc_config.get('rhodecode_clone_uri_tmpl')
338 context.clone_uri_tmpl = rc_config.get('rhodecode_clone_uri_tmpl')
339 context.clone_uri_id_tmpl = rc_config.get('rhodecode_clone_uri_id_tmpl')
339 context.clone_uri_id_tmpl = rc_config.get('rhodecode_clone_uri_id_tmpl')
340 context.clone_uri_ssh_tmpl = rc_config.get('rhodecode_clone_uri_ssh_tmpl')
340 context.clone_uri_ssh_tmpl = rc_config.get('rhodecode_clone_uri_ssh_tmpl')
341
341
342 # INI stored
342 # INI stored
343 context.labs_active = str2bool(
343 context.labs_active = str2bool(
344 config.get('labs_settings_active', 'false'))
344 config.get('labs_settings_active', 'false'))
345 context.ssh_enabled = str2bool(
345 context.ssh_enabled = str2bool(
346 config.get('ssh.generate_authorized_keyfile', 'false'))
346 config.get('ssh.generate_authorized_keyfile', 'false'))
347 context.ssh_key_generator_enabled = str2bool(
347 context.ssh_key_generator_enabled = str2bool(
348 config.get('ssh.enable_ui_key_generator', 'true'))
348 config.get('ssh.enable_ui_key_generator', 'true'))
349
349
350 context.visual.allow_repo_location_change = str2bool(
351 config.get('allow_repo_location_change', True))
352 context.visual.allow_custom_hooks_settings = str2bool(
350 context.visual.allow_custom_hooks_settings = str2bool(
353 config.get('allow_custom_hooks_settings', True))
351 config.get('allow_custom_hooks_settings', True))
354 context.debug_style = str2bool(config.get('debug_style', False))
352 context.debug_style = str2bool(config.get('debug_style', False))
355
353
356 context.rhodecode_instanceid = config.get('instance_id')
354 context.rhodecode_instanceid = config.get('instance_id')
357
355
358 context.visual.cut_off_limit_diff = safe_int(
356 context.visual.cut_off_limit_diff = safe_int(
359 config.get('cut_off_limit_diff'), default=0)
357 config.get('cut_off_limit_diff'), default=0)
360 context.visual.cut_off_limit_file = safe_int(
358 context.visual.cut_off_limit_file = safe_int(
361 config.get('cut_off_limit_file'), default=0)
359 config.get('cut_off_limit_file'), default=0)
362
360
363 context.license = AttributeDict({})
361 context.license = AttributeDict({})
364 context.license.hide_license_info = str2bool(
362 context.license.hide_license_info = str2bool(
365 config.get('license.hide_license_info', False))
363 config.get('license.hide_license_info', False))
366
364
367 # AppEnlight
365 # AppEnlight
368 context.appenlight_enabled = config.get('appenlight', False)
366 context.appenlight_enabled = config.get('appenlight', False)
369 context.appenlight_api_public_key = config.get(
367 context.appenlight_api_public_key = config.get(
370 'appenlight.api_public_key', '')
368 'appenlight.api_public_key', '')
371 context.appenlight_server_url = config.get('appenlight.server_url', '')
369 context.appenlight_server_url = config.get('appenlight.server_url', '')
372
370
373 diffmode = {
371 diffmode = {
374 "unified": "unified",
372 "unified": "unified",
375 "sideside": "sideside"
373 "sideside": "sideside"
376 }.get(request.GET.get('diffmode'))
374 }.get(request.GET.get('diffmode'))
377
375
378 if is_api is not None:
376 if is_api is not None:
379 is_api = hasattr(request, 'rpc_user')
377 is_api = hasattr(request, 'rpc_user')
380 session_attrs = {
378 session_attrs = {
381 # defaults
379 # defaults
382 "clone_url_format": "http",
380 "clone_url_format": "http",
383 "diffmode": "sideside",
381 "diffmode": "sideside",
384 "license_fingerprint": request.session.get('license_fingerprint')
382 "license_fingerprint": request.session.get('license_fingerprint')
385 }
383 }
386
384
387 if not is_api:
385 if not is_api:
388 # don't access pyramid session for API calls
386 # don't access pyramid session for API calls
389 if diffmode and diffmode != request.session.get('rc_user_session_attr.diffmode'):
387 if diffmode and diffmode != request.session.get('rc_user_session_attr.diffmode'):
390 request.session['rc_user_session_attr.diffmode'] = diffmode
388 request.session['rc_user_session_attr.diffmode'] = diffmode
391
389
392 # session settings per user
390 # session settings per user
393
391
394 for k, v in list(request.session.items()):
392 for k, v in list(request.session.items()):
395 pref = 'rc_user_session_attr.'
393 pref = 'rc_user_session_attr.'
396 if k and k.startswith(pref):
394 if k and k.startswith(pref):
397 k = k[len(pref):]
395 k = k[len(pref):]
398 session_attrs[k] = v
396 session_attrs[k] = v
399
397
400 context.user_session_attrs = session_attrs
398 context.user_session_attrs = session_attrs
401
399
402 # JS template context
400 # JS template context
403 context.template_context = {
401 context.template_context = {
404 'repo_name': None,
402 'repo_name': None,
405 'repo_type': None,
403 'repo_type': None,
406 'repo_landing_commit': None,
404 'repo_landing_commit': None,
407 'rhodecode_user': {
405 'rhodecode_user': {
408 'username': None,
406 'username': None,
409 'email': None,
407 'email': None,
410 'notification_status': False
408 'notification_status': False
411 },
409 },
412 'session_attrs': session_attrs,
410 'session_attrs': session_attrs,
413 'visual': {
411 'visual': {
414 'default_renderer': None
412 'default_renderer': None
415 },
413 },
416 'commit_data': {
414 'commit_data': {
417 'commit_id': None
415 'commit_id': None
418 },
416 },
419 'pull_request_data': {'pull_request_id': None},
417 'pull_request_data': {'pull_request_id': None},
420 'timeago': {
418 'timeago': {
421 'refresh_time': 120 * 1000,
419 'refresh_time': 120 * 1000,
422 'cutoff_limit': 1000 * 60 * 60 * 24 * 7
420 'cutoff_limit': 1000 * 60 * 60 * 24 * 7
423 },
421 },
424 'pyramid_dispatch': {
422 'pyramid_dispatch': {
425
423
426 },
424 },
427 'extra': {'plugins': {}}
425 'extra': {'plugins': {}}
428 }
426 }
429 # END CONFIG VARS
427 # END CONFIG VARS
430 if is_api:
428 if is_api:
431 csrf_token = None
429 csrf_token = None
432 else:
430 else:
433 csrf_token = auth.get_csrf_token(session=request.session)
431 csrf_token = auth.get_csrf_token(session=request.session)
434
432
435 context.csrf_token = csrf_token
433 context.csrf_token = csrf_token
436 context.backends = list(rhodecode.BACKENDS.keys())
434 context.backends = list(rhodecode.BACKENDS.keys())
437
435
438 unread_count = 0
436 unread_count = 0
439 user_bookmark_list = []
437 user_bookmark_list = []
440 if user_id:
438 if user_id:
441 unread_count = NotificationModel().get_unread_cnt_for_user(user_id)
439 unread_count = NotificationModel().get_unread_cnt_for_user(user_id)
442 user_bookmark_list = UserBookmark.get_bookmarks_for_user(user_id)
440 user_bookmark_list = UserBookmark.get_bookmarks_for_user(user_id)
443 context.unread_notifications = unread_count
441 context.unread_notifications = unread_count
444 context.bookmark_items = user_bookmark_list
442 context.bookmark_items = user_bookmark_list
445
443
446 # web case
444 # web case
447 if hasattr(request, 'user'):
445 if hasattr(request, 'user'):
448 context.auth_user = request.user
446 context.auth_user = request.user
449 context.rhodecode_user = request.user
447 context.rhodecode_user = request.user
450
448
451 # api case
449 # api case
452 if hasattr(request, 'rpc_user'):
450 if hasattr(request, 'rpc_user'):
453 context.auth_user = request.rpc_user
451 context.auth_user = request.rpc_user
454 context.rhodecode_user = request.rpc_user
452 context.rhodecode_user = request.rpc_user
455
453
456 # attach the whole call context to the request
454 # attach the whole call context to the request
457 request.set_call_context(context)
455 request.set_call_context(context)
458
456
459
457
460 def get_auth_user(request):
458 def get_auth_user(request):
461 environ = request.environ
459 environ = request.environ
462 session = request.session
460 session = request.session
463
461
464 ip_addr = get_ip_addr(environ)
462 ip_addr = get_ip_addr(environ)
465
463
466 # make sure that we update permissions each time we call controller
464 # make sure that we update permissions each time we call controller
467 _auth_token = (
465 _auth_token = (
468 # ?auth_token=XXX
466 # ?auth_token=XXX
469 request.GET.get('auth_token', '')
467 request.GET.get('auth_token', '')
470 # ?api_key=XXX !LEGACY
468 # ?api_key=XXX !LEGACY
471 or request.GET.get('api_key', '')
469 or request.GET.get('api_key', '')
472 # or headers....
470 # or headers....
473 or request.headers.get('X-Rc-Auth-Token', '')
471 or request.headers.get('X-Rc-Auth-Token', '')
474 )
472 )
475 if not _auth_token and request.matchdict:
473 if not _auth_token and request.matchdict:
476 url_auth_token = request.matchdict.get('_auth_token')
474 url_auth_token = request.matchdict.get('_auth_token')
477 _auth_token = url_auth_token
475 _auth_token = url_auth_token
478 if _auth_token:
476 if _auth_token:
479 log.debug('Using URL extracted auth token `...%s`', _auth_token[-4:])
477 log.debug('Using URL extracted auth token `...%s`', _auth_token[-4:])
480
478
481 if _auth_token:
479 if _auth_token:
482 # when using API_KEY we assume user exists, and
480 # when using API_KEY we assume user exists, and
483 # doesn't need auth based on cookies.
481 # doesn't need auth based on cookies.
484 auth_user = AuthUser(api_key=_auth_token, ip_addr=ip_addr)
482 auth_user = AuthUser(api_key=_auth_token, ip_addr=ip_addr)
485 authenticated = False
483 authenticated = False
486 else:
484 else:
487 cookie_store = CookieStoreWrapper(session.get('rhodecode_user'))
485 cookie_store = CookieStoreWrapper(session.get('rhodecode_user'))
488 try:
486 try:
489 auth_user = AuthUser(user_id=cookie_store.get('user_id', None),
487 auth_user = AuthUser(user_id=cookie_store.get('user_id', None),
490 ip_addr=ip_addr)
488 ip_addr=ip_addr)
491 except UserCreationError as e:
489 except UserCreationError as e:
492 h.flash(e, 'error')
490 h.flash(e, 'error')
493 # container auth or other auth functions that create users
491 # container auth or other auth functions that create users
494 # on the fly can throw this exception signaling that there's
492 # on the fly can throw this exception signaling that there's
495 # issue with user creation, explanation should be provided
493 # issue with user creation, explanation should be provided
496 # in Exception itself. We then create a simple blank
494 # in Exception itself. We then create a simple blank
497 # AuthUser
495 # AuthUser
498 auth_user = AuthUser(ip_addr=ip_addr)
496 auth_user = AuthUser(ip_addr=ip_addr)
499
497
500 # in case someone changes a password for user it triggers session
498 # in case someone changes a password for user it triggers session
501 # flush and forces a re-login
499 # flush and forces a re-login
502 if password_changed(auth_user, session):
500 if password_changed(auth_user, session):
503 session.invalidate()
501 session.invalidate()
504 cookie_store = CookieStoreWrapper(session.get('rhodecode_user'))
502 cookie_store = CookieStoreWrapper(session.get('rhodecode_user'))
505 auth_user = AuthUser(ip_addr=ip_addr)
503 auth_user = AuthUser(ip_addr=ip_addr)
506
504
507 authenticated = cookie_store.get('is_authenticated')
505 authenticated = cookie_store.get('is_authenticated')
508
506
509 if not auth_user.is_authenticated and auth_user.is_user_object:
507 if not auth_user.is_authenticated and auth_user.is_user_object:
510 # user is not authenticated and not empty
508 # user is not authenticated and not empty
511 auth_user.set_authenticated(authenticated)
509 auth_user.set_authenticated(authenticated)
512
510
513 return auth_user, _auth_token
511 return auth_user, _auth_token
514
512
515
513
516 def h_filter(s):
514 def h_filter(s):
517 """
515 """
518 Custom filter for Mako templates. Mako by default uses `markupsafe.escape`;
516 Custom filter for Mako templates. Mako by default uses `markupsafe.escape`;
519 we wrap this with additional functionality that converts None to empty
517 we wrap this with additional functionality that converts None to empty
520 strings.
518 strings.
521 """
519 """
522 if s is None:
520 if s is None:
523 return markupsafe.Markup()
521 return markupsafe.Markup()
524 return markupsafe.escape(s)
522 return markupsafe.escape(s)
525
523
526
524
527 def add_events_routes(config):
525 def add_events_routes(config):
528 """
526 """
529 Adds routing that can be used in events. Because some events are triggered
527 Adds routing that can be used in events. Because some events are triggered
530 outside of the pyramid context, we need to bootstrap the request with some
528 outside of the pyramid context, we need to bootstrap the request with some
531 routing registered.
529 routing registered.
532 """
530 """
533
531
534 from rhodecode.apps._base import ADMIN_PREFIX
532 from rhodecode.apps._base import ADMIN_PREFIX
535
533
536 config.add_route(name='home', pattern='/')
534 config.add_route(name='home', pattern='/')
537 config.add_route(name='main_page_repos_data', pattern='/_home_repos')
535 config.add_route(name='main_page_repos_data', pattern='/_home_repos')
538 config.add_route(name='main_page_repo_groups_data', pattern='/_home_repo_groups')
536 config.add_route(name='main_page_repo_groups_data', pattern='/_home_repo_groups')
539
537
540 config.add_route(name='login', pattern=ADMIN_PREFIX + '/login')
538 config.add_route(name='login', pattern=ADMIN_PREFIX + '/login')
541 config.add_route(name='logout', pattern=ADMIN_PREFIX + '/logout')
539 config.add_route(name='logout', pattern=ADMIN_PREFIX + '/logout')
542 config.add_route(name='repo_summary', pattern='/{repo_name}')
540 config.add_route(name='repo_summary', pattern='/{repo_name}')
543 config.add_route(name='repo_summary_explicit', pattern='/{repo_name}/summary')
541 config.add_route(name='repo_summary_explicit', pattern='/{repo_name}/summary')
544 config.add_route(name='repo_group_home', pattern='/{repo_group_name}')
542 config.add_route(name='repo_group_home', pattern='/{repo_group_name}')
545
543
546 config.add_route(name='pullrequest_show',
544 config.add_route(name='pullrequest_show',
547 pattern='/{repo_name}/pull-request/{pull_request_id}')
545 pattern='/{repo_name}/pull-request/{pull_request_id}')
548 config.add_route(name='pull_requests_global',
546 config.add_route(name='pull_requests_global',
549 pattern='/pull-request/{pull_request_id}')
547 pattern='/pull-request/{pull_request_id}')
550
548
551 config.add_route(name='repo_commit',
549 config.add_route(name='repo_commit',
552 pattern='/{repo_name}/changeset/{commit_id}')
550 pattern='/{repo_name}/changeset/{commit_id}')
553 config.add_route(name='repo_files',
551 config.add_route(name='repo_files',
554 pattern='/{repo_name}/files/{commit_id}/{f_path}')
552 pattern='/{repo_name}/files/{commit_id}/{f_path}')
555
553
556 config.add_route(name='hovercard_user',
554 config.add_route(name='hovercard_user',
557 pattern='/_hovercard/user/{user_id}')
555 pattern='/_hovercard/user/{user_id}')
558
556
559 config.add_route(name='hovercard_user_group',
557 config.add_route(name='hovercard_user_group',
560 pattern='/_hovercard/user_group/{user_group_id}')
558 pattern='/_hovercard/user_group/{user_group_id}')
561
559
562 config.add_route(name='hovercard_pull_request',
560 config.add_route(name='hovercard_pull_request',
563 pattern='/_hovercard/pull_request/{pull_request_id}')
561 pattern='/_hovercard/pull_request/{pull_request_id}')
564
562
565 config.add_route(name='hovercard_repo_commit',
563 config.add_route(name='hovercard_repo_commit',
566 pattern='/_hovercard/commit/{repo_name}/{commit_id}')
564 pattern='/_hovercard/commit/{repo_name}/{commit_id}')
567
565
568
566
569 def bootstrap_config(request, registry_name='RcTestRegistry'):
567 def bootstrap_config(request, registry_name='RcTestRegistry'):
570 from rhodecode.config.config_maker import sanitize_settings_and_apply_defaults
568 from rhodecode.config.config_maker import sanitize_settings_and_apply_defaults
571 import pyramid.testing
569 import pyramid.testing
572 registry = pyramid.testing.Registry(registry_name)
570 registry = pyramid.testing.Registry(registry_name)
573
571
574 global_config = {'__file__': ''}
572 global_config = {'__file__': ''}
575
573
576 config = pyramid.testing.setUp(registry=registry, request=request)
574 config = pyramid.testing.setUp(registry=registry, request=request)
577 sanitize_settings_and_apply_defaults(global_config, config.registry.settings)
575 sanitize_settings_and_apply_defaults(global_config, config.registry.settings)
578
576
579 # allow pyramid lookup in testing
577 # allow pyramid lookup in testing
580 config.include('pyramid_mako')
578 config.include('pyramid_mako')
581 config.include('rhodecode.lib.rc_beaker')
579 config.include('rhodecode.lib.rc_beaker')
582 config.include('rhodecode.lib.rc_cache')
580 config.include('rhodecode.lib.rc_cache')
583 config.include('rhodecode.lib.rc_cache.archive_cache')
581 config.include('rhodecode.lib.rc_cache.archive_cache')
584 add_events_routes(config)
582 add_events_routes(config)
585
583
586 return config
584 return config
587
585
588
586
589 def bootstrap_request(**kwargs):
587 def bootstrap_request(**kwargs):
590 """
588 """
591 Returns a thin version of the Request object that is used in non-web contexts like testing/celery
589 Returns a thin version of the Request object that is used in non-web contexts like testing/celery
592 """
590 """
593
591
594 import pyramid.testing
592 import pyramid.testing
595 from rhodecode.lib.request import ThinRequest as _ThinRequest
593 from rhodecode.lib.request import ThinRequest as _ThinRequest
596
594
597 class ThinRequest(_ThinRequest):
595 class ThinRequest(_ThinRequest):
598 application_url = kwargs.pop('application_url', 'http://example.com')
596 application_url = kwargs.pop('application_url', 'http://example.com')
599 host = kwargs.pop('host', 'example.com:80')
597 host = kwargs.pop('host', 'example.com:80')
600 domain = kwargs.pop('domain', 'example.com')
598 domain = kwargs.pop('domain', 'example.com')
601
599
602 class ThinSession(pyramid.testing.DummySession):
600 class ThinSession(pyramid.testing.DummySession):
603 def save(*arg, **kw):
601 def save(*arg, **kw):
604 pass
602 pass
605
603
606 request = ThinRequest(**kwargs)
604 request = ThinRequest(**kwargs)
607 request.session = ThinSession()
605 request.session = ThinSession()
608
606
609 return request
607 return request
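
The IP helpers above (get_ip_addr, _filter_proxy, _filter_port) normalize proxy chains and strip ports before an address is stored in auth and hook data. A small usage sketch against a hand-built WSGI environ; the addresses are made up, and the import assumes these helpers live in rhodecode.lib.base, as the middleware imports below indicate:

from rhodecode.lib.base import get_ip_addr

environ = {
    'HTTP_X_FORWARDED_FOR': '203.0.113.7:51234, 10.0.0.1',
    'REMOTE_ADDR': '10.0.0.1',
}
# _filter_proxy keeps the left-most (original client) entry,
# _filter_port then strips the ':51234' suffix
print(get_ip_addr(environ))  # '203.0.113.7'
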
@@ -1,701 +1,694 b''
1
1
2
2
3 # Copyright (C) 2014-2023 RhodeCode GmbH
3 # Copyright (C) 2014-2023 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 SimpleVCS middleware for handling protocol requests (push/clone etc.).
22 SimpleVCS middleware for handling protocol requests (push/clone etc.).
23 It's implemented with a basic auth function.
23 It's implemented with a basic auth function.
24 """
24 """
25
25
26 import os
26 import os
27 import re
27 import re
28 import io
28 import io
29 import logging
29 import logging
30 import importlib
30 import importlib
31 from functools import wraps
31 from functools import wraps
32 from lxml import etree
32 from lxml import etree
33
33
34 import time
34 import time
35 from paste.httpheaders import REMOTE_USER, AUTH_TYPE
35 from paste.httpheaders import REMOTE_USER, AUTH_TYPE
36
36
37 from pyramid.httpexceptions import (
37 from pyramid.httpexceptions import (
38 HTTPNotFound, HTTPForbidden, HTTPNotAcceptable, HTTPInternalServerError)
38 HTTPNotFound, HTTPForbidden, HTTPNotAcceptable, HTTPInternalServerError)
39 from zope.cachedescriptors.property import Lazy as LazyProperty
39 from zope.cachedescriptors.property import Lazy as LazyProperty
40
40
41 import rhodecode
41 import rhodecode
42 from rhodecode.authentication.base import authenticate, VCS_TYPE, loadplugin
42 from rhodecode.authentication.base import authenticate, VCS_TYPE, loadplugin
43 from rhodecode.lib import rc_cache
43 from rhodecode.lib import rc_cache
44 from rhodecode.lib.auth import AuthUser, HasPermissionAnyMiddleware
44 from rhodecode.lib.auth import AuthUser, HasPermissionAnyMiddleware
45 from rhodecode.lib.base import (
45 from rhodecode.lib.base import (
46 BasicAuth, get_ip_addr, get_user_agent, vcs_operation_context)
46 BasicAuth, get_ip_addr, get_user_agent, vcs_operation_context)
47 from rhodecode.lib.exceptions import (UserCreationError, NotAllowedToCreateUserError)
47 from rhodecode.lib.exceptions import (UserCreationError, NotAllowedToCreateUserError)
48 from rhodecode.lib.hook_daemon.base import prepare_callback_daemon
48 from rhodecode.lib.hook_daemon.base import prepare_callback_daemon
49 from rhodecode.lib.middleware import appenlight
49 from rhodecode.lib.middleware import appenlight
50 from rhodecode.lib.middleware.utils import scm_app_http
50 from rhodecode.lib.middleware.utils import scm_app_http
51 from rhodecode.lib.str_utils import safe_bytes
51 from rhodecode.lib.str_utils import safe_bytes
52 from rhodecode.lib.utils import is_valid_repo, SLUG_RE
52 from rhodecode.lib.utils import is_valid_repo, SLUG_RE
53 from rhodecode.lib.utils2 import safe_str, fix_PATH, str2bool
53 from rhodecode.lib.utils2 import safe_str, fix_PATH, str2bool
54 from rhodecode.lib.vcs.conf import settings as vcs_settings
54 from rhodecode.lib.vcs.conf import settings as vcs_settings
55 from rhodecode.lib.vcs.backends import base
55 from rhodecode.lib.vcs.backends import base
56
56
57 from rhodecode.model import meta
57 from rhodecode.model import meta
58 from rhodecode.model.db import User, Repository, PullRequest
58 from rhodecode.model.db import User, Repository, PullRequest
59 from rhodecode.model.scm import ScmModel
59 from rhodecode.model.scm import ScmModel
60 from rhodecode.model.pull_request import PullRequestModel
60 from rhodecode.model.pull_request import PullRequestModel
61 from rhodecode.model.settings import SettingsModel, VcsSettingsModel
61 from rhodecode.model.settings import SettingsModel, VcsSettingsModel
62
62
63 log = logging.getLogger(__name__)
63 log = logging.getLogger(__name__)
64
64
65
65
66 def extract_svn_txn_id(acl_repo_name, data: bytes):
66 def extract_svn_txn_id(acl_repo_name, data: bytes):
67 """
67 """
68 Helper method for extraction of svn txn_id from submitted XML data during
68 Helper method for extraction of svn txn_id from submitted XML data during
69 POST operations
69 POST operations
70 """
70 """
71
71
72 try:
72 try:
73 root = etree.fromstring(data)
73 root = etree.fromstring(data)
74 pat = re.compile(r'/txn/(?P<txn_id>.*)')
74 pat = re.compile(r'/txn/(?P<txn_id>.*)')
75 for el in root:
75 for el in root:
76 if el.tag == '{DAV:}source':
76 if el.tag == '{DAV:}source':
77 for sub_el in el:
77 for sub_el in el:
78 if sub_el.tag == '{DAV:}href':
78 if sub_el.tag == '{DAV:}href':
79 match = pat.search(sub_el.text)
79 match = pat.search(sub_el.text)
80 if match:
80 if match:
81 svn_tx_id = match.groupdict()['txn_id']
81 svn_tx_id = match.groupdict()['txn_id']
82 txn_id = rc_cache.utils.compute_key_from_params(
82 txn_id = rc_cache.utils.compute_key_from_params(
83 acl_repo_name, svn_tx_id)
83 acl_repo_name, svn_tx_id)
84 return txn_id
84 return txn_id
85 except Exception:
85 except Exception:
86 log.exception('Failed to extract txn_id')
86 log.exception('Failed to extract txn_id')
87
87
88
88
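For illustration, a minimal sketch of the XML shape this helper expects from an SVN MERGE request. The request body and repository path below are invented, and the real code additionally hashes the extracted id together with the repo name via rc_cache.utils.compute_key_from_params:

import re
from lxml import etree

merge_body = b"""<?xml version="1.0" encoding="utf-8"?>
<D:merge xmlns:D="DAV:">
  <D:source>
    <D:href>/myrepo/!svn/txn/120-4e</D:href>
  </D:source>
</D:merge>"""

root = etree.fromstring(merge_body)
for el in root:
    if el.tag == '{DAV:}source':
        for sub_el in el:
            if sub_el.tag == '{DAV:}href':
                match = re.search(r'/txn/(?P<txn_id>.*)', sub_el.text)
                if match:
                    print(match.groupdict()['txn_id'])  # 120-4e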
89 def initialize_generator(factory):
89 def initialize_generator(factory):
90 """
90 """
91 Initializes the returned generator by draining its first element.
91 Initializes the returned generator by draining its first element.
92
92
93 This can be used to give a generator an initializer, which is the code
93 This can be used to give a generator an initializer, which is the code
94 up to the first yield statement. This decorator enforces that the first
94 up to the first yield statement. This decorator enforces that the first
95 produced element has the value ``"__init__"`` to make its special
95 produced element has the value ``"__init__"`` to make its special
96 purpose very explicit in the using code.
96 purpose very explicit in the using code.
97 """
97 """
98
98
99 @wraps(factory)
99 @wraps(factory)
100 def wrapper(*args, **kwargs):
100 def wrapper(*args, **kwargs):
101 gen = factory(*args, **kwargs)
101 gen = factory(*args, **kwargs)
102 try:
102 try:
103 init = next(gen)
103 init = next(gen)
104 except StopIteration:
104 except StopIteration:
105 raise ValueError('Generator must yield at least one element.')
105 raise ValueError('Generator must yield at least one element.')
106 if init != "__init__":
106 if init != "__init__":
107 raise ValueError('First yielded element must be "__init__".')
107 raise ValueError('First yielded element must be "__init__".')
108 return gen
108 return gen
109 return wrapper
109 return wrapper
110
110
111
111
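A small usage sketch of the decorator above; the generator name and yielded values are invented. The point is that everything before the first yield runs as soon as the factory is called, and the "__init__" sentinel never reaches the caller:

@initialize_generator
def streamed_response():
    # setup code: runs when streamed_response() is called, analogous to the
    # call into the WSGI app before the first yield in _generate_vcs_response
    header = 'setup complete'
    yield "__init__"        # drained by the decorator
    yield header
    yield 'chunk-1'

gen = streamed_response()   # initializer has already executed at this point
print(list(gen))            # ['setup complete', 'chunk-1']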
112 class SimpleVCS(object):
112 class SimpleVCS(object):
113 """Common functionality for SCM HTTP handlers."""
113 """Common functionality for SCM HTTP handlers."""
114
114
115 SCM = 'unknown'
115 SCM = 'unknown'
116
116
117 acl_repo_name = None
117 acl_repo_name = None
118 url_repo_name = None
118 url_repo_name = None
119 vcs_repo_name = None
119 vcs_repo_name = None
120 rc_extras = {}
120 rc_extras = {}
121
121
122 # We have to handle requests to shadow repositories differently than requests
122 # We have to handle requests to shadow repositories differently than requests
123 # to normal repositories. Therefore we have to distinguish them. To do this
123 # to normal repositories. Therefore we have to distinguish them. To do this
124 # we use this regex which will match only on URLs pointing to shadow
124 # we use this regex which will match only on URLs pointing to shadow
125 # repositories.
125 # repositories.
126 shadow_repo_re = re.compile(
126 shadow_repo_re = re.compile(
127 '(?P<groups>(?:{slug_pat}/)*)' # repo groups
127 '(?P<groups>(?:{slug_pat}/)*)' # repo groups
128 '(?P<target>{slug_pat})/' # target repo
128 '(?P<target>{slug_pat})/' # target repo
129 'pull-request/(?P<pr_id>\\d+)/' # pull request
129 'pull-request/(?P<pr_id>\\d+)/' # pull request
130 'repository$' # shadow repo
130 'repository$' # shadow repo
131 .format(slug_pat=SLUG_RE.pattern))
131 .format(slug_pat=SLUG_RE.pattern))
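A quick sketch of what this regex matches, using a simplified stand-in for SLUG_RE (the real slug pattern is stricter than the placeholder used here):

import re

slug_pat = '[^/]+'  # simplified stand-in for SLUG_RE.pattern, illustration only
demo_shadow_repo_re = re.compile(
    '(?P<groups>(?:{slug_pat}/)*)'
    '(?P<target>{slug_pat})/'
    'pull-request/(?P<pr_id>\\d+)/'
    'repository$'.format(slug_pat=slug_pat))

match = demo_shadow_repo_re.match('RepoGroup/MyRepo/pull-request/3/repository')
print(match.groupdict())
# {'groups': 'RepoGroup/', 'target': 'MyRepo', 'pr_id': '3'}

print(demo_shadow_repo_re.match('RepoGroup/MyRepo'))  # None - a normal repo URL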
132
132
133 def __init__(self, config, registry):
133 def __init__(self, config, registry):
134 self.registry = registry
134 self.registry = registry
135 self.config = config
135 self.config = config
136 # re-populated by specialized middleware
136 # re-populated by specialized middleware
137 self.repo_vcs_config = base.Config()
137 self.repo_vcs_config = base.Config()
138
138
139 rc_settings = SettingsModel().get_all_settings(cache=True, from_request=False)
139 rc_settings = SettingsModel().get_all_settings(cache=True, from_request=False)
140 realm = rc_settings.get('rhodecode_realm') or 'RhodeCode AUTH'
140 realm = rc_settings.get('rhodecode_realm') or 'RhodeCode AUTH'
141
141
142 # authenticate this VCS request using authfunc
142 # authenticate this VCS request using authfunc
143 auth_ret_code_detection = \
143 auth_ret_code_detection = \
144 str2bool(self.config.get('auth_ret_code_detection', False))
144 str2bool(self.config.get('auth_ret_code_detection', False))
145 self.authenticate = BasicAuth(
145 self.authenticate = BasicAuth(
146 '', authenticate, registry, config.get('auth_ret_code'),
146 '', authenticate, registry, config.get('auth_ret_code'),
147 auth_ret_code_detection, rc_realm=realm)
147 auth_ret_code_detection, rc_realm=realm)
148 self.ip_addr = '0.0.0.0'
148 self.ip_addr = '0.0.0.0'
149
149
150 @LazyProperty
150 @LazyProperty
151 def global_vcs_config(self):
151 def global_vcs_config(self):
152 try:
152 try:
153 return VcsSettingsModel().get_ui_settings_as_config_obj()
153 return VcsSettingsModel().get_ui_settings_as_config_obj()
154 except Exception:
154 except Exception:
155 return base.Config()
155 return base.Config()
156
156
157 @property
157 @property
158 def base_path(self):
158 def base_path(self):
159 settings_path = self.repo_vcs_config.get(*VcsSettingsModel.PATH_SETTING)
160
161 if not settings_path:
162 settings_path = self.global_vcs_config.get(*VcsSettingsModel.PATH_SETTING)
163
164 if not settings_path:
165 # try, maybe we passed in explicitly as config option
166 settings_path = self.config.get('base_path')
167
168 if not settings_path:
169 raise ValueError('FATAL: base_path is empty')
170 return settings_path
159 settings_path = self.config.get('repo_store.path')
160
161 if not settings_path:
162 raise ValueError('FATAL: repo_store.path is empty')
163 return settings_path
171
164
172 def set_repo_names(self, environ):
165 def set_repo_names(self, environ):
173 """
166 """
174 This will populate the attributes acl_repo_name, url_repo_name,
167 This will populate the attributes acl_repo_name, url_repo_name,
175 vcs_repo_name and is_shadow_repo. In case of requests to normal (non
168 vcs_repo_name and is_shadow_repo. In case of requests to normal (non
176 shadow) repositories all names are equal. In case of requests to a
169 shadow) repositories all names are equal. In case of requests to a
177 shadow repository the acl-name points to the target repo of the pull
170 shadow repository the acl-name points to the target repo of the pull
178 request and the vcs-name points to the shadow repo file system path.
171 request and the vcs-name points to the shadow repo file system path.
179 The url-name is always the URL used by the vcs client program.
172 The url-name is always the URL used by the vcs client program.
180
173
181 Example in case of a shadow repo:
174 Example in case of a shadow repo:
182 acl_repo_name = RepoGroup/MyRepo
175 acl_repo_name = RepoGroup/MyRepo
183 url_repo_name = RepoGroup/MyRepo/pull-request/3/repository
176 url_repo_name = RepoGroup/MyRepo/pull-request/3/repository
184 vcs_repo_name = /repo/base/path/RepoGroup/.__shadow_MyRepo_pr-3'
177 vcs_repo_name = /repo/base/path/RepoGroup/.__shadow_MyRepo_pr-3'
185 """
178 """
186 # First we set the repo name from URL for all attributes. This is the
179 # First we set the repo name from URL for all attributes. This is the
187 # default if handling normal (non shadow) repo requests.
180 # default if handling normal (non shadow) repo requests.
188 self.url_repo_name = self._get_repository_name(environ)
181 self.url_repo_name = self._get_repository_name(environ)
189 self.acl_repo_name = self.vcs_repo_name = self.url_repo_name
182 self.acl_repo_name = self.vcs_repo_name = self.url_repo_name
190 self.is_shadow_repo = False
183 self.is_shadow_repo = False
191
184
192 # Check if this is a request to a shadow repository.
185 # Check if this is a request to a shadow repository.
193 match = self.shadow_repo_re.match(self.url_repo_name)
186 match = self.shadow_repo_re.match(self.url_repo_name)
194 if match:
187 if match:
195 match_dict = match.groupdict()
188 match_dict = match.groupdict()
196
189
197 # Build acl repo name from regex match.
190 # Build acl repo name from regex match.
198 acl_repo_name = safe_str('{groups}{target}'.format(
191 acl_repo_name = safe_str('{groups}{target}'.format(
199 groups=match_dict['groups'] or '',
192 groups=match_dict['groups'] or '',
200 target=match_dict['target']))
193 target=match_dict['target']))
201
194
202 # Retrieve pull request instance by ID from regex match.
195 # Retrieve pull request instance by ID from regex match.
203 pull_request = PullRequest.get(match_dict['pr_id'])
196 pull_request = PullRequest.get(match_dict['pr_id'])
204
197
205 # Only proceed if we got a pull request and if acl repo name from
198 # Only proceed if we got a pull request and if acl repo name from
206 # URL equals the target repo name of the pull request.
199 # URL equals the target repo name of the pull request.
207 if pull_request and (acl_repo_name == pull_request.target_repo.repo_name):
200 if pull_request and (acl_repo_name == pull_request.target_repo.repo_name):
208
201
209 # Get file system path to shadow repository.
202 # Get file system path to shadow repository.
210 workspace_id = PullRequestModel()._workspace_id(pull_request)
203 workspace_id = PullRequestModel()._workspace_id(pull_request)
211 vcs_repo_name = pull_request.target_repo.get_shadow_repository_path(workspace_id)
204 vcs_repo_name = pull_request.target_repo.get_shadow_repository_path(workspace_id)
212
205
213 # Store names for later usage.
206 # Store names for later usage.
214 self.vcs_repo_name = vcs_repo_name
207 self.vcs_repo_name = vcs_repo_name
215 self.acl_repo_name = acl_repo_name
208 self.acl_repo_name = acl_repo_name
216 self.is_shadow_repo = True
209 self.is_shadow_repo = True
217
210
218 log.debug('Setting all VCS repository names: %s', {
211 log.debug('Setting all VCS repository names: %s', {
219 'acl_repo_name': self.acl_repo_name,
212 'acl_repo_name': self.acl_repo_name,
220 'url_repo_name': self.url_repo_name,
213 'url_repo_name': self.url_repo_name,
221 'vcs_repo_name': self.vcs_repo_name,
214 'vcs_repo_name': self.vcs_repo_name,
222 })
215 })
223
216
224 @property
217 @property
225 def scm_app(self):
218 def scm_app(self):
226 custom_implementation = self.config['vcs.scm_app_implementation']
219 custom_implementation = self.config['vcs.scm_app_implementation']
227 if custom_implementation == 'http':
220 if custom_implementation == 'http':
228 log.debug('Using HTTP implementation of scm app.')
221 log.debug('Using HTTP implementation of scm app.')
229 scm_app_impl = scm_app_http
222 scm_app_impl = scm_app_http
230 else:
223 else:
231 log.debug('Using custom implementation of scm_app: "{}"'.format(
224 log.debug('Using custom implementation of scm_app: "{}"'.format(
232 custom_implementation))
225 custom_implementation))
233 scm_app_impl = importlib.import_module(custom_implementation)
226 scm_app_impl = importlib.import_module(custom_implementation)
234 return scm_app_impl
227 return scm_app_impl
235
228
236 def _get_by_id(self, repo_name):
229 def _get_by_id(self, repo_name):
237 """
230 """
238 Gets the special pattern _<ID> from the clone URL and tries to replace it
231 Gets the special pattern _<ID> from the clone URL and tries to replace it
239 with a repository name, to support non-changeable _<ID> URLs
232 with a repository name, to support non-changeable _<ID> URLs
240 """
233 """
241
234
242 data = repo_name.split('/')
235 data = repo_name.split('/')
243 if len(data) >= 2:
236 if len(data) >= 2:
244 from rhodecode.model.repo import RepoModel
237 from rhodecode.model.repo import RepoModel
245 by_id_match = RepoModel().get_repo_by_id(repo_name)
238 by_id_match = RepoModel().get_repo_by_id(repo_name)
246 if by_id_match:
239 if by_id_match:
247 data[1] = by_id_match.repo_name
240 data[1] = by_id_match.repo_name
248
241
249 # Because PEP-3333-WSGI uses bytes-tunneled-in-latin-1 as PATH_INFO
242 # Because PEP-3333-WSGI uses bytes-tunneled-in-latin-1 as PATH_INFO
250 # and we use this data
243 # and we use this data
251 maybe_new_path = '/'.join(data)
244 maybe_new_path = '/'.join(data)
252 return safe_bytes(maybe_new_path).decode('latin1')
245 return safe_bytes(maybe_new_path).decode('latin1')
253
246
254 def _invalidate_cache(self, repo_name):
247 def _invalidate_cache(self, repo_name):
255 """
248 """
256 Marks the cache for this repository for invalidation on next access
249 Marks the cache for this repository for invalidation on next access
257
250
258 :param repo_name: full repo name, also a cache key
251 :param repo_name: full repo name, also a cache key
259 """
252 """
260 ScmModel().mark_for_invalidation(repo_name)
253 ScmModel().mark_for_invalidation(repo_name)
261
254
262 def is_valid_and_existing_repo(self, repo_name, base_path, scm_type):
255 def is_valid_and_existing_repo(self, repo_name, base_path, scm_type):
263 db_repo = Repository.get_by_repo_name(repo_name)
256 db_repo = Repository.get_by_repo_name(repo_name)
264 if not db_repo:
257 if not db_repo:
265 log.debug('Repository `%s` not found inside the database.',
258 log.debug('Repository `%s` not found inside the database.',
266 repo_name)
259 repo_name)
267 return False
260 return False
268
261
269 if db_repo.repo_type != scm_type:
262 if db_repo.repo_type != scm_type:
270 log.warning(
263 log.warning(
271 'Repository `%s` has incorrect scm_type, expected %s got %s',
264 'Repository `%s` has incorrect scm_type, expected %s got %s',
272 repo_name, db_repo.repo_type, scm_type)
265 repo_name, db_repo.repo_type, scm_type)
273 return False
266 return False
274
267
275 config = db_repo._config
268 config = db_repo._config
276 config.set('extensions', 'largefiles', '')
269 config.set('extensions', 'largefiles', '')
277 return is_valid_repo(
270 return is_valid_repo(
278 repo_name, base_path,
271 repo_name, base_path,
279 explicit_scm=scm_type, expect_scm=scm_type, config=config)
272 explicit_scm=scm_type, expect_scm=scm_type, config=config)
280
273
281 def valid_and_active_user(self, user):
274 def valid_and_active_user(self, user):
282 """
275 """
283 Checks that the user is not empty and, if it is actually a user object,
276 Checks that the user is not empty and, if it is actually a user object,
284 whether it is active.
277 whether it is active.
285
278
286 :param user: user object or None
279 :param user: user object or None
287 :return: boolean
280 :return: boolean
288 """
281 """
289 if user is None:
282 if user is None:
290 return False
283 return False
291
284
292 elif user.active:
285 elif user.active:
293 return True
286 return True
294
287
295 return False
288 return False
296
289
297 @property
290 @property
298 def is_shadow_repo_dir(self):
291 def is_shadow_repo_dir(self):
299 return os.path.isdir(self.vcs_repo_name)
292 return os.path.isdir(self.vcs_repo_name)
300
293
301 def _check_permission(self, action, user, auth_user, repo_name, ip_addr=None,
294 def _check_permission(self, action, user, auth_user, repo_name, ip_addr=None,
302 plugin_id='', plugin_cache_active=False, cache_ttl=0):
295 plugin_id='', plugin_cache_active=False, cache_ttl=0):
303 """
296 """
304 Checks permissions using the action (push/pull), user and repository
297 Checks permissions using the action (push/pull), user and repository
305 name. If plugin_cache and ttl are set it will use the plugin which
298 name. If plugin_cache and ttl are set it will use the plugin which
306 authenticated the user to store the cached permissions result for
299 authenticated the user to store the cached permissions result for
307 cache_ttl seconds
300 cache_ttl seconds
308
301
309 :param action: push or pull action
302 :param action: push or pull action
310 :param user: user instance
303 :param user: user instance
311 :param repo_name: repository name
304 :param repo_name: repository name
312 """
305 """
313
306
314 log.debug('AUTH_CACHE_TTL for permissions `%s` active: %s (TTL: %s)',
307 log.debug('AUTH_CACHE_TTL for permissions `%s` active: %s (TTL: %s)',
315 plugin_id, plugin_cache_active, cache_ttl)
308 plugin_id, plugin_cache_active, cache_ttl)
316
309
317 user_id = user.user_id
310 user_id = user.user_id
318 cache_namespace_uid = f'cache_user_auth.{rc_cache.PERMISSIONS_CACHE_VER}.{user_id}'
311 cache_namespace_uid = f'cache_user_auth.{rc_cache.PERMISSIONS_CACHE_VER}.{user_id}'
319 region = rc_cache.get_or_create_region('cache_perms', cache_namespace_uid)
312 region = rc_cache.get_or_create_region('cache_perms', cache_namespace_uid)
320
313
321 @region.conditional_cache_on_arguments(namespace=cache_namespace_uid,
314 @region.conditional_cache_on_arguments(namespace=cache_namespace_uid,
322 expiration_time=cache_ttl,
315 expiration_time=cache_ttl,
323 condition=plugin_cache_active)
316 condition=plugin_cache_active)
324 def compute_perm_vcs(
317 def compute_perm_vcs(
325 cache_name, plugin_id, action, user_id, repo_name, ip_addr):
318 cache_name, plugin_id, action, user_id, repo_name, ip_addr):
326
319
327 log.debug('auth: calculating permission access now...')
320 log.debug('auth: calculating permission access now...')
328 # check IP
321 # check IP
329 inherit = user.inherit_default_permissions
322 inherit = user.inherit_default_permissions
330 ip_allowed = AuthUser.check_ip_allowed(
323 ip_allowed = AuthUser.check_ip_allowed(
331 user_id, ip_addr, inherit_from_default=inherit)
324 user_id, ip_addr, inherit_from_default=inherit)
332 if ip_allowed:
325 if ip_allowed:
333 log.info('Access for IP:%s allowed', ip_addr)
326 log.info('Access for IP:%s allowed', ip_addr)
334 else:
327 else:
335 return False
328 return False
336
329
337 if action == 'push':
330 if action == 'push':
338 perms = ('repository.write', 'repository.admin')
331 perms = ('repository.write', 'repository.admin')
339 if not HasPermissionAnyMiddleware(*perms)(auth_user, repo_name):
332 if not HasPermissionAnyMiddleware(*perms)(auth_user, repo_name):
340 return False
333 return False
341
334
342 else:
335 else:
343 # any other action need at least read permission
336 # any other action need at least read permission
344 perms = (
337 perms = (
345 'repository.read', 'repository.write', 'repository.admin')
338 'repository.read', 'repository.write', 'repository.admin')
346 if not HasPermissionAnyMiddleware(*perms)(auth_user, repo_name):
339 if not HasPermissionAnyMiddleware(*perms)(auth_user, repo_name):
347 return False
340 return False
348
341
349 return True
342 return True
350
343
351 start = time.time()
344 start = time.time()
352 log.debug('Running plugin `%s` permissions check', plugin_id)
345 log.debug('Running plugin `%s` permissions check', plugin_id)
353
346
354 # for environ based auth, password can be empty, but then the validation is
347 # for environ based auth, password can be empty, but then the validation is
355 # on the server that fills in the env data needed for authentication
348 # on the server that fills in the env data needed for authentication
356 perm_result = compute_perm_vcs(
349 perm_result = compute_perm_vcs(
357 'vcs_permissions', plugin_id, action, user.user_id, repo_name, ip_addr)
350 'vcs_permissions', plugin_id, action, user.user_id, repo_name, ip_addr)
358
351
359 auth_time = time.time() - start
352 auth_time = time.time() - start
360 log.debug('Permissions for plugin `%s` completed in %.4fs, '
353 log.debug('Permissions for plugin `%s` completed in %.4fs, '
361 'expiration time of fetched cache %.1fs.',
354 'expiration time of fetched cache %.1fs.',
362 plugin_id, auth_time, cache_ttl)
355 plugin_id, auth_time, cache_ttl)
363
356
364 return perm_result
357 return perm_result
365
358
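The caching above goes through an rc_cache dogpile region; as a rough, simplified stand-in (not the real rc_cache API), the idea is to memoise the expensive permission computation for cache_ttl seconds, but only when the authenticating plugin enabled caching:

import time

_perm_cache = {}  # key -> (timestamp, result); illustration only

def cached_perm_check(key, compute, cache_ttl, cache_active):
    if not cache_active:
        return compute()                      # caching disabled by the plugin
    hit = _perm_cache.get(key)
    now = time.time()
    if hit and (now - hit[0]) < cache_ttl:
        return hit[1]                         # fresh cached permission result
    result = compute()
    _perm_cache[key] = (now, result)
    return result

# invented values for demonstration
key = ('vcs_permissions', 'anonymous_access', 'pull', 2, 'group/repo', '10.0.0.1')
print(cached_perm_check(key, compute=lambda: True, cache_ttl=60, cache_active=True))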
366 def _get_http_scheme(self, environ):
359 def _get_http_scheme(self, environ):
367 try:
360 try:
368 return environ['wsgi.url_scheme']
361 return environ['wsgi.url_scheme']
369 except Exception:
362 except Exception:
370 log.exception('Failed to read http scheme')
363 log.exception('Failed to read http scheme')
371 return 'http'
364 return 'http'
372
365
373 def _check_ssl(self, environ, start_response):
366 def _check_ssl(self, environ, start_response):
374 """
367 """
375 Checks the SSL check flag and returns False if SSL is required but
368 Checks the SSL check flag and returns False if SSL is required but
376 not present, True otherwise
369 not present, True otherwise
377 """
370 """
378 org_proto = environ['wsgi._org_proto']
371 org_proto = environ['wsgi._org_proto']
379 # check if we have SSL required ! if not it's a bad request !
372 # check if we have SSL required ! if not it's a bad request !
380 require_ssl = str2bool(self.repo_vcs_config.get('web', 'push_ssl'))
373 require_ssl = str2bool(self.repo_vcs_config.get('web', 'push_ssl'))
381 if require_ssl and org_proto == 'http':
374 if require_ssl and org_proto == 'http':
382 log.debug(
375 log.debug(
383 'Bad request: detected protocol is `%s` and '
376 'Bad request: detected protocol is `%s` and '
384 'SSL/HTTPS is required.', org_proto)
377 'SSL/HTTPS is required.', org_proto)
385 return False
378 return False
386 return True
379 return True
387
380
388 def _get_default_cache_ttl(self):
381 def _get_default_cache_ttl(self):
389 # take AUTH_CACHE_TTL from the `rhodecode` auth plugin
382 # take AUTH_CACHE_TTL from the `rhodecode` auth plugin
390 plugin = loadplugin('egg:rhodecode-enterprise-ce#rhodecode')
383 plugin = loadplugin('egg:rhodecode-enterprise-ce#rhodecode')
391 plugin_settings = plugin.get_settings()
384 plugin_settings = plugin.get_settings()
392 plugin_cache_active, cache_ttl = plugin.get_ttl_cache(
385 plugin_cache_active, cache_ttl = plugin.get_ttl_cache(
393 plugin_settings) or (False, 0)
386 plugin_settings) or (False, 0)
394 return plugin_cache_active, cache_ttl
387 return plugin_cache_active, cache_ttl
395
388
396 def __call__(self, environ, start_response):
389 def __call__(self, environ, start_response):
397 try:
390 try:
398 return self._handle_request(environ, start_response)
391 return self._handle_request(environ, start_response)
399 except Exception:
392 except Exception:
400 log.exception("Exception while handling request")
393 log.exception("Exception while handling request")
401 appenlight.track_exception(environ)
394 appenlight.track_exception(environ)
402 return HTTPInternalServerError()(environ, start_response)
395 return HTTPInternalServerError()(environ, start_response)
403 finally:
396 finally:
404 meta.Session.remove()
397 meta.Session.remove()
405
398
406 def _handle_request(self, environ, start_response):
399 def _handle_request(self, environ, start_response):
407 if not self._check_ssl(environ, start_response):
400 if not self._check_ssl(environ, start_response):
408 reason = ('SSL required, while RhodeCode was unable '
401 reason = ('SSL required, while RhodeCode was unable '
409 'to detect this as SSL request')
402 'to detect this as SSL request')
410 log.debug('User not allowed to proceed, %s', reason)
403 log.debug('User not allowed to proceed, %s', reason)
411 return HTTPNotAcceptable(reason)(environ, start_response)
404 return HTTPNotAcceptable(reason)(environ, start_response)
412
405
413 if not self.url_repo_name:
406 if not self.url_repo_name:
414 log.warning('Repository name is empty: %s', self.url_repo_name)
407 log.warning('Repository name is empty: %s', self.url_repo_name)
415 # failed to get repo name, we fail now
408 # failed to get repo name, we fail now
416 return HTTPNotFound()(environ, start_response)
409 return HTTPNotFound()(environ, start_response)
417 log.debug('Extracted repo name is %s', self.url_repo_name)
410 log.debug('Extracted repo name is %s', self.url_repo_name)
418
411
419 ip_addr = get_ip_addr(environ)
412 ip_addr = get_ip_addr(environ)
420 user_agent = get_user_agent(environ)
413 user_agent = get_user_agent(environ)
421 username = None
414 username = None
422
415
423 # skip passing error to error controller
416 # skip passing error to error controller
424 environ['pylons.status_code_redirect'] = True
417 environ['pylons.status_code_redirect'] = True
425
418
426 # ======================================================================
419 # ======================================================================
427 # GET ACTION PULL or PUSH
420 # GET ACTION PULL or PUSH
428 # ======================================================================
421 # ======================================================================
429 action = self._get_action(environ)
422 action = self._get_action(environ)
430
423
431 # ======================================================================
424 # ======================================================================
432 # Check if this is a request to a shadow repository of a pull request.
425 # Check if this is a request to a shadow repository of a pull request.
433 # In this case only pull action is allowed.
426 # In this case only pull action is allowed.
434 # ======================================================================
427 # ======================================================================
435 if self.is_shadow_repo and action != 'pull':
428 if self.is_shadow_repo and action != 'pull':
436 reason = 'Only pull action is allowed for shadow repositories.'
429 reason = 'Only pull action is allowed for shadow repositories.'
437 log.debug('User not allowed to proceed, %s', reason)
430 log.debug('User not allowed to proceed, %s', reason)
438 return HTTPNotAcceptable(reason)(environ, start_response)
431 return HTTPNotAcceptable(reason)(environ, start_response)
439
432
440 # Check if the shadow repo actually exists, in case someone refers
433 # Check if the shadow repo actually exists, in case someone refers
441 # to it after it has been deleted because of a successful merge.
434 # to it after it has been deleted because of a successful merge.
442 if self.is_shadow_repo and not self.is_shadow_repo_dir:
435 if self.is_shadow_repo and not self.is_shadow_repo_dir:
443 log.debug(
436 log.debug(
444 'Shadow repo detected, and shadow repo dir `%s` is missing',
437 'Shadow repo detected, and shadow repo dir `%s` is missing',
445 self.vcs_repo_name)
438 self.vcs_repo_name)
446 return HTTPNotFound()(environ, start_response)
439 return HTTPNotFound()(environ, start_response)
447
440
448 # ======================================================================
441 # ======================================================================
449 # CHECK ANONYMOUS PERMISSION
442 # CHECK ANONYMOUS PERMISSION
450 # ======================================================================
443 # ======================================================================
451 detect_force_push = False
444 detect_force_push = False
452 check_branch_perms = False
445 check_branch_perms = False
453 if action in ['pull', 'push']:
446 if action in ['pull', 'push']:
454 user_obj = anonymous_user = User.get_default_user()
447 user_obj = anonymous_user = User.get_default_user()
455 auth_user = user_obj.AuthUser()
448 auth_user = user_obj.AuthUser()
456 username = anonymous_user.username
449 username = anonymous_user.username
457 if anonymous_user.active:
450 if anonymous_user.active:
458 plugin_cache_active, cache_ttl = self._get_default_cache_ttl()
451 plugin_cache_active, cache_ttl = self._get_default_cache_ttl()
459 # ONLY check permissions if the user is activated
452 # ONLY check permissions if the user is activated
460 anonymous_perm = self._check_permission(
453 anonymous_perm = self._check_permission(
461 action, anonymous_user, auth_user, self.acl_repo_name, ip_addr,
454 action, anonymous_user, auth_user, self.acl_repo_name, ip_addr,
462 plugin_id='anonymous_access',
455 plugin_id='anonymous_access',
463 plugin_cache_active=plugin_cache_active,
456 plugin_cache_active=plugin_cache_active,
464 cache_ttl=cache_ttl,
457 cache_ttl=cache_ttl,
465 )
458 )
466 else:
459 else:
467 anonymous_perm = False
460 anonymous_perm = False
468
461
469 if not anonymous_user.active or not anonymous_perm:
462 if not anonymous_user.active or not anonymous_perm:
470 if not anonymous_user.active:
463 if not anonymous_user.active:
471 log.debug('Anonymous access is disabled, running '
464 log.debug('Anonymous access is disabled, running '
472 'authentication')
465 'authentication')
473
466
474 if not anonymous_perm:
467 if not anonymous_perm:
475 log.debug('Not enough credentials to access repo: `%s` '
468 log.debug('Not enough credentials to access repo: `%s` '
476 'repository as anonymous user', self.acl_repo_name)
469 'repository as anonymous user', self.acl_repo_name)
477
470
478
471
479 username = None
472 username = None
480 # ==============================================================
473 # ==============================================================
481 # DEFAULT PERM FAILED OR ANONYMOUS ACCESS IS DISABLED SO WE
474 # DEFAULT PERM FAILED OR ANONYMOUS ACCESS IS DISABLED SO WE
482 # NEED TO AUTHENTICATE AND ASK FOR AUTH USER PERMISSIONS
475 # NEED TO AUTHENTICATE AND ASK FOR AUTH USER PERMISSIONS
483 # ==============================================================
476 # ==============================================================
484
477
485 # try to auth based on environ, container auth methods
478 # try to auth based on environ, container auth methods
486 log.debug('Running PRE-AUTH for container|headers based authentication')
479 log.debug('Running PRE-AUTH for container|headers based authentication')
487
480
488 # headers auth, by just reading special headers and bypass the auth with user/passwd
481 # headers auth, by just reading special headers and bypass the auth with user/passwd
489 pre_auth = authenticate(
482 pre_auth = authenticate(
490 '', '', environ, VCS_TYPE, registry=self.registry,
483 '', '', environ, VCS_TYPE, registry=self.registry,
491 acl_repo_name=self.acl_repo_name)
484 acl_repo_name=self.acl_repo_name)
492
485
493 if pre_auth and pre_auth.get('username'):
486 if pre_auth and pre_auth.get('username'):
494 username = pre_auth['username']
487 username = pre_auth['username']
495 log.debug('PRE-AUTH got `%s` as username', username)
488 log.debug('PRE-AUTH got `%s` as username', username)
496 if pre_auth:
489 if pre_auth:
497 log.debug('PRE-AUTH successful from %s',
490 log.debug('PRE-AUTH successful from %s',
498 pre_auth.get('auth_data', {}).get('_plugin'))
491 pre_auth.get('auth_data', {}).get('_plugin'))
499
492
500 # If not authenticated by the container, run basic auth;
493 # If not authenticated by the container, run basic auth;
501 # before that, inject the calling repo_name for special scope checks
494 # before that, inject the calling repo_name for special scope checks
502 self.authenticate.acl_repo_name = self.acl_repo_name
495 self.authenticate.acl_repo_name = self.acl_repo_name
503
496
504 plugin_cache_active, cache_ttl = False, 0
497 plugin_cache_active, cache_ttl = False, 0
505 plugin = None
498 plugin = None
506
499
507 # regular auth chain
500 # regular auth chain
508 if not username:
501 if not username:
509 self.authenticate.realm = self.authenticate.get_rc_realm()
502 self.authenticate.realm = self.authenticate.get_rc_realm()
510
503
511 try:
504 try:
512 auth_result = self.authenticate(environ)
505 auth_result = self.authenticate(environ)
513 except (UserCreationError, NotAllowedToCreateUserError) as e:
506 except (UserCreationError, NotAllowedToCreateUserError) as e:
514 log.error(e)
507 log.error(e)
515 reason = safe_str(e)
508 reason = safe_str(e)
516 return HTTPNotAcceptable(reason)(environ, start_response)
509 return HTTPNotAcceptable(reason)(environ, start_response)
517
510
518 if isinstance(auth_result, dict):
511 if isinstance(auth_result, dict):
519 AUTH_TYPE.update(environ, 'basic')
512 AUTH_TYPE.update(environ, 'basic')
520 REMOTE_USER.update(environ, auth_result['username'])
513 REMOTE_USER.update(environ, auth_result['username'])
521 username = auth_result['username']
514 username = auth_result['username']
522 plugin = auth_result.get('auth_data', {}).get('_plugin')
515 plugin = auth_result.get('auth_data', {}).get('_plugin')
523 log.info(
516 log.info(
524 'MAIN-AUTH successful for user `%s` from %s plugin',
517 'MAIN-AUTH successful for user `%s` from %s plugin',
525 username, plugin)
518 username, plugin)
526
519
527 plugin_cache_active, cache_ttl = auth_result.get(
520 plugin_cache_active, cache_ttl = auth_result.get(
528 'auth_data', {}).get('_ttl_cache') or (False, 0)
521 'auth_data', {}).get('_ttl_cache') or (False, 0)
529 else:
522 else:
530 return auth_result.wsgi_application(environ, start_response)
523 return auth_result.wsgi_application(environ, start_response)
531
524
532 # ==============================================================
525 # ==============================================================
533 # CHECK PERMISSIONS FOR THIS REQUEST USING GIVEN USERNAME
526 # CHECK PERMISSIONS FOR THIS REQUEST USING GIVEN USERNAME
534 # ==============================================================
527 # ==============================================================
535 user = User.get_by_username(username)
528 user = User.get_by_username(username)
536 if not self.valid_and_active_user(user):
529 if not self.valid_and_active_user(user):
537 return HTTPForbidden()(environ, start_response)
530 return HTTPForbidden()(environ, start_response)
538 username = user.username
531 username = user.username
539 user_id = user.user_id
532 user_id = user.user_id
540
533
541 # check user attributes for password change flag
534 # check user attributes for password change flag
542 user_obj = user
535 user_obj = user
543 auth_user = user_obj.AuthUser()
536 auth_user = user_obj.AuthUser()
544 if user_obj and user_obj.username != User.DEFAULT_USER and \
537 if user_obj and user_obj.username != User.DEFAULT_USER and \
545 user_obj.user_data.get('force_password_change'):
538 user_obj.user_data.get('force_password_change'):
546 reason = 'password change required'
539 reason = 'password change required'
547 log.debug('User not allowed to authenticate, %s', reason)
540 log.debug('User not allowed to authenticate, %s', reason)
548 return HTTPNotAcceptable(reason)(environ, start_response)
541 return HTTPNotAcceptable(reason)(environ, start_response)
549
542
550 # check permissions for this repository
543 # check permissions for this repository
551 perm = self._check_permission(
544 perm = self._check_permission(
552 action, user, auth_user, self.acl_repo_name, ip_addr,
545 action, user, auth_user, self.acl_repo_name, ip_addr,
553 plugin, plugin_cache_active, cache_ttl)
546 plugin, plugin_cache_active, cache_ttl)
554 if not perm:
547 if not perm:
555 return HTTPForbidden()(environ, start_response)
548 return HTTPForbidden()(environ, start_response)
556 environ['rc_auth_user_id'] = str(user_id)
549 environ['rc_auth_user_id'] = str(user_id)
557
550
558 if action == 'push':
551 if action == 'push':
559 perms = auth_user.get_branch_permissions(self.acl_repo_name)
552 perms = auth_user.get_branch_permissions(self.acl_repo_name)
560 if perms:
553 if perms:
561 check_branch_perms = True
554 check_branch_perms = True
562 detect_force_push = True
555 detect_force_push = True
563
556
564 # extras are injected into UI object and later available
557 # extras are injected into UI object and later available
565 # in hooks executed by RhodeCode
558 # in hooks executed by RhodeCode
566 check_locking = _should_check_locking(environ.get('QUERY_STRING'))
559 check_locking = _should_check_locking(environ.get('QUERY_STRING'))
567
560
568 extras = vcs_operation_context(
561 extras = vcs_operation_context(
569 environ, repo_name=self.acl_repo_name, username=username,
562 environ, repo_name=self.acl_repo_name, username=username,
570 action=action, scm=self.SCM, check_locking=check_locking,
563 action=action, scm=self.SCM, check_locking=check_locking,
571 is_shadow_repo=self.is_shadow_repo, check_branch_perms=check_branch_perms,
564 is_shadow_repo=self.is_shadow_repo, check_branch_perms=check_branch_perms,
572 detect_force_push=detect_force_push
565 detect_force_push=detect_force_push
573 )
566 )
574
567
575 # ======================================================================
568 # ======================================================================
576 # REQUEST HANDLING
569 # REQUEST HANDLING
577 # ======================================================================
570 # ======================================================================
578 repo_path = os.path.join(
571 repo_path = os.path.join(
579 safe_str(self.base_path), safe_str(self.vcs_repo_name))
572 safe_str(self.base_path), safe_str(self.vcs_repo_name))
580 log.debug('Repository path is %s', repo_path)
573 log.debug('Repository path is %s', repo_path)
581
574
582 fix_PATH()
575 fix_PATH()
583
576
584 log.info(
577 log.info(
585 '%s action on %s repo "%s" by "%s" from %s %s',
578 '%s action on %s repo "%s" by "%s" from %s %s',
586 action, self.SCM, safe_str(self.url_repo_name),
579 action, self.SCM, safe_str(self.url_repo_name),
587 safe_str(username), ip_addr, user_agent)
580 safe_str(username), ip_addr, user_agent)
588
581
589 return self._generate_vcs_response(
582 return self._generate_vcs_response(
590 environ, start_response, repo_path, extras, action)
583 environ, start_response, repo_path, extras, action)
591
584
592 @initialize_generator
585 @initialize_generator
593 def _generate_vcs_response(
586 def _generate_vcs_response(
594 self, environ, start_response, repo_path, extras, action):
587 self, environ, start_response, repo_path, extras, action):
595 """
588 """
596 Returns a generator for the response content.
589 Returns a generator for the response content.
597
590
598 This method is implemented as a generator, so that it can trigger
591 This method is implemented as a generator, so that it can trigger
599 the cache validation after all content is sent back to the client. It
592 the cache validation after all content is sent back to the client. It
600 also handles the locking exceptions which will be triggered when
593 also handles the locking exceptions which will be triggered when
601 the first chunk is produced by the underlying WSGI application.
594 the first chunk is produced by the underlying WSGI application.
602 """
595 """
603
596
604 txn_id = ''
597 txn_id = ''
605 if 'CONTENT_LENGTH' in environ and environ['REQUEST_METHOD'] == 'MERGE':
598 if 'CONTENT_LENGTH' in environ and environ['REQUEST_METHOD'] == 'MERGE':
606 # case for SVN, we want to re-use the callback daemon port
599 # case for SVN, we want to re-use the callback daemon port
607 # so we use the txn_id, for this we peek the body, and still save
600 # so we use the txn_id, for this we peek the body, and still save
608 # it as wsgi.input
601 # it as wsgi.input
609
602
610 stream = environ['wsgi.input']
603 stream = environ['wsgi.input']
611
604
612 if isinstance(stream, io.BytesIO):
605 if isinstance(stream, io.BytesIO):
613 data: bytes = stream.getvalue()
606 data: bytes = stream.getvalue()
614 elif hasattr(stream, 'buf'): # most likely gunicorn.http.body.Body
607 elif hasattr(stream, 'buf'): # most likely gunicorn.http.body.Body
615 data: bytes = stream.buf.getvalue()
608 data: bytes = stream.buf.getvalue()
616 else:
609 else:
617 # fallback to the crudest way, copy the iterator
610 # fallback to the crudest way, copy the iterator
618 data = safe_bytes(stream.read())
611 data = safe_bytes(stream.read())
619 environ['wsgi.input'] = io.BytesIO(data)
612 environ['wsgi.input'] = io.BytesIO(data)
620
613
621 txn_id = extract_svn_txn_id(self.acl_repo_name, data)
614 txn_id = extract_svn_txn_id(self.acl_repo_name, data)
622
615
623 callback_daemon, extras = self._prepare_callback_daemon(
616 callback_daemon, extras = self._prepare_callback_daemon(
624 extras, environ, action, txn_id=txn_id)
617 extras, environ, action, txn_id=txn_id)
625 log.debug('HOOKS extras is %s', extras)
618 log.debug('HOOKS extras is %s', extras)
626
619
627 http_scheme = self._get_http_scheme(environ)
620 http_scheme = self._get_http_scheme(environ)
628
621
629 config = self._create_config(extras, self.acl_repo_name, scheme=http_scheme)
622 config = self._create_config(extras, self.acl_repo_name, scheme=http_scheme)
630 app = self._create_wsgi_app(repo_path, self.url_repo_name, config)
623 app = self._create_wsgi_app(repo_path, self.url_repo_name, config)
631 with callback_daemon:
624 with callback_daemon:
632 app.rc_extras = extras
625 app.rc_extras = extras
633
626
634 try:
627 try:
635 response = app(environ, start_response)
628 response = app(environ, start_response)
636 finally:
629 finally:
637 # This statement works together with the decorator
630 # This statement works together with the decorator
638 # "initialize_generator" above. The decorator ensures that
631 # "initialize_generator" above. The decorator ensures that
639 # we hit the first yield statement before the generator is
632 # we hit the first yield statement before the generator is
640 # returned back to the WSGI server. This is needed to
633 # returned back to the WSGI server. This is needed to
641 # ensure that the call to "app" above triggers the
634 # ensure that the call to "app" above triggers the
642 # needed callback to "start_response" before the
635 # needed callback to "start_response" before the
643 # generator is actually used.
636 # generator is actually used.
644 yield "__init__"
637 yield "__init__"
645
638
646 # iter content
639 # iter content
647 for chunk in response:
640 for chunk in response:
648 yield chunk
641 yield chunk
649
642
650 try:
643 try:
651 # invalidate cache on push
644 # invalidate cache on push
652 if action == 'push':
645 if action == 'push':
653 self._invalidate_cache(self.url_repo_name)
646 self._invalidate_cache(self.url_repo_name)
654 finally:
647 finally:
655 meta.Session.remove()
648 meta.Session.remove()
656
649
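The SVN branch of the generator above peeks at the request body and then puts a rewound copy back into the environ so the downstream app can still read it. A stripped-down sketch of that trick (peek_request_body is an invented name, and the gunicorn buffered-body shortcut is ignored):

import io

def peek_request_body(environ):
    """Read the WSGI request body while leaving a rewound copy for the app."""
    stream = environ['wsgi.input']
    data = stream.read()                       # drain the original stream
    environ['wsgi.input'] = io.BytesIO(data)   # restore a fresh, seekable copy
    return data

environ = {'wsgi.input': io.BytesIO(b'<D:merge xmlns:D="DAV:">...</D:merge>')}
body = peek_request_body(environ)
print(body == environ['wsgi.input'].read())   # True - body is still readable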
657 def _get_repository_name(self, environ):
650 def _get_repository_name(self, environ):
658 """Get repository name out of the environment
651 """Get repository name out of the environment
659
652
660 :param environ: WSGI environment
653 :param environ: WSGI environment
661 """
654 """
662 raise NotImplementedError()
655 raise NotImplementedError()
663
656
664 def _get_action(self, environ):
657 def _get_action(self, environ):
665 """Map request commands into a pull or push command.
658 """Map request commands into a pull or push command.
666
659
667 :param environ: WSGI environment
660 :param environ: WSGI environment
668 """
661 """
669 raise NotImplementedError()
662 raise NotImplementedError()
670
663
671 def _create_wsgi_app(self, repo_path, repo_name, config):
664 def _create_wsgi_app(self, repo_path, repo_name, config):
672 """Return the WSGI app that will finally handle the request."""
665 """Return the WSGI app that will finally handle the request."""
673 raise NotImplementedError()
666 raise NotImplementedError()
674
667
675 def _create_config(self, extras, repo_name, scheme='http'):
668 def _create_config(self, extras, repo_name, scheme='http'):
676 """Create a safe config representation."""
669 """Create a safe config representation."""
677 raise NotImplementedError()
670 raise NotImplementedError()
678
671
679 def _should_use_callback_daemon(self, extras, environ, action):
672 def _should_use_callback_daemon(self, extras, environ, action):
680 if extras.get('is_shadow_repo'):
673 if extras.get('is_shadow_repo'):
681 # we don't want to execute hooks, and callback daemon for shadow repos
674 # we don't want to execute hooks, and callback daemon for shadow repos
682 return False
675 return False
683 return True
676 return True
684
677
685 def _prepare_callback_daemon(self, extras, environ, action, txn_id=None):
678 def _prepare_callback_daemon(self, extras, environ, action, txn_id=None):
686 protocol = vcs_settings.HOOKS_PROTOCOL
679 protocol = vcs_settings.HOOKS_PROTOCOL
687 if not self._should_use_callback_daemon(extras, environ, action):
680 if not self._should_use_callback_daemon(extras, environ, action):
688 # disable callback daemon for actions that don't require it
681 # disable callback daemon for actions that don't require it
689 protocol = 'local'
682 protocol = 'local'
690
683
691 return prepare_callback_daemon(
684 return prepare_callback_daemon(
692 extras, protocol=protocol,
685 extras, protocol=protocol,
693 host=vcs_settings.HOOKS_HOST, txn_id=txn_id)
686 host=vcs_settings.HOOKS_HOST, txn_id=txn_id)
694
687
695
688
696 def _should_check_locking(query_string):
689 def _should_check_locking(query_string):
697 # this is kind of hacky, but due to how mercurial handles client-server
690 # this is kind of hacky, but due to how mercurial handles client-server
698 # communication, the server sees all operations around a commit (bookmarks,
691 # communication, the server sees all operations around a commit (bookmarks,
699 # phases and obsolescence markers) in separate transactions, so we don't want
692 # phases and obsolescence markers) in separate transactions, so we don't want
700 # to check locking on those
693 # to check locking on those
701 return query_string not in ['cmd=listkeys']
694 return query_string not in ['cmd=listkeys']
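A tiny usage sketch of the helper above (the query strings are examples of what Mercurial clients send):

print(_should_check_locking('cmd=listkeys'))  # False - skip locking checks
print(_should_check_locking('cmd=unbundle'))  # True  - regular push traffic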
@@ -1,845 +1,844 b''
1 # Copyright (C) 2017-2023 RhodeCode GmbH
1 # Copyright (C) 2017-2023 RhodeCode GmbH
2 #
2 #
3 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
4 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
5 # (only), as published by the Free Software Foundation.
5 # (only), as published by the Free Software Foundation.
6 #
6 #
7 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
10 # GNU General Public License for more details.
11 #
11 #
12 # You should have received a copy of the GNU Affero General Public License
12 # You should have received a copy of the GNU Affero General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 #
14 #
15 # This program is dual-licensed. If you wish to learn more about the
15 # This program is dual-licensed. If you wish to learn more about the
16 # RhodeCode Enterprise Edition, including its added features, Support services,
16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18
18
19
19
20 import os
20 import os
21 import sys
21 import sys
22 import time
22 import time
23 import platform
23 import platform
24 import collections
24 import collections
25 import psutil
25 import psutil
26 from functools import wraps
26 from functools import wraps
27
27
28 import pkg_resources
28 import pkg_resources
29 import logging
29 import logging
30 import resource
30 import resource
31
31
32 import configparser
32 import configparser
33
33
34 from rc_license.models import LicenseModel
34 from rc_license.models import LicenseModel
35 from rhodecode.lib.str_utils import safe_str
35 from rhodecode.lib.str_utils import safe_str
36
36
37 log = logging.getLogger(__name__)
37 log = logging.getLogger(__name__)
38
38
39
39
40 _NA = 'NOT AVAILABLE'
40 _NA = 'NOT AVAILABLE'
41 _NA_FLOAT = 0.0
41 _NA_FLOAT = 0.0
42
42
43 STATE_OK = 'ok'
43 STATE_OK = 'ok'
44 STATE_ERR = 'error'
44 STATE_ERR = 'error'
45 STATE_WARN = 'warning'
45 STATE_WARN = 'warning'
46
46
47 STATE_OK_DEFAULT = {'message': '', 'type': STATE_OK}
47 STATE_OK_DEFAULT = {'message': '', 'type': STATE_OK}
48
48
49
49
50 registered_helpers = {}
50 registered_helpers = {}
51
51
52
52
53 def register_sysinfo(func):
53 def register_sysinfo(func):
54 """
54 """
55 @register_sysinfo
55 @register_sysinfo
56 def db_check():
56 def db_check():
57 pass
57 pass
58
58
59 db_check == registered_helpers['db_check']
59 db_check == registered_helpers['db_check']
60 """
60 """
61 global registered_helpers
61 global registered_helpers
62 registered_helpers[func.__name__] = func
62 registered_helpers[func.__name__] = func
63
63
64 @wraps(func)
64 @wraps(func)
65 def _wrapper(*args, **kwargs):
65 def _wrapper(*args, **kwargs):
66 return func(*args, **kwargs)
66 return func(*args, **kwargs)
67 return _wrapper
67 return _wrapper
68
68
69
69
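A small usage sketch of the decorator above. The helper name and return value are invented; real helpers return SysInfoRes instances (defined further down). Note that the registry stores the original function, while the module-level name is bound to the functools.wraps wrapper around it:

@register_sysinfo
def db_check():
    return {'db': 'reachable'}   # real helpers return SysInfoRes objects

print('db_check' in registered_helpers)   # True
print(registered_helpers['db_check']())   # {'db': 'reachable'}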
70 # HELPERS
70 # HELPERS
71 def percentage(part: (int, float), whole: (int, float)):
71 def percentage(part: (int, float), whole: (int, float)):
72 whole = float(whole)
72 whole = float(whole)
73 if whole > 0:
73 if whole > 0:
74 return round(100 * float(part) / whole, 1)
74 return round(100 * float(part) / whole, 1)
75 return 0.0
75 return 0.0
76
76
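A few example calls to the helper above:

print(percentage(512, 2048))  # 25.0
print(percentage(1, 3))       # 33.3 - rounded to one decimal place
print(percentage(5, 0))       # 0.0  - a zero denominator is treated as 0%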
77
77
78 def get_storage_size(storage_path):
78 def get_storage_size(storage_path):
79 sizes = []
79 sizes = []
80 for file_ in os.listdir(storage_path):
80 for file_ in os.listdir(storage_path):
81 storage_file = os.path.join(storage_path, file_)
81 storage_file = os.path.join(storage_path, file_)
82 if os.path.isfile(storage_file):
82 if os.path.isfile(storage_file):
83 try:
83 try:
84 sizes.append(os.path.getsize(storage_file))
84 sizes.append(os.path.getsize(storage_file))
85 except OSError:
85 except OSError:
86 log.exception('Failed to get size of storage file %s', storage_file)
86 log.exception('Failed to get size of storage file %s', storage_file)
87 pass
87 pass
88
88
89 return sum(sizes)
89 return sum(sizes)
90
90
91
91
92 def get_resource(resource_type):
92 def get_resource(resource_type):
93 try:
93 try:
94 return resource.getrlimit(resource_type)
94 return resource.getrlimit(resource_type)
95 except Exception:
95 except Exception:
96 return 'NOT_SUPPORTED'
96 return 'NOT_SUPPORTED'
97
97
98
98
99 def get_cert_path(ini_path):
99 def get_cert_path(ini_path):
100 default = '/etc/ssl/certs/ca-certificates.crt'
100 default = '/etc/ssl/certs/ca-certificates.crt'
101 control_ca_bundle = os.path.join(
101 control_ca_bundle = os.path.join(
102 os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(ini_path)))),
102 os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(ini_path)))),
103 '.rccontrol-profile/etc/ca-bundle.crt')
103 '.rccontrol-profile/etc/ca-bundle.crt')
104 if os.path.isfile(control_ca_bundle):
104 if os.path.isfile(control_ca_bundle):
105 default = control_ca_bundle
105 default = control_ca_bundle
106
106
107 return default
107 return default
108
108
109
109
110 class SysInfoRes(object):
110 class SysInfoRes(object):
111 def __init__(self, value, state=None, human_value=None):
111 def __init__(self, value, state=None, human_value=None):
112 self.value = value
112 self.value = value
113 self.state = state or STATE_OK_DEFAULT
113 self.state = state or STATE_OK_DEFAULT
114 self.human_value = human_value or value
114 self.human_value = human_value or value
115
115
116 def __json__(self):
116 def __json__(self):
117 return {
117 return {
118 'value': self.value,
118 'value': self.value,
119 'state': self.state,
119 'state': self.state,
120 'human_value': self.human_value,
120 'human_value': self.human_value,
121 }
121 }
122
122
123 def get_value(self):
123 def get_value(self):
124 return self.__json__()
124 return self.__json__()
125
125
126 def __str__(self):
126 def __str__(self):
127 return f'<SysInfoRes({self.__json__()})>'
127 return f'<SysInfoRes({self.__json__()})>'
128
128
129
129
130 class SysInfo(object):
130 class SysInfo(object):
131
131
132 def __init__(self, func_name, **kwargs):
132 def __init__(self, func_name, **kwargs):
133 self.function_name = func_name
133 self.function_name = func_name
134 self.value = _NA
134 self.value = _NA
135 self.state = None
135 self.state = None
136 self.kwargs = kwargs or {}
136 self.kwargs = kwargs or {}
137
137
138 def __call__(self):
138 def __call__(self):
139 computed = self.compute(**self.kwargs)
139 computed = self.compute(**self.kwargs)
140 if not isinstance(computed, SysInfoRes):
140 if not isinstance(computed, SysInfoRes):
141 raise ValueError(
141 raise ValueError(
142 'computed value for {} is not instance of '
142 'computed value for {} is not instance of '
143 '{}, got {} instead'.format(
143 '{}, got {} instead'.format(
144 self.function_name, SysInfoRes, type(computed)))
144 self.function_name, SysInfoRes, type(computed)))
145 return computed.__json__()
145 return computed.__json__()
146
146
147 def __str__(self):
147 def __str__(self):
148 return f'<SysInfo({self.function_name})>'
148 return f'<SysInfo({self.function_name})>'
149
149
150 def compute(self, **kwargs):
150 def compute(self, **kwargs):
151 return self.function_name(**kwargs)
151 return self.function_name(**kwargs)
152
152
153
153
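A minimal usage sketch of the two classes above; uptime_info and its values are invented for illustration:

def uptime_info():
    # real helpers are decorated with @register_sysinfo
    return SysInfoRes(value={'uptime_seconds': 12345}, human_value='3h 25m 45s')

print(SysInfo(uptime_info)())
# {'value': {'uptime_seconds': 12345},
#  'state': {'message': '', 'type': 'ok'},
#  'human_value': '3h 25m 45s'}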
154 # SysInfo functions
154 # SysInfo functions
155 @register_sysinfo
155 @register_sysinfo
156 def python_info():
156 def python_info():
157 value = dict(version=f'{platform.python_version()}:{platform.python_implementation()}',
157 value = dict(version=f'{platform.python_version()}:{platform.python_implementation()}',
158 executable=sys.executable)
158 executable=sys.executable)
159 return SysInfoRes(value=value)
159 return SysInfoRes(value=value)
160
160
161
161
162 @register_sysinfo
162 @register_sysinfo
163 def py_modules():
163 def py_modules():
164 mods = dict([(p.project_name, {'version': p.version, 'location': p.location})
164 mods = dict([(p.project_name, {'version': p.version, 'location': p.location})
165 for p in pkg_resources.working_set])
165 for p in pkg_resources.working_set])
166
166
167 value = sorted(mods.items(), key=lambda k: k[0].lower())
167 value = sorted(mods.items(), key=lambda k: k[0].lower())
168 return SysInfoRes(value=value)
168 return SysInfoRes(value=value)
169
169
170
170
171 @register_sysinfo
171 @register_sysinfo
172 def platform_type():
172 def platform_type():
173 from rhodecode.lib.utils import generate_platform_uuid
173 from rhodecode.lib.utils import generate_platform_uuid
174
174
175 value = dict(
175 value = dict(
176 name=safe_str(platform.platform()),
176 name=safe_str(platform.platform()),
177 uuid=generate_platform_uuid()
177 uuid=generate_platform_uuid()
178 )
178 )
179 return SysInfoRes(value=value)
179 return SysInfoRes(value=value)
180
180
181
181
182 @register_sysinfo
182 @register_sysinfo
183 def locale_info():
183 def locale_info():
184 import locale
184 import locale
185
185
186 def safe_get_locale(locale_name):
186 def safe_get_locale(locale_name):
187 try:
187 try:
188 return locale.getlocale(locale_name)
188 return locale.getlocale(locale_name)
189 except TypeError:
189 except TypeError:
190 return f'FAILED_LOCALE_GET:{locale_name}'
190 return f'FAILED_LOCALE_GET:{locale_name}'
191
191
192 value = dict(
192 value = dict(
193 locale_default=locale.getlocale(),
193 locale_default=locale.getlocale(),
194 locale_lc_all=safe_get_locale(locale.LC_ALL),
194 locale_lc_all=safe_get_locale(locale.LC_ALL),
195 locale_lc_ctype=safe_get_locale(locale.LC_CTYPE),
195 locale_lc_ctype=safe_get_locale(locale.LC_CTYPE),
196 lang_env=os.environ.get('LANG'),
196 lang_env=os.environ.get('LANG'),
197 lc_all_env=os.environ.get('LC_ALL'),
197 lc_all_env=os.environ.get('LC_ALL'),
198 local_archive_env=os.environ.get('LOCALE_ARCHIVE'),
198 local_archive_env=os.environ.get('LOCALE_ARCHIVE'),
199 )
199 )
200 human_value = \
200 human_value = \
201 f"LANG: {value['lang_env']}, " \
201 f"LANG: {value['lang_env']}, " \
202 f"locale LC_ALL: {value['locale_lc_all']}, " \
202 f"locale LC_ALL: {value['locale_lc_all']}, " \
203 f"locale LC_CTYPE: {value['locale_lc_ctype']}, " \
203 f"locale LC_CTYPE: {value['locale_lc_ctype']}, " \
204 f"Default locales: {value['locale_default']}"
204 f"Default locales: {value['locale_default']}"
205
205
206 return SysInfoRes(value=value, human_value=human_value)
206 return SysInfoRes(value=value, human_value=human_value)
207
207
208
208
209 @register_sysinfo
209 @register_sysinfo
210 def ulimit_info():
210 def ulimit_info():
211 data = collections.OrderedDict([
211 data = collections.OrderedDict([
212 ('cpu time (seconds)', get_resource(resource.RLIMIT_CPU)),
212 ('cpu time (seconds)', get_resource(resource.RLIMIT_CPU)),
213 ('file size', get_resource(resource.RLIMIT_FSIZE)),
213 ('file size', get_resource(resource.RLIMIT_FSIZE)),
214 ('stack size', get_resource(resource.RLIMIT_STACK)),
214 ('stack size', get_resource(resource.RLIMIT_STACK)),
215 ('core file size', get_resource(resource.RLIMIT_CORE)),
215 ('core file size', get_resource(resource.RLIMIT_CORE)),
216 ('address space size', get_resource(resource.RLIMIT_AS)),
216 ('address space size', get_resource(resource.RLIMIT_AS)),
217 ('locked in mem size', get_resource(resource.RLIMIT_MEMLOCK)),
217 ('locked in mem size', get_resource(resource.RLIMIT_MEMLOCK)),
218 ('heap size', get_resource(resource.RLIMIT_DATA)),
218 ('heap size', get_resource(resource.RLIMIT_DATA)),
219 ('rss size', get_resource(resource.RLIMIT_RSS)),
219 ('rss size', get_resource(resource.RLIMIT_RSS)),
220 ('number of processes', get_resource(resource.RLIMIT_NPROC)),
220 ('number of processes', get_resource(resource.RLIMIT_NPROC)),
221 ('open files', get_resource(resource.RLIMIT_NOFILE)),
221 ('open files', get_resource(resource.RLIMIT_NOFILE)),
222 ])
222 ])
223
223
224 text = ', '.join(f'{k}:{v}' for k, v in data.items())
224 text = ', '.join(f'{k}:{v}' for k, v in data.items())
225
225
226 value = {
226 value = {
227 'limits': data,
227 'limits': data,
228 'text': text,
228 'text': text,
229 }
229 }
230 return SysInfoRes(value=value)
230 return SysInfoRes(value=value)
231
231
232
232
233 @register_sysinfo
233 @register_sysinfo
234 def uptime():
234 def uptime():
235 from rhodecode.lib.helpers import age, time_to_datetime
235 from rhodecode.lib.helpers import age, time_to_datetime
236 from rhodecode.translation import TranslationString
236 from rhodecode.translation import TranslationString
237
237
238 value = dict(boot_time=0, uptime=0, text='')
238 value = dict(boot_time=0, uptime=0, text='')
239 state = STATE_OK_DEFAULT
239 state = STATE_OK_DEFAULT
240
240
241 boot_time = psutil.boot_time()
241 boot_time = psutil.boot_time()
242 value['boot_time'] = boot_time
242 value['boot_time'] = boot_time
243 value['uptime'] = time.time() - boot_time
243 value['uptime'] = time.time() - boot_time
244
244
245 date_or_age = age(time_to_datetime(boot_time))
245 date_or_age = age(time_to_datetime(boot_time))
246 if isinstance(date_or_age, TranslationString):
246 if isinstance(date_or_age, TranslationString):
247 date_or_age = date_or_age.interpolate()
247 date_or_age = date_or_age.interpolate()
248
248
249 human_value = value.copy()
249 human_value = value.copy()
250 human_value['boot_time'] = time_to_datetime(boot_time)
250 human_value['boot_time'] = time_to_datetime(boot_time)
251 human_value['uptime'] = age(time_to_datetime(boot_time), show_suffix=False)
251 human_value['uptime'] = age(time_to_datetime(boot_time), show_suffix=False)
252
252
253 human_value['text'] = f'Server started {date_or_age}'
253 human_value['text'] = f'Server started {date_or_age}'
254 return SysInfoRes(value=value, human_value=human_value)
254 return SysInfoRes(value=value, human_value=human_value)
255
255
256
256
257 @register_sysinfo
257 @register_sysinfo
258 def memory():
258 def memory():
259 from rhodecode.lib.helpers import format_byte_size_binary
259 from rhodecode.lib.helpers import format_byte_size_binary
260 value = dict(available=0, used=0, used_real=0, cached=0, percent=0,
260 value = dict(available=0, used=0, used_real=0, cached=0, percent=0,
261 percent_used=0, free=0, inactive=0, active=0, shared=0,
261 percent_used=0, free=0, inactive=0, active=0, shared=0,
262 total=0, buffers=0, text='')
262 total=0, buffers=0, text='')
263
263
264 state = STATE_OK_DEFAULT
264 state = STATE_OK_DEFAULT
265
265
266 value.update(dict(psutil.virtual_memory()._asdict()))
266 value.update(dict(psutil.virtual_memory()._asdict()))
267 value['used_real'] = value['total'] - value['available']
267 value['used_real'] = value['total'] - value['available']
268 value['percent_used'] = psutil._common.usage_percent(value['used_real'], value['total'], 1)
268 value['percent_used'] = psutil._common.usage_percent(value['used_real'], value['total'], 1)
269
269
270 human_value = value.copy()
270 human_value = value.copy()
271 human_value['text'] = '{}/{}, {}% used'.format(
271 human_value['text'] = '{}/{}, {}% used'.format(
272 format_byte_size_binary(value['used_real']),
272 format_byte_size_binary(value['used_real']),
273 format_byte_size_binary(value['total']),
273 format_byte_size_binary(value['total']),
274 value['percent_used'])
274 value['percent_used'])
275
275
276 keys = list(value.keys())
276 keys = list(value.keys())
277 keys.pop(keys.index('percent'))
277 keys.pop(keys.index('percent'))
278 keys.pop(keys.index('percent_used'))
278 keys.pop(keys.index('percent_used'))
279 keys.pop(keys.index('text'))
279 keys.pop(keys.index('text'))
280 for k in keys:
280 for k in keys:
281 human_value[k] = format_byte_size_binary(value[k])
281 human_value[k] = format_byte_size_binary(value[k])
282
282
283 if state['type'] == STATE_OK and value['percent_used'] > 90:
283 if state['type'] == STATE_OK and value['percent_used'] > 90:
284 msg = 'Critical: your available RAM memory is very low.'
284 msg = 'Critical: your available RAM memory is very low.'
285 state = {'message': msg, 'type': STATE_ERR}
285 state = {'message': msg, 'type': STATE_ERR}
286
286
287 elif state['type'] == STATE_OK and value['percent_used'] > 70:
287 elif state['type'] == STATE_OK and value['percent_used'] > 70:
288 msg = 'Warning: your available RAM memory is running low.'
288 msg = 'Warning: your available RAM memory is running low.'
289 state = {'message': msg, 'type': STATE_WARN}
289 state = {'message': msg, 'type': STATE_WARN}
290
290
291 return SysInfoRes(value=value, state=state, human_value=human_value)
291 return SysInfoRes(value=value, state=state, human_value=human_value)
292
292
293
293
294 @register_sysinfo
294 @register_sysinfo
295 def machine_load():
295 def machine_load():
296 value = {'1_min': _NA_FLOAT, '5_min': _NA_FLOAT, '15_min': _NA_FLOAT, 'text': ''}
296 value = {'1_min': _NA_FLOAT, '5_min': _NA_FLOAT, '15_min': _NA_FLOAT, 'text': ''}
297 state = STATE_OK_DEFAULT
297 state = STATE_OK_DEFAULT
298
298
299 # load averages
299 # load averages
300 if hasattr(psutil.os, 'getloadavg'):
300 if hasattr(psutil.os, 'getloadavg'):
301 value.update(dict(
301 value.update(dict(
302 list(zip(['1_min', '5_min', '15_min'], psutil.os.getloadavg()))
302 list(zip(['1_min', '5_min', '15_min'], psutil.os.getloadavg()))
303 ))
303 ))
304
304
305 human_value = value.copy()
305 human_value = value.copy()
306 human_value['text'] = '1min: {}, 5min: {}, 15min: {}'.format(
306 human_value['text'] = '1min: {}, 5min: {}, 15min: {}'.format(
307 value['1_min'], value['5_min'], value['15_min'])
307 value['1_min'], value['5_min'], value['15_min'])
308
308
309 if state['type'] == STATE_OK and value['15_min'] > 5.0:
309 if state['type'] == STATE_OK and value['15_min'] > 5.0:
310 msg = 'Warning: your machine load is very high.'
310 msg = 'Warning: your machine load is very high.'
311 state = {'message': msg, 'type': STATE_WARN}
311 state = {'message': msg, 'type': STATE_WARN}
312
312
313 return SysInfoRes(value=value, state=state, human_value=human_value)
313 return SysInfoRes(value=value, state=state, human_value=human_value)
314
314
315
315
316 @register_sysinfo
316 @register_sysinfo
317 def cpu():
317 def cpu():
318 value = {'cpu': 0, 'cpu_count': 0, 'cpu_usage': []}
318 value = {'cpu': 0, 'cpu_count': 0, 'cpu_usage': []}
319 state = STATE_OK_DEFAULT
319 state = STATE_OK_DEFAULT
320
320
321 value['cpu'] = psutil.cpu_percent(0.5)
321 value['cpu'] = psutil.cpu_percent(0.5)
322 value['cpu_usage'] = psutil.cpu_percent(0.5, percpu=True)
322 value['cpu_usage'] = psutil.cpu_percent(0.5, percpu=True)
323 value['cpu_count'] = psutil.cpu_count()
323 value['cpu_count'] = psutil.cpu_count()
324
324
325 human_value = value.copy()
325 human_value = value.copy()
326 human_value['text'] = '{} cores at {} %'.format(value['cpu_count'], value['cpu'])
326 human_value['text'] = '{} cores at {} %'.format(value['cpu_count'], value['cpu'])
327
327
328 return SysInfoRes(value=value, state=state, human_value=human_value)
328 return SysInfoRes(value=value, state=state, human_value=human_value)
329
329
330
330
331 @register_sysinfo
331 @register_sysinfo
332 def storage():
332 def storage():
333 from rhodecode.lib.helpers import format_byte_size_binary
333 from rhodecode.lib.helpers import format_byte_size_binary
334 from rhodecode.model.settings import VcsSettingsModel
334 from rhodecode.lib.utils import get_rhodecode_repo_store_path
335 path = VcsSettingsModel().get_repos_location()
335 path = get_rhodecode_repo_store_path()
336
336
337 value = dict(percent=0, used=0, total=0, path=path, text='')
337 value = dict(percent=0, used=0, total=0, path=path, text='')
338 state = STATE_OK_DEFAULT
338 state = STATE_OK_DEFAULT
339
339
340 try:
340 try:
341 value.update(dict(psutil.disk_usage(path)._asdict()))
341 value.update(dict(psutil.disk_usage(path)._asdict()))
342 except Exception as e:
342 except Exception as e:
343 log.exception('Failed to fetch disk info')
343 log.exception('Failed to fetch disk info')
344 state = {'message': str(e), 'type': STATE_ERR}
344 state = {'message': str(e), 'type': STATE_ERR}
345
345
346 human_value = value.copy()
346 human_value = value.copy()
347 human_value['used'] = format_byte_size_binary(value['used'])
347 human_value['used'] = format_byte_size_binary(value['used'])
348 human_value['total'] = format_byte_size_binary(value['total'])
348 human_value['total'] = format_byte_size_binary(value['total'])
349 human_value['text'] = "{}/{}, {}% used".format(
349 human_value['text'] = "{}/{}, {}% used".format(
350 format_byte_size_binary(value['used']),
350 format_byte_size_binary(value['used']),
351 format_byte_size_binary(value['total']),
351 format_byte_size_binary(value['total']),
352 value['percent'])
352 value['percent'])
353
353
354 if state['type'] == STATE_OK and value['percent'] > 90:
354 if state['type'] == STATE_OK and value['percent'] > 90:
355 msg = 'Critical: your disk space is very low.'
355 msg = 'Critical: your disk space is very low.'
356 state = {'message': msg, 'type': STATE_ERR}
356 state = {'message': msg, 'type': STATE_ERR}
357
357
358 elif state['type'] == STATE_OK and value['percent'] > 70:
358 elif state['type'] == STATE_OK and value['percent'] > 70:
359 msg = 'Warning: your disk space is running low.'
359 msg = 'Warning: your disk space is running low.'
360 state = {'message': msg, 'type': STATE_WARN}
360 state = {'message': msg, 'type': STATE_WARN}
361
361
362 return SysInfoRes(value=value, state=state, human_value=human_value)
362 return SysInfoRes(value=value, state=state, human_value=human_value)
363
363
364
364
365 @register_sysinfo
365 @register_sysinfo
366 def storage_inodes():
366 def storage_inodes():
367 from rhodecode.model.settings import VcsSettingsModel
367 from rhodecode.lib.utils import get_rhodecode_repo_store_path
368 path = VcsSettingsModel().get_repos_location()
368 path = get_rhodecode_repo_store_path()
369
369
370 value = dict(percent=0.0, free=0, used=0, total=0, path=path, text='')
370 value = dict(percent=0.0, free=0, used=0, total=0, path=path, text='')
371 state = STATE_OK_DEFAULT
371 state = STATE_OK_DEFAULT
372
372
373 try:
373 try:
374 i_stat = os.statvfs(path)
374 i_stat = os.statvfs(path)
375 value['free'] = i_stat.f_ffree
375 value['free'] = i_stat.f_ffree
376 value['used'] = i_stat.f_files-i_stat.f_favail
376 value['used'] = i_stat.f_files-i_stat.f_favail
377 value['total'] = i_stat.f_files
377 value['total'] = i_stat.f_files
378 value['percent'] = percentage(value['used'], value['total'])
378 value['percent'] = percentage(value['used'], value['total'])
379 except Exception as e:
379 except Exception as e:
380 log.exception('Failed to fetch disk inodes info')
380 log.exception('Failed to fetch disk inodes info')
381 state = {'message': str(e), 'type': STATE_ERR}
381 state = {'message': str(e), 'type': STATE_ERR}
382
382
383 human_value = value.copy()
383 human_value = value.copy()
384 human_value['text'] = "{}/{}, {}% used".format(
384 human_value['text'] = "{}/{}, {}% used".format(
385 value['used'], value['total'], value['percent'])
385 value['used'], value['total'], value['percent'])
386
386
387 if state['type'] == STATE_OK and value['percent'] > 90:
387 if state['type'] == STATE_OK and value['percent'] > 90:
388 msg = 'Critical: your disk free inodes are very low.'
388 msg = 'Critical: your disk free inodes are very low.'
389 state = {'message': msg, 'type': STATE_ERR}
389 state = {'message': msg, 'type': STATE_ERR}
390
390
391 elif state['type'] == STATE_OK and value['percent'] > 70:
391 elif state['type'] == STATE_OK and value['percent'] > 70:
392 msg = 'Warning: your disk free inodes are running low.'
392 msg = 'Warning: your disk free inodes are running low.'
393 state = {'message': msg, 'type': STATE_WARN}
393 state = {'message': msg, 'type': STATE_WARN}
394
394
395 return SysInfoRes(value=value, state=state, human_value=human_value)
395 return SysInfoRes(value=value, state=state, human_value=human_value)
396
396
397
397
398 @register_sysinfo
398 @register_sysinfo
399 def storage_archives():
399 def storage_archives():
400 import rhodecode
400 import rhodecode
401 from rhodecode.lib.utils import safe_str
401 from rhodecode.lib.utils import safe_str
402 from rhodecode.lib.helpers import format_byte_size_binary
402 from rhodecode.lib.helpers import format_byte_size_binary
403
403
404 msg = 'Archive cache storage is controlled by ' \
404 msg = 'Archive cache storage is controlled by ' \
405 'archive_cache.store_dir=/path/to/cache option in the .ini file'
405 'archive_cache.store_dir=/path/to/cache option in the .ini file'
406 path = safe_str(rhodecode.CONFIG.get('archive_cache.store_dir', msg))
406 path = safe_str(rhodecode.CONFIG.get('archive_cache.store_dir', msg))
407
407
408 value = dict(percent=0, used=0, total=0, items=0, path=path, text='')
408 value = dict(percent=0, used=0, total=0, items=0, path=path, text='')
409 state = STATE_OK_DEFAULT
409 state = STATE_OK_DEFAULT
410 try:
410 try:
411 items_count = 0
411 items_count = 0
412 used = 0
412 used = 0
413 for root, dirs, files in os.walk(path):
413 for root, dirs, files in os.walk(path):
414 if root == path:
414 if root == path:
415 items_count = len(dirs)
415 items_count = len(dirs)
416
416
417 for f in files:
417 for f in files:
418 try:
418 try:
419 used += os.path.getsize(os.path.join(root, f))
419 used += os.path.getsize(os.path.join(root, f))
420 except OSError:
420 except OSError:
421 pass
421 pass
422 value.update({
422 value.update({
423 'percent': 100,
423 'percent': 100,
424 'used': used,
424 'used': used,
425 'total': used,
425 'total': used,
426 'items': items_count
426 'items': items_count
427 })
427 })
428
428
429 except Exception as e:
429 except Exception as e:
430 log.exception('failed to fetch archive cache storage')
430 log.exception('failed to fetch archive cache storage')
431 state = {'message': str(e), 'type': STATE_ERR}
431 state = {'message': str(e), 'type': STATE_ERR}
432
432
433 human_value = value.copy()
433 human_value = value.copy()
434 human_value['used'] = format_byte_size_binary(value['used'])
434 human_value['used'] = format_byte_size_binary(value['used'])
435 human_value['total'] = format_byte_size_binary(value['total'])
435 human_value['total'] = format_byte_size_binary(value['total'])
436 human_value['text'] = "{} ({} items)".format(
436 human_value['text'] = "{} ({} items)".format(
437 human_value['used'], value['items'])
437 human_value['used'], value['items'])
438
438
439 return SysInfoRes(value=value, state=state, human_value=human_value)
439 return SysInfoRes(value=value, state=state, human_value=human_value)
440
440
441
441
442 @register_sysinfo
442 @register_sysinfo
443 def storage_gist():
443 def storage_gist():
444 from rhodecode.model.gist import GIST_STORE_LOC
444 from rhodecode.model.gist import GIST_STORE_LOC
445 from rhodecode.model.settings import VcsSettingsModel
445 from rhodecode.lib.utils import safe_str, get_rhodecode_repo_store_path
446 from rhodecode.lib.utils import safe_str
447 from rhodecode.lib.helpers import format_byte_size_binary
446 from rhodecode.lib.helpers import format_byte_size_binary
448 path = safe_str(os.path.join(
447 path = safe_str(os.path.join(
449 VcsSettingsModel().get_repos_location(), GIST_STORE_LOC))
448 get_rhodecode_repo_store_path(), GIST_STORE_LOC))
450
449
451 # gist storage
450 # gist storage
452 value = dict(percent=0, used=0, total=0, items=0, path=path, text='')
451 value = dict(percent=0, used=0, total=0, items=0, path=path, text='')
453 state = STATE_OK_DEFAULT
452 state = STATE_OK_DEFAULT
454
453
455 try:
454 try:
456 items_count = 0
455 items_count = 0
457 used = 0
456 used = 0
458 for root, dirs, files in os.walk(path):
457 for root, dirs, files in os.walk(path):
459 if root == path:
458 if root == path:
460 items_count = len(dirs)
459 items_count = len(dirs)
461
460
462 for f in files:
461 for f in files:
463 try:
462 try:
464 used += os.path.getsize(os.path.join(root, f))
463 used += os.path.getsize(os.path.join(root, f))
465 except OSError:
464 except OSError:
466 pass
465 pass
467 value.update({
466 value.update({
468 'percent': 100,
467 'percent': 100,
469 'used': used,
468 'used': used,
470 'total': used,
469 'total': used,
471 'items': items_count
470 'items': items_count
472 })
471 })
473 except Exception as e:
472 except Exception as e:
474 log.exception('failed to fetch gist storage items')
473 log.exception('failed to fetch gist storage items')
475 state = {'message': str(e), 'type': STATE_ERR}
474 state = {'message': str(e), 'type': STATE_ERR}
476
475
477 human_value = value.copy()
476 human_value = value.copy()
478 human_value['used'] = format_byte_size_binary(value['used'])
477 human_value['used'] = format_byte_size_binary(value['used'])
479 human_value['total'] = format_byte_size_binary(value['total'])
478 human_value['total'] = format_byte_size_binary(value['total'])
480 human_value['text'] = "{} ({} items)".format(
479 human_value['text'] = "{} ({} items)".format(
481 human_value['used'], value['items'])
480 human_value['used'], value['items'])
482
481
483 return SysInfoRes(value=value, state=state, human_value=human_value)
482 return SysInfoRes(value=value, state=state, human_value=human_value)
484
483
485
484
486 @register_sysinfo
485 @register_sysinfo
487 def storage_temp():
486 def storage_temp():
488 import tempfile
487 import tempfile
489 from rhodecode.lib.helpers import format_byte_size_binary
488 from rhodecode.lib.helpers import format_byte_size_binary
490
489
491 path = tempfile.gettempdir()
490 path = tempfile.gettempdir()
492 value = dict(percent=0, used=0, total=0, items=0, path=path, text='')
491 value = dict(percent=0, used=0, total=0, items=0, path=path, text='')
493 state = STATE_OK_DEFAULT
492 state = STATE_OK_DEFAULT
494
493
495 if not psutil:
494 if not psutil:
496 return SysInfoRes(value=value, state=state)
495 return SysInfoRes(value=value, state=state)
497
496
498 try:
497 try:
499 value.update(dict(psutil.disk_usage(path)._asdict()))
498 value.update(dict(psutil.disk_usage(path)._asdict()))
500 except Exception as e:
499 except Exception as e:
501 log.exception('Failed to fetch temp dir info')
500 log.exception('Failed to fetch temp dir info')
502 state = {'message': str(e), 'type': STATE_ERR}
501 state = {'message': str(e), 'type': STATE_ERR}
503
502
504 human_value = value.copy()
503 human_value = value.copy()
505 human_value['used'] = format_byte_size_binary(value['used'])
504 human_value['used'] = format_byte_size_binary(value['used'])
506 human_value['total'] = format_byte_size_binary(value['total'])
505 human_value['total'] = format_byte_size_binary(value['total'])
507 human_value['text'] = "{}/{}, {}% used".format(
506 human_value['text'] = "{}/{}, {}% used".format(
508 format_byte_size_binary(value['used']),
507 format_byte_size_binary(value['used']),
509 format_byte_size_binary(value['total']),
508 format_byte_size_binary(value['total']),
510 value['percent'])
509 value['percent'])
511
510
512 return SysInfoRes(value=value, state=state, human_value=human_value)
511 return SysInfoRes(value=value, state=state, human_value=human_value)
513
512
514
513
515 @register_sysinfo
514 @register_sysinfo
516 def search_info():
515 def search_info():
517 import rhodecode
516 import rhodecode
518 from rhodecode.lib.index import searcher_from_config
517 from rhodecode.lib.index import searcher_from_config
519
518
520 backend = rhodecode.CONFIG.get('search.module', '')
519 backend = rhodecode.CONFIG.get('search.module', '')
521 location = rhodecode.CONFIG.get('search.location', '')
520 location = rhodecode.CONFIG.get('search.location', '')
522
521
523 try:
522 try:
524 searcher = searcher_from_config(rhodecode.CONFIG)
523 searcher = searcher_from_config(rhodecode.CONFIG)
525 searcher = searcher.__class__.__name__
524 searcher = searcher.__class__.__name__
526 except Exception:
525 except Exception:
527 searcher = None
526 searcher = None
528
527
529 value = dict(
528 value = dict(
530 backend=backend, searcher=searcher, location=location, text='')
529 backend=backend, searcher=searcher, location=location, text='')
531 state = STATE_OK_DEFAULT
530 state = STATE_OK_DEFAULT
532
531
533 human_value = value.copy()
532 human_value = value.copy()
534 human_value['text'] = "backend:`{}`".format(human_value['backend'])
533 human_value['text'] = "backend:`{}`".format(human_value['backend'])
535
534
536 return SysInfoRes(value=value, state=state, human_value=human_value)
535 return SysInfoRes(value=value, state=state, human_value=human_value)
537
536
538
537
539 @register_sysinfo
538 @register_sysinfo
540 def git_info():
539 def git_info():
541 from rhodecode.lib.vcs.backends import git
540 from rhodecode.lib.vcs.backends import git
542 state = STATE_OK_DEFAULT
541 state = STATE_OK_DEFAULT
543 value = human_value = ''
542 value = human_value = ''
544 try:
543 try:
545 value = git.discover_git_version(raise_on_exc=True)
544 value = git.discover_git_version(raise_on_exc=True)
546 human_value = f'version reported from VCSServer: {value}'
545 human_value = f'version reported from VCSServer: {value}'
547 except Exception as e:
546 except Exception as e:
548 state = {'message': str(e), 'type': STATE_ERR}
547 state = {'message': str(e), 'type': STATE_ERR}
549
548
550 return SysInfoRes(value=value, state=state, human_value=human_value)
549 return SysInfoRes(value=value, state=state, human_value=human_value)
551
550
552
551
553 @register_sysinfo
552 @register_sysinfo
554 def hg_info():
553 def hg_info():
555 from rhodecode.lib.vcs.backends import hg
554 from rhodecode.lib.vcs.backends import hg
556 state = STATE_OK_DEFAULT
555 state = STATE_OK_DEFAULT
557 value = human_value = ''
556 value = human_value = ''
558 try:
557 try:
559 value = hg.discover_hg_version(raise_on_exc=True)
558 value = hg.discover_hg_version(raise_on_exc=True)
560 human_value = f'version reported from VCSServer: {value}'
559 human_value = f'version reported from VCSServer: {value}'
561 except Exception as e:
560 except Exception as e:
562 state = {'message': str(e), 'type': STATE_ERR}
561 state = {'message': str(e), 'type': STATE_ERR}
563 return SysInfoRes(value=value, state=state, human_value=human_value)
562 return SysInfoRes(value=value, state=state, human_value=human_value)
564
563
565
564
566 @register_sysinfo
565 @register_sysinfo
567 def svn_info():
566 def svn_info():
568 from rhodecode.lib.vcs.backends import svn
567 from rhodecode.lib.vcs.backends import svn
569 state = STATE_OK_DEFAULT
568 state = STATE_OK_DEFAULT
570 value = human_value = ''
569 value = human_value = ''
571 try:
570 try:
572 value = svn.discover_svn_version(raise_on_exc=True)
571 value = svn.discover_svn_version(raise_on_exc=True)
573 human_value = f'version reported from VCSServer: {value}'
572 human_value = f'version reported from VCSServer: {value}'
574 except Exception as e:
573 except Exception as e:
575 state = {'message': str(e), 'type': STATE_ERR}
574 state = {'message': str(e), 'type': STATE_ERR}
576 return SysInfoRes(value=value, state=state, human_value=human_value)
575 return SysInfoRes(value=value, state=state, human_value=human_value)
577
576
578
577
579 @register_sysinfo
578 @register_sysinfo
580 def vcs_backends():
579 def vcs_backends():
581 import rhodecode
580 import rhodecode
582 value = rhodecode.CONFIG.get('vcs.backends')
581 value = rhodecode.CONFIG.get('vcs.backends')
583 human_value = 'Enabled backends in order: {}'.format(','.join(value))
582 human_value = 'Enabled backends in order: {}'.format(','.join(value))
584 return SysInfoRes(value=value, human_value=human_value)
583 return SysInfoRes(value=value, human_value=human_value)
585
584
586
585
587 @register_sysinfo
586 @register_sysinfo
588 def vcs_server():
587 def vcs_server():
589 import rhodecode
588 import rhodecode
590 from rhodecode.lib.vcs.backends import get_vcsserver_service_data
589 from rhodecode.lib.vcs.backends import get_vcsserver_service_data
591
590
592 server_url = rhodecode.CONFIG.get('vcs.server')
591 server_url = rhodecode.CONFIG.get('vcs.server')
593 enabled = rhodecode.CONFIG.get('vcs.server.enable')
592 enabled = rhodecode.CONFIG.get('vcs.server.enable')
594 protocol = rhodecode.CONFIG.get('vcs.server.protocol') or 'http'
593 protocol = rhodecode.CONFIG.get('vcs.server.protocol') or 'http'
595 state = STATE_OK_DEFAULT
594 state = STATE_OK_DEFAULT
596 version = None
595 version = None
597 workers = 0
596 workers = 0
598
597
599 try:
598 try:
600 data = get_vcsserver_service_data()
599 data = get_vcsserver_service_data()
601 if data and 'version' in data:
600 if data and 'version' in data:
602 version = data['version']
601 version = data['version']
603
602
604 if data and 'config' in data:
603 if data and 'config' in data:
605 conf = data['config']
604 conf = data['config']
606 workers = conf.get('workers', 'NOT AVAILABLE')
605 workers = conf.get('workers', 'NOT AVAILABLE')
607
606
608 connection = 'connected'
607 connection = 'connected'
609 except Exception as e:
608 except Exception as e:
610 connection = 'failed'
609 connection = 'failed'
611 state = {'message': str(e), 'type': STATE_ERR}
610 state = {'message': str(e), 'type': STATE_ERR}
612
611
613 value = dict(
612 value = dict(
614 url=server_url,
613 url=server_url,
615 enabled=enabled,
614 enabled=enabled,
616 protocol=protocol,
615 protocol=protocol,
617 connection=connection,
616 connection=connection,
618 version=version,
617 version=version,
619 text='',
618 text='',
620 )
619 )
621
620
622 human_value = value.copy()
621 human_value = value.copy()
623 human_value['text'] = \
622 human_value['text'] = \
624 '{url}@ver:{ver} via {mode} mode[workers:{workers}], connection:{conn}'.format(
623 '{url}@ver:{ver} via {mode} mode[workers:{workers}], connection:{conn}'.format(
625 url=server_url, ver=version, workers=workers, mode=protocol,
624 url=server_url, ver=version, workers=workers, mode=protocol,
626 conn=connection)
625 conn=connection)
627
626
628 return SysInfoRes(value=value, state=state, human_value=human_value)
627 return SysInfoRes(value=value, state=state, human_value=human_value)
629
628
630
629
631 @register_sysinfo
630 @register_sysinfo
632 def vcs_server_config():
631 def vcs_server_config():
633 from rhodecode.lib.vcs.backends import get_vcsserver_service_data
632 from rhodecode.lib.vcs.backends import get_vcsserver_service_data
634 state = STATE_OK_DEFAULT
633 state = STATE_OK_DEFAULT
635
634
636 value = {}
635 value = {}
637 try:
636 try:
638 data = get_vcsserver_service_data()
637 data = get_vcsserver_service_data()
639 value = data['app_config']
638 value = data['app_config']
640 except Exception as e:
639 except Exception as e:
641 state = {'message': str(e), 'type': STATE_ERR}
640 state = {'message': str(e), 'type': STATE_ERR}
642
641
643 human_value = value.copy()
642 human_value = value.copy()
644 human_value['text'] = 'VCS Server config'
643 human_value['text'] = 'VCS Server config'
645
644
646 return SysInfoRes(value=value, state=state, human_value=human_value)
645 return SysInfoRes(value=value, state=state, human_value=human_value)
647
646
648
647
649 @register_sysinfo
648 @register_sysinfo
650 def rhodecode_app_info():
649 def rhodecode_app_info():
651 import rhodecode
650 import rhodecode
652 edition = rhodecode.CONFIG.get('rhodecode.edition')
651 edition = rhodecode.CONFIG.get('rhodecode.edition')
653
652
654 value = dict(
653 value = dict(
655 rhodecode_version=rhodecode.__version__,
654 rhodecode_version=rhodecode.__version__,
656 rhodecode_lib_path=os.path.abspath(rhodecode.__file__),
655 rhodecode_lib_path=os.path.abspath(rhodecode.__file__),
657 text=''
656 text=''
658 )
657 )
659 human_value = value.copy()
658 human_value = value.copy()
660 human_value['text'] = 'RhodeCode {edition}, version {ver}'.format(
659 human_value['text'] = 'RhodeCode {edition}, version {ver}'.format(
661 edition=edition, ver=value['rhodecode_version']
660 edition=edition, ver=value['rhodecode_version']
662 )
661 )
663 return SysInfoRes(value=value, human_value=human_value)
662 return SysInfoRes(value=value, human_value=human_value)
664
663
665
664
666 @register_sysinfo
665 @register_sysinfo
667 def rhodecode_config():
666 def rhodecode_config():
668 import rhodecode
667 import rhodecode
669 path = rhodecode.CONFIG.get('__file__')
668 path = rhodecode.CONFIG.get('__file__')
670 rhodecode_ini_safe = rhodecode.CONFIG.copy()
669 rhodecode_ini_safe = rhodecode.CONFIG.copy()
671 cert_path = get_cert_path(path)
670 cert_path = get_cert_path(path)
672
671
673 try:
672 try:
674 config = configparser.ConfigParser()
673 config = configparser.ConfigParser()
675 config.read(path)
674 config.read(path)
676 parsed_ini = config
675 parsed_ini = config
677 if parsed_ini.has_section('server:main'):
676 if parsed_ini.has_section('server:main'):
678 parsed_ini = dict(parsed_ini.items('server:main'))
677 parsed_ini = dict(parsed_ini.items('server:main'))
679 except Exception:
678 except Exception:
680 log.exception('Failed to read .ini file for display')
679 log.exception('Failed to read .ini file for display')
681 parsed_ini = {}
680 parsed_ini = {}
682
681
683 rhodecode_ini_safe['server:main'] = parsed_ini
682 rhodecode_ini_safe['server:main'] = parsed_ini
684
683
685 blacklist = [
684 blacklist = [
686 f'rhodecode_{LicenseModel.LICENSE_DB_KEY}',
685 f'rhodecode_{LicenseModel.LICENSE_DB_KEY}',
687 'routes.map',
686 'routes.map',
688 'sqlalchemy.db1.url',
687 'sqlalchemy.db1.url',
689 'channelstream.secret',
688 'channelstream.secret',
690 'beaker.session.secret',
689 'beaker.session.secret',
691 'rhodecode.encrypted_values.secret',
690 'rhodecode.encrypted_values.secret',
692 'rhodecode_auth_github_consumer_key',
691 'rhodecode_auth_github_consumer_key',
693 'rhodecode_auth_github_consumer_secret',
692 'rhodecode_auth_github_consumer_secret',
694 'rhodecode_auth_google_consumer_key',
693 'rhodecode_auth_google_consumer_key',
695 'rhodecode_auth_google_consumer_secret',
694 'rhodecode_auth_google_consumer_secret',
696 'rhodecode_auth_bitbucket_consumer_secret',
695 'rhodecode_auth_bitbucket_consumer_secret',
697 'rhodecode_auth_bitbucket_consumer_key',
696 'rhodecode_auth_bitbucket_consumer_key',
698 'rhodecode_auth_twitter_consumer_secret',
697 'rhodecode_auth_twitter_consumer_secret',
699 'rhodecode_auth_twitter_consumer_key',
698 'rhodecode_auth_twitter_consumer_key',
700
699
701 'rhodecode_auth_twitter_secret',
700 'rhodecode_auth_twitter_secret',
702 'rhodecode_auth_github_secret',
701 'rhodecode_auth_github_secret',
703 'rhodecode_auth_google_secret',
702 'rhodecode_auth_google_secret',
704 'rhodecode_auth_bitbucket_secret',
703 'rhodecode_auth_bitbucket_secret',
705
704
706 'appenlight.api_key',
705 'appenlight.api_key',
707 ('app_conf', 'sqlalchemy.db1.url')
706 ('app_conf', 'sqlalchemy.db1.url')
708 ]
707 ]
709 for k in blacklist:
708 for k in blacklist:
710 if isinstance(k, tuple):
709 if isinstance(k, tuple):
711 section, key = k
710 section, key = k
712 if section in rhodecode_ini_safe:
711 if section in rhodecode_ini_safe:
713 rhodecode_ini_safe[section] = '**OBFUSCATED**'
712 rhodecode_ini_safe[section] = '**OBFUSCATED**'
714 else:
713 else:
715 rhodecode_ini_safe.pop(k, None)
714 rhodecode_ini_safe.pop(k, None)
716
715
717 # TODO: maybe put some CONFIG checks here ?
716 # TODO: maybe put some CONFIG checks here ?
718 return SysInfoRes(value={'config': rhodecode_ini_safe,
717 return SysInfoRes(value={'config': rhodecode_ini_safe,
719 'path': path, 'cert_path': cert_path})
718 'path': path, 'cert_path': cert_path})
720
719
721
720
722 @register_sysinfo
721 @register_sysinfo
723 def database_info():
722 def database_info():
724 import rhodecode
723 import rhodecode
725 from sqlalchemy.engine import url as engine_url
724 from sqlalchemy.engine import url as engine_url
726 from rhodecode.model import meta
725 from rhodecode.model import meta
727 from rhodecode.model.meta import Session
726 from rhodecode.model.meta import Session
728 from rhodecode.model.db import DbMigrateVersion
727 from rhodecode.model.db import DbMigrateVersion
729
728
730 state = STATE_OK_DEFAULT
729 state = STATE_OK_DEFAULT
731
730
732 db_migrate = DbMigrateVersion.query().filter(
731 db_migrate = DbMigrateVersion.query().filter(
733 DbMigrateVersion.repository_id == 'rhodecode_db_migrations').one()
732 DbMigrateVersion.repository_id == 'rhodecode_db_migrations').one()
734
733
735 db_url_obj = engine_url.make_url(rhodecode.CONFIG['sqlalchemy.db1.url'])
734 db_url_obj = engine_url.make_url(rhodecode.CONFIG['sqlalchemy.db1.url'])
736
735
737 try:
736 try:
738 engine = meta.get_engine()
737 engine = meta.get_engine()
739 db_server_info = engine.dialect._get_server_version_info(
738 db_server_info = engine.dialect._get_server_version_info(
740 Session.connection(bind=engine))
739 Session.connection(bind=engine))
741 db_version = '.'.join(map(str, db_server_info))
740 db_version = '.'.join(map(str, db_server_info))
742 except Exception:
741 except Exception:
743 log.exception('failed to fetch db version')
742 log.exception('failed to fetch db version')
744 db_version = 'UNKNOWN'
743 db_version = 'UNKNOWN'
745
744
746 db_info = dict(
745 db_info = dict(
747 migrate_version=db_migrate.version,
746 migrate_version=db_migrate.version,
748 type=db_url_obj.get_backend_name(),
747 type=db_url_obj.get_backend_name(),
749 version=db_version,
748 version=db_version,
750 url=repr(db_url_obj)
749 url=repr(db_url_obj)
751 )
750 )
752 current_version = db_migrate.version
751 current_version = db_migrate.version
753 expected_version = rhodecode.__dbversion__
752 expected_version = rhodecode.__dbversion__
754 if state['type'] == STATE_OK and current_version != expected_version:
753 if state['type'] == STATE_OK and current_version != expected_version:
755 msg = 'Critical: database schema mismatch, ' \
754 msg = 'Critical: database schema mismatch, ' \
756 'expected version {}, got {}. ' \
755 'expected version {}, got {}. ' \
757 'Please run migrations on your database.'.format(
756 'Please run migrations on your database.'.format(
758 expected_version, current_version)
757 expected_version, current_version)
759 state = {'message': msg, 'type': STATE_ERR}
758 state = {'message': msg, 'type': STATE_ERR}
760
759
761 human_value = db_info.copy()
760 human_value = db_info.copy()
762 human_value['url'] = "{} @ migration version: {}".format(
761 human_value['url'] = "{} @ migration version: {}".format(
763 db_info['url'], db_info['migrate_version'])
762 db_info['url'], db_info['migrate_version'])
764 human_value['version'] = "{} {}".format(db_info['type'], db_info['version'])
763 human_value['version'] = "{} {}".format(db_info['type'], db_info['version'])
765 return SysInfoRes(value=db_info, state=state, human_value=human_value)
764 return SysInfoRes(value=db_info, state=state, human_value=human_value)
766
765
767
766
768 @register_sysinfo
767 @register_sysinfo
769 def server_info(environ):
768 def server_info(environ):
770 import rhodecode
769 import rhodecode
771 from rhodecode.lib.base import get_server_ip_addr, get_server_port
770 from rhodecode.lib.base import get_server_ip_addr, get_server_port
772
771
773 value = {
772 value = {
774 'server_ip': '{}:{}'.format(
773 'server_ip': '{}:{}'.format(
775 get_server_ip_addr(environ, log_errors=False),
774 get_server_ip_addr(environ, log_errors=False),
776 get_server_port(environ)
775 get_server_port(environ)
777 ),
776 ),
778 'server_id': rhodecode.CONFIG.get('instance_id'),
777 'server_id': rhodecode.CONFIG.get('instance_id'),
779 }
778 }
780 return SysInfoRes(value=value)
779 return SysInfoRes(value=value)
781
780
782
781
783 @register_sysinfo
782 @register_sysinfo
784 def usage_info():
783 def usage_info():
785 from rhodecode.model.db import User, Repository, true
784 from rhodecode.model.db import User, Repository, true
786 value = {
785 value = {
787 'users': User.query().count(),
786 'users': User.query().count(),
788 'users_active': User.query().filter(User.active == true()).count(),
787 'users_active': User.query().filter(User.active == true()).count(),
789 'repositories': Repository.query().count(),
788 'repositories': Repository.query().count(),
790 'repository_types': {
789 'repository_types': {
791 'hg': Repository.query().filter(
790 'hg': Repository.query().filter(
792 Repository.repo_type == 'hg').count(),
791 Repository.repo_type == 'hg').count(),
793 'git': Repository.query().filter(
792 'git': Repository.query().filter(
794 Repository.repo_type == 'git').count(),
793 Repository.repo_type == 'git').count(),
795 'svn': Repository.query().filter(
794 'svn': Repository.query().filter(
796 Repository.repo_type == 'svn').count(),
795 Repository.repo_type == 'svn').count(),
797 },
796 },
798 }
797 }
799 return SysInfoRes(value=value)
798 return SysInfoRes(value=value)
800
799
801
800
802 def get_system_info(environ):
801 def get_system_info(environ):
803 environ = environ or {}
802 environ = environ or {}
804 return {
803 return {
805 'rhodecode_app': SysInfo(rhodecode_app_info)(),
804 'rhodecode_app': SysInfo(rhodecode_app_info)(),
806 'rhodecode_config': SysInfo(rhodecode_config)(),
805 'rhodecode_config': SysInfo(rhodecode_config)(),
807 'rhodecode_usage': SysInfo(usage_info)(),
806 'rhodecode_usage': SysInfo(usage_info)(),
808 'python': SysInfo(python_info)(),
807 'python': SysInfo(python_info)(),
809 'py_modules': SysInfo(py_modules)(),
808 'py_modules': SysInfo(py_modules)(),
810
809
811 'platform': SysInfo(platform_type)(),
810 'platform': SysInfo(platform_type)(),
812 'locale': SysInfo(locale_info)(),
811 'locale': SysInfo(locale_info)(),
813 'server': SysInfo(server_info, environ=environ)(),
812 'server': SysInfo(server_info, environ=environ)(),
814 'database': SysInfo(database_info)(),
813 'database': SysInfo(database_info)(),
815 'ulimit': SysInfo(ulimit_info)(),
814 'ulimit': SysInfo(ulimit_info)(),
816 'storage': SysInfo(storage)(),
815 'storage': SysInfo(storage)(),
817 'storage_inodes': SysInfo(storage_inodes)(),
816 'storage_inodes': SysInfo(storage_inodes)(),
818 'storage_archive': SysInfo(storage_archives)(),
817 'storage_archive': SysInfo(storage_archives)(),
819 'storage_gist': SysInfo(storage_gist)(),
818 'storage_gist': SysInfo(storage_gist)(),
820 'storage_temp': SysInfo(storage_temp)(),
819 'storage_temp': SysInfo(storage_temp)(),
821
820
822 'search': SysInfo(search_info)(),
821 'search': SysInfo(search_info)(),
823
822
824 'uptime': SysInfo(uptime)(),
823 'uptime': SysInfo(uptime)(),
825 'load': SysInfo(machine_load)(),
824 'load': SysInfo(machine_load)(),
826 'cpu': SysInfo(cpu)(),
825 'cpu': SysInfo(cpu)(),
827 'memory': SysInfo(memory)(),
826 'memory': SysInfo(memory)(),
828
827
829 'vcs_backends': SysInfo(vcs_backends)(),
828 'vcs_backends': SysInfo(vcs_backends)(),
830 'vcs_server': SysInfo(vcs_server)(),
829 'vcs_server': SysInfo(vcs_server)(),
831
830
832 'vcs_server_config': SysInfo(vcs_server_config)(),
831 'vcs_server_config': SysInfo(vcs_server_config)(),
833
832
834 'git': SysInfo(git_info)(),
833 'git': SysInfo(git_info)(),
835 'hg': SysInfo(hg_info)(),
834 'hg': SysInfo(hg_info)(),
836 'svn': SysInfo(svn_info)(),
835 'svn': SysInfo(svn_info)(),
837 }
836 }
838
837
839
838
840 def load_system_info(key):
839 def load_system_info(key):
841 """
840 """
842 get_sys_info('vcs_server')
841 get_sys_info('vcs_server')
843 get_sys_info('database')
842 get_sys_info('database')
844 """
843 """
845 return SysInfo(registered_helpers[key])()
844 return SysInfo(registered_helpers[key])()
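# Illustrative sketch (assumed usage, not taken from this changeset):
# get_system_info() aggregates every registered helper into one dict, while
# load_system_info(key) computes a single entry on demand via the
# registered_helpers registry, e.g.
#
#     vcs = load_system_info('vcs_server')       # one entry, same dict shape as above
#     everything = get_system_info(environ={})   # keyed by 'memory', 'cpu', 'storage', ...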
@@ -1,824 +1,824 b''
1 # Copyright (C) 2010-2023 RhodeCode GmbH
1 # Copyright (C) 2010-2023 RhodeCode GmbH
2 #
2 #
3 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
4 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
5 # (only), as published by the Free Software Foundation.
5 # (only), as published by the Free Software Foundation.
6 #
6 #
7 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
10 # GNU General Public License for more details.
11 #
11 #
12 # You should have received a copy of the GNU Affero General Public License
12 # You should have received a copy of the GNU Affero General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 #
14 #
15 # This program is dual-licensed. If you wish to learn more about the
15 # This program is dual-licensed. If you wish to learn more about the
16 # RhodeCode Enterprise Edition, including its added features, Support services,
16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18
18
19 """
19 """
20 Utilities library for RhodeCode
20 Utilities library for RhodeCode
21 """
21 """
22
22
23 import datetime
23 import datetime
24
24
25 import decorator
25 import decorator
26 import logging
26 import logging
27 import os
27 import os
28 import re
28 import re
29 import sys
29 import sys
30 import shutil
30 import shutil
31 import socket
31 import socket
32 import tempfile
32 import tempfile
33 import traceback
33 import traceback
34 import tarfile
34 import tarfile
35
35
36 from functools import wraps
36 from functools import wraps
37 from os.path import join as jn
37 from os.path import join as jn
38
38
39 import paste
39 import paste
40 import pkg_resources
40 import pkg_resources
41 from webhelpers2.text import collapse, strip_tags, convert_accented_entities, convert_misc_entities
41 from webhelpers2.text import collapse, strip_tags, convert_accented_entities, convert_misc_entities
42
42
43 from mako import exceptions
43 from mako import exceptions
44
44
45 from rhodecode.lib.hash_utils import sha256_safe, md5, sha1
45 from rhodecode.lib.hash_utils import sha256_safe, md5, sha1
46 from rhodecode.lib.type_utils import AttributeDict
46 from rhodecode.lib.type_utils import AttributeDict
47 from rhodecode.lib.str_utils import safe_bytes, safe_str
47 from rhodecode.lib.str_utils import safe_bytes, safe_str
48 from rhodecode.lib.vcs.backends.base import Config
48 from rhodecode.lib.vcs.backends.base import Config
49 from rhodecode.lib.vcs.exceptions import VCSError
49 from rhodecode.lib.vcs.exceptions import VCSError
50 from rhodecode.lib.vcs.utils.helpers import get_scm, get_scm_backend
50 from rhodecode.lib.vcs.utils.helpers import get_scm, get_scm_backend
51 from rhodecode.lib.ext_json import sjson as json
51 from rhodecode.lib.ext_json import sjson as json
52 from rhodecode.model import meta
52 from rhodecode.model import meta
53 from rhodecode.model.db import (
53 from rhodecode.model.db import (
54 Repository, User, RhodeCodeUi, UserLog, RepoGroup, UserGroup)
54 Repository, User, RhodeCodeUi, UserLog, RepoGroup, UserGroup)
55 from rhodecode.model.meta import Session
55 from rhodecode.model.meta import Session
56
56
57
57
58 log = logging.getLogger(__name__)
58 log = logging.getLogger(__name__)
59
59
60 REMOVED_REPO_PAT = re.compile(r'rm__\d{8}_\d{6}_\d{6}__.*')
60 REMOVED_REPO_PAT = re.compile(r'rm__\d{8}_\d{6}_\d{6}__.*')
61
61
62 # String which contains characters that are not allowed in slug names for
62 # String which contains characters that are not allowed in slug names for
63 # repositories or repository groups. It is properly escaped to use it in
63 # repositories or repository groups. It is properly escaped to use it in
64 # regular expressions.
64 # regular expressions.
65 SLUG_BAD_CHARS = re.escape(r'`?=[]\;\'"<>,/~!@#$%^&*()+{}|:')
65 SLUG_BAD_CHARS = re.escape(r'`?=[]\;\'"<>,/~!@#$%^&*()+{}|:')
66
66
67 # Regex that matches forbidden characters in repo/group slugs.
67 # Regex that matches forbidden characters in repo/group slugs.
68 SLUG_BAD_CHAR_RE = re.compile(r'[{}\x00-\x08\x0b-\x0c\x0e-\x1f]'.format(SLUG_BAD_CHARS))
68 SLUG_BAD_CHAR_RE = re.compile(r'[{}\x00-\x08\x0b-\x0c\x0e-\x1f]'.format(SLUG_BAD_CHARS))
69
69
70 # Regex that matches allowed characters in repo/group slugs.
70 # Regex that matches allowed characters in repo/group slugs.
71 SLUG_GOOD_CHAR_RE = re.compile(r'[^{}]'.format(SLUG_BAD_CHARS))
71 SLUG_GOOD_CHAR_RE = re.compile(r'[^{}]'.format(SLUG_BAD_CHARS))
72
72
73 # Regex that matches whole repo/group slugs.
73 # Regex that matches whole repo/group slugs.
74 SLUG_RE = re.compile(r'[^{}]+'.format(SLUG_BAD_CHARS))
74 SLUG_RE = re.compile(r'[^{}]+'.format(SLUG_BAD_CHARS))
75
75
76 _license_cache = None
76 _license_cache = None
77
77
78
78
79 def adopt_for_celery(func):
79 def adopt_for_celery(func):
80 """
80 """
81 Decorator designed to adopt hooks (from rhodecode.lib.hooks_base)
81 Decorator designed to adopt hooks (from rhodecode.lib.hooks_base)
82 for further usage as celery tasks.
82 for further usage as celery tasks.
83 """
83 """
84 @wraps(func)
84 @wraps(func)
85 def wrapper(extras):
85 def wrapper(extras):
86 extras = AttributeDict(extras)
86 extras = AttributeDict(extras)
87 # HooksResponse implements to_json method which must be used there.
87 # HooksResponse implements to_json method which must be used there.
88 return func(extras).to_json()
88 return func(extras).to_json()
89 return wrapper
89 return wrapper
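# Illustrative usage (assumed hook name and extras keys, not taken from this
# changeset): wrapping a hook lets celery pass a plain dict in and get the
# hook's .to_json() result back, roughly:
#
#     @adopt_for_celery
#     def my_hook(extras):          # a hook returning a HooksResponse-like object
#         ...
#
#     result = my_hook({'repository': 'some-repo'})   # dict in, JSON-able data out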
90
90
91
91
92 def repo_name_slug(value):
92 def repo_name_slug(value):
93 """
93 """
94 Return a slug of the repository name.
94 Return a slug of the repository name.
95 This function is called on each creation/modification
95 This function is called on each creation/modification
96 of a repository, to prevent bad names.
96 of a repository, to prevent bad names.
97 """
97 """
98
98
99 replacement_char = '-'
99 replacement_char = '-'
100
100
101 slug = strip_tags(value)
101 slug = strip_tags(value)
102 slug = convert_accented_entities(slug)
102 slug = convert_accented_entities(slug)
103 slug = convert_misc_entities(slug)
103 slug = convert_misc_entities(slug)
104
104
105 slug = SLUG_BAD_CHAR_RE.sub('', slug)
105 slug = SLUG_BAD_CHAR_RE.sub('', slug)
106 slug = re.sub(r'[\s]+', '-', slug)
106 slug = re.sub(r'[\s]+', '-', slug)
107 slug = collapse(slug, replacement_char)
107 slug = collapse(slug, replacement_char)
108
108
109 return slug
109 return slug
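# Illustrative example (assumed input, not taken from this changeset): tags and
# entities are normalized, forbidden slug characters are dropped, and whitespace
# runs collapse to dashes, so roughly
#
#     repo_name_slug('My Repo!! (v2)')   # -> 'My-Repo-v2'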
110
110
111
111
112 #==============================================================================
112 #==============================================================================
113 # PERM DECORATOR HELPERS FOR EXTRACTING NAMES FOR PERM CHECKS
113 # PERM DECORATOR HELPERS FOR EXTRACTING NAMES FOR PERM CHECKS
114 #==============================================================================
114 #==============================================================================
115 def get_repo_slug(request):
115 def get_repo_slug(request):
116 _repo = ''
116 _repo = ''
117
117
118 if hasattr(request, 'db_repo_name'):
118 if hasattr(request, 'db_repo_name'):
119 # if our requests has set db reference use it for name, this
119 # if our requests has set db reference use it for name, this
120 # translates the example.com/_<id> into proper repo names
120 # translates the example.com/_<id> into proper repo names
121 _repo = request.db_repo_name
121 _repo = request.db_repo_name
122 elif getattr(request, 'matchdict', None):
122 elif getattr(request, 'matchdict', None):
123 # pyramid
123 # pyramid
124 _repo = request.matchdict.get('repo_name')
124 _repo = request.matchdict.get('repo_name')
125
125
126 if _repo:
126 if _repo:
127 _repo = _repo.rstrip('/')
127 _repo = _repo.rstrip('/')
128 return _repo
128 return _repo
129
129
130
130
131 def get_repo_group_slug(request):
131 def get_repo_group_slug(request):
132 _group = ''
132 _group = ''
133 if hasattr(request, 'db_repo_group'):
133 if hasattr(request, 'db_repo_group'):
134 # if our requests has set db reference use it for name, this
134 # if our requests has set db reference use it for name, this
135 # translates the example.com/_<id> into proper repo group names
135 # translates the example.com/_<id> into proper repo group names
136 _group = request.db_repo_group.group_name
136 _group = request.db_repo_group.group_name
137 elif getattr(request, 'matchdict', None):
137 elif getattr(request, 'matchdict', None):
138 # pyramid
138 # pyramid
139 _group = request.matchdict.get('repo_group_name')
139 _group = request.matchdict.get('repo_group_name')
140
140
141 if _group:
141 if _group:
142 _group = _group.rstrip('/')
142 _group = _group.rstrip('/')
143 return _group
143 return _group
144
144
145
145
146 def get_user_group_slug(request):
146 def get_user_group_slug(request):
147 _user_group = ''
147 _user_group = ''
148
148
149 if hasattr(request, 'db_user_group'):
149 if hasattr(request, 'db_user_group'):
150 _user_group = request.db_user_group.users_group_name
150 _user_group = request.db_user_group.users_group_name
151 elif getattr(request, 'matchdict', None):
151 elif getattr(request, 'matchdict', None):
152 # pyramid
152 # pyramid
153 _user_group = request.matchdict.get('user_group_id')
153 _user_group = request.matchdict.get('user_group_id')
154 _user_group_name = request.matchdict.get('user_group_name')
154 _user_group_name = request.matchdict.get('user_group_name')
155 try:
155 try:
156 if _user_group:
156 if _user_group:
157 _user_group = UserGroup.get(_user_group)
157 _user_group = UserGroup.get(_user_group)
158 elif _user_group_name:
158 elif _user_group_name:
159 _user_group = UserGroup.get_by_group_name(_user_group_name)
159 _user_group = UserGroup.get_by_group_name(_user_group_name)
160
160
161 if _user_group:
161 if _user_group:
162 _user_group = _user_group.users_group_name
162 _user_group = _user_group.users_group_name
163 except Exception:
163 except Exception:
164 log.exception('Failed to get user group by id and name')
164 log.exception('Failed to get user group by id and name')
165 # catch all failures here
165 # catch all failures here
166 return None
166 return None
167
167
168 return _user_group
168 return _user_group
169
169
170
170
171 def get_filesystem_repos(path, recursive=False, skip_removed_repos=True):
171 def get_filesystem_repos(path, recursive=False, skip_removed_repos=True):
172 """
172 """
173 Scans the given path for repos and returns (name, (type, path)) tuples
173 Scans the given path for repos and returns (name, (type, path)) tuples
174
174
175 :param path: path to scan for repositories
175 :param path: path to scan for repositories
176 :param recursive: recursive search; returned names include their subdirectory prefix
176 :param recursive: recursive search; returned names include their subdirectory prefix
177 """
177 """
178
178
179 # remove ending slash for better results
179 # remove ending slash for better results
180 path = path.rstrip(os.sep)
180 path = path.rstrip(os.sep)
181 log.debug('now scanning in %s location recursive:%s...', path, recursive)
181 log.debug('now scanning in %s location recursive:%s...', path, recursive)
182
182
183 def _get_repos(p):
183 def _get_repos(p):
184 dirpaths = get_dirpaths(p)
184 dirpaths = get_dirpaths(p)
185 if not _is_dir_writable(p):
185 if not _is_dir_writable(p):
186 log.warning('repo path without write access: %s', p)
186 log.warning('repo path without write access: %s', p)
187
187
188 for dirpath in dirpaths:
188 for dirpath in dirpaths:
189 if os.path.isfile(os.path.join(p, dirpath)):
189 if os.path.isfile(os.path.join(p, dirpath)):
190 continue
190 continue
191 cur_path = os.path.join(p, dirpath)
191 cur_path = os.path.join(p, dirpath)
192
192
193 # skip removed repos
193 # skip removed repos
194 if skip_removed_repos and REMOVED_REPO_PAT.match(dirpath):
194 if skip_removed_repos and REMOVED_REPO_PAT.match(dirpath):
195 continue
195 continue
196
196
197 # skip .<something> dirs
197 # skip .<something> dirs
198 if dirpath.startswith('.'):
198 if dirpath.startswith('.'):
199 continue
199 continue
200
200
201 try:
201 try:
202 scm_info = get_scm(cur_path)
202 scm_info = get_scm(cur_path)
203 yield scm_info[1].split(path, 1)[-1].lstrip(os.sep), scm_info
203 yield scm_info[1].split(path, 1)[-1].lstrip(os.sep), scm_info
204 except VCSError:
204 except VCSError:
205 if not recursive:
205 if not recursive:
206 continue
206 continue
207 # check if this dir contains other repos for recursive scan
207 # check if this dir contains other repos for recursive scan
208 rec_path = os.path.join(p, dirpath)
208 rec_path = os.path.join(p, dirpath)
209 if os.path.isdir(rec_path):
209 if os.path.isdir(rec_path):
210 yield from _get_repos(rec_path)
210 yield from _get_repos(rec_path)
211
211
212 return _get_repos(path)
212 return _get_repos(path)
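# Illustrative usage (assumed path, not taken from this changeset): the generator
# yields (relative_name, (scm_type, real_path)) pairs, e.g.
#
#     for name, (scm, _path) in get_filesystem_repos('/srv/repos', recursive=True):
#         print(scm, name)    # e.g. 'git' 'group/my-repo'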
213
213
214
214
215 def get_dirpaths(p: str) -> list:
215 def get_dirpaths(p: str) -> list:
216 try:
216 try:
217 # OS-independent way of checking if we have at least read-only
217 # OS-independent way of checking if we have at least read-only
218 # access or not.
218 # access or not.
219 dirpaths = os.listdir(p)
219 dirpaths = os.listdir(p)
220 except OSError:
220 except OSError:
221 log.warning('ignoring repo path without read access: %s', p)
221 log.warning('ignoring repo path without read access: %s', p)
222 return []
222 return []
223
223
224 # os.listdir has a tweak: if a unicode path is passed into it, it tries to
224 # os.listdir has a tweak: if a unicode path is passed into it, it tries to
225 # decode the entries and suddenly returns unicode objects itself. The items
225 # decode the entries and suddenly returns unicode objects itself. The items
226 # it cannot decode are returned as byte strings and cause issues.
226 # it cannot decode are returned as byte strings and cause issues.
227 #
227 #
228 # Those paths are ignored here until a solid solution for path handling has
228 # Those paths are ignored here until a solid solution for path handling has
229 # been built.
229 # been built.
230 expected_type = type(p)
230 expected_type = type(p)
231
231
232 def _has_correct_type(item):
232 def _has_correct_type(item):
233 if type(item) is not expected_type:
233 if type(item) is not expected_type:
234 log.error(
234 log.error(
235 "Ignoring path %s since it cannot be decoded into str.",
235 "Ignoring path %s since it cannot be decoded into str.",
236 # Using "repr" to make sure that we see the raw byte value in case
236 # Using "repr" to make sure that we see the raw byte value in case
237 # it is needed for support or debugging.
237 # it is needed for support or debugging.
238 repr(item))
238 repr(item))
239 return False
239 return False
240 return True
240 return True
241
241
242 dirpaths = [item for item in dirpaths if _has_correct_type(item)]
242 dirpaths = [item for item in dirpaths if _has_correct_type(item)]
243
243
244 return dirpaths
244 return dirpaths
245
245
246
246
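The type filter above can be illustrated in isolation; the entries below are made up, and the point is simply that anything whose type differs from the type of the path passed in gets dropped:

    expected_type = str                              # type of the path that was passed in
    entries = ['docs', b'\xc3\x28-broken', 'src']    # illustrative os.listdir()-style output
    kept = [e for e in entries if type(e) is expected_type]
    # kept == ['docs', 'src']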
247 def _is_dir_writable(path):
247 def _is_dir_writable(path):
248 """
248 """
249 Probe if `path` is writable.
249 Probe if `path` is writable.
250
250
251 Due to trouble on Cygwin / Windows, this is actually probing if it is
251 Due to trouble on Cygwin / Windows, this is actually probing if it is
252 possible to create a file inside of `path`, stat does not produce reliable
252 possible to create a file inside of `path`, stat does not produce reliable
253 results in this case.
253 results in this case.
254 """
254 """
255 try:
255 try:
256 with tempfile.TemporaryFile(dir=path):
256 with tempfile.TemporaryFile(dir=path):
257 pass
257 pass
258 except OSError:
258 except OSError:
259 return False
259 return False
260 return True
260 return True
261
261
262
262
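A quick way to exercise the probe (directory names are placeholders); note that it answers "can I create a file here right now", which is what the repo scanner actually cares about, rather than what stat or os.access report:

    _is_dir_writable('/srv/repos')   # True when a temporary file can be created there
    _is_dir_writable('/')            # typically False for an unprivileged process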
263 def is_valid_repo(repo_name, base_path, expect_scm=None, explicit_scm=None, config=None):
263 def is_valid_repo(repo_name, base_path, expect_scm=None, explicit_scm=None, config=None):
264 """
264 """
265 Returns True if the given path is a valid repository, False otherwise.
265 Returns True if the given path is a valid repository, False otherwise.
266 If expect_scm is given, also compare whether the detected scm matches
266 If expect_scm is given, also compare whether the detected scm matches
267 the expected one. If explicit_scm is given, don't try to detect the
267 the expected one. If explicit_scm is given, don't try to detect the
268 scm; just use the given one to check if the repo is valid.
268 scm; just use the given one to check if the repo is valid.
269
269
270 :param repo_name:
270 :param repo_name:
271 :param base_path:
271 :param base_path:
272 :param expect_scm:
272 :param expect_scm:
273 :param explicit_scm:
273 :param explicit_scm:
274 :param config:
274 :param config:
275
275
276 :return True: if given path is a valid repository
276 :return True: if given path is a valid repository
277 """
277 """
278 full_path = os.path.join(safe_str(base_path), safe_str(repo_name))
278 full_path = os.path.join(safe_str(base_path), safe_str(repo_name))
279 log.debug('Checking if `%s` is a valid path for repository. '
279 log.debug('Checking if `%s` is a valid path for repository. '
280 'Explicit type: %s', repo_name, explicit_scm)
280 'Explicit type: %s', repo_name, explicit_scm)
281
281
282 try:
282 try:
283 if explicit_scm:
283 if explicit_scm:
284 detected_scms = [get_scm_backend(explicit_scm)(
284 detected_scms = [get_scm_backend(explicit_scm)(
285 full_path, config=config).alias]
285 full_path, config=config).alias]
286 else:
286 else:
287 detected_scms = get_scm(full_path)
287 detected_scms = get_scm(full_path)
288
288
289 if expect_scm:
289 if expect_scm:
290 return detected_scms[0] == expect_scm
290 return detected_scms[0] == expect_scm
291 log.debug('path: %s is a vcs object:%s', full_path, detected_scms)
291 log.debug('path: %s is a vcs object:%s', full_path, detected_scms)
292 return True
292 return True
293 except VCSError:
293 except VCSError:
294 log.debug('path: %s is not a valid repo !', full_path)
294 log.debug('path: %s is not a valid repo !', full_path)
295 return False
295 return False
296
296
297
297
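A hedged usage sketch (the repository name and base path are hypothetical):

    is_valid_repo('project/backend', '/srv/repos')                     # any detectable vcs type
    is_valid_repo('project/backend', '/srv/repos', expect_scm='git')   # additionally require git
    is_valid_repo('project/backend', '/srv/repos', explicit_scm='hg')  # skip detection, validate as hg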
298 def is_valid_repo_group(repo_group_name, base_path, skip_path_check=False):
298 def is_valid_repo_group(repo_group_name, base_path, skip_path_check=False):
299 """
299 """
300 Returns True if a given path is a repository group, False otherwise
300 Returns True if a given path is a repository group, False otherwise
301
301
302 :param repo_group_name:
302 :param repo_group_name:
303 :param base_path:
303 :param base_path:
304 """
304 """
305 full_path = os.path.join(safe_str(base_path), safe_str(repo_group_name))
305 full_path = os.path.join(safe_str(base_path), safe_str(repo_group_name))
306 log.debug('Checking if `%s` is a valid path for repository group',
306 log.debug('Checking if `%s` is a valid path for repository group',
307 repo_group_name)
307 repo_group_name)
308
308
309 # check if it's not a repo
309 # check if it's not a repo
310 if is_valid_repo(repo_group_name, base_path):
310 if is_valid_repo(repo_group_name, base_path):
311 log.debug('Repo called %s exists, it is not a valid repo group', repo_name)
311 log.debug('Repo called %s exists, it is not a valid repo group', repo_name)
312 return False
312 return False
313
313
314 try:
314 try:
315 # we need to check bare git repos at higher level
315 # we need to check bare git repos at higher level
316 # since we might match branches/hooks/info/objects or possible
316 # since we might match branches/hooks/info/objects or possible
317 # other things inside bare git repo
317 # other things inside bare git repo
318 maybe_repo = os.path.dirname(full_path)
318 maybe_repo = os.path.dirname(full_path)
319 if maybe_repo == base_path:
319 if maybe_repo == base_path:
320 # skip root level repo check; we know root location CANNOT BE a repo group
320 # skip root level repo check; we know root location CANNOT BE a repo group
321 return False
321 return False
322
322
323 scm_ = get_scm(maybe_repo)
323 scm_ = get_scm(maybe_repo)
324 log.debug('path: %s is a vcs object:%s, not valid repo group', full_path, scm_)
324 log.debug('path: %s is a vcs object:%s, not valid repo group', full_path, scm_)
325 return False
325 return False
326 except VCSError:
326 except VCSError:
327 pass
327 pass
328
328
329 # check if it's a valid path
329 # check if it's a valid path
330 if skip_path_check or os.path.isdir(full_path):
330 if skip_path_check or os.path.isdir(full_path):
331 log.debug('path: %s is a valid repo group !', full_path)
331 log.debug('path: %s is a valid repo group !', full_path)
332 return True
332 return True
333
333
334 log.debug('path: %s is not a valid repo group !', full_path)
334 log.debug('path: %s is not a valid repo group !', full_path)
335 return False
335 return False
336
336
337
337
338 def ask_ok(prompt, retries=4, complaint='[y]es or [n]o please!'):
338 def ask_ok(prompt, retries=4, complaint='[y]es or [n]o please!'):
339 while True:
339 while True:
340 ok = input(prompt)
340 ok = input(prompt)
341 if ok.lower() in ('y', 'ye', 'yes'):
341 if ok.lower() in ('y', 'ye', 'yes'):
342 return True
342 return True
343 if ok.lower() in ('n', 'no', 'nop', 'nope'):
343 if ok.lower() in ('n', 'no', 'nop', 'nope'):
344 return False
344 return False
345 retries = retries - 1
345 retries = retries - 1
346 if retries < 0:
346 if retries < 0:
347 raise OSError
347 raise OSError
348 print(complaint)
348 print(complaint)
349
349
350 # propagated from mercurial documentation
350 # propagated from mercurial documentation
351 ui_sections = [
351 ui_sections = [
352 'alias', 'auth',
352 'alias', 'auth',
353 'decode/encode', 'defaults',
353 'decode/encode', 'defaults',
354 'diff', 'email',
354 'diff', 'email',
355 'extensions', 'format',
355 'extensions', 'format',
356 'merge-patterns', 'merge-tools',
356 'merge-patterns', 'merge-tools',
357 'hooks', 'http_proxy',
357 'hooks', 'http_proxy',
358 'smtp', 'patch',
358 'smtp', 'patch',
359 'paths', 'profiling',
359 'paths', 'profiling',
360 'server', 'trusted',
360 'server', 'trusted',
361 'ui', 'web', ]
361 'ui', 'web', ]
362
362
363
363
364 def config_data_from_db(clear_session=True, repo=None):
364 def config_data_from_db(clear_session=True, repo=None):
365 """
365 """
366 Read the configuration data from the database and return configuration
366 Read the configuration data from the database and return configuration
367 tuples.
367 tuples.
368 """
368 """
369 from rhodecode.model.settings import VcsSettingsModel
369 from rhodecode.model.settings import VcsSettingsModel
370
370
371 config = []
371 config = []
372
372
373 sa = meta.Session()
373 sa = meta.Session()
374 settings_model = VcsSettingsModel(repo=repo, sa=sa)
374 settings_model = VcsSettingsModel(repo=repo, sa=sa)
375
375
376 ui_settings = settings_model.get_ui_settings()
376 ui_settings = settings_model.get_ui_settings()
377
377
378 ui_data = []
378 ui_data = []
379 for setting in ui_settings:
379 for setting in ui_settings:
380 if setting.active:
380 if setting.active:
381 ui_data.append((setting.section, setting.key, setting.value))
381 ui_data.append((setting.section, setting.key, setting.value))
382 config.append((
382 config.append((
383 safe_str(setting.section), safe_str(setting.key),
383 safe_str(setting.section), safe_str(setting.key),
384 safe_str(setting.value)))
384 safe_str(setting.value)))
385 if setting.key == 'push_ssl':
385 if setting.key == 'push_ssl':
386 # force set push_ssl requirement to False, rhodecode
386 # force set push_ssl requirement to False, rhodecode
387 # handles that
387 # handles that
388 config.append((
388 config.append((
389 safe_str(setting.section), safe_str(setting.key), False))
389 safe_str(setting.section), safe_str(setting.key), False))
390 log.debug(
390 log.debug(
391 'settings ui from db@repo[%s]: %s',
391 'settings ui from db@repo[%s]: %s',
392 repo,
392 repo,
393 ','.join(['[{}] {}={}'.format(*s) for s in ui_data]))
393 ','.join(['[{}] {}={}'.format(*s) for s in ui_data]))
394 if clear_session:
394 if clear_session:
395 meta.Session.remove()
395 meta.Session.remove()
396
396
397 # TODO: mikhail: probably it makes no sense to re-read hooks information.
397 # TODO: mikhail: probably it makes no sense to re-read hooks information.
398 # It's already there and activated/deactivated
398 # It's already there and activated/deactivated
399 skip_entries = []
399 skip_entries = []
400 enabled_hook_classes = get_enabled_hook_classes(ui_settings)
400 enabled_hook_classes = get_enabled_hook_classes(ui_settings)
401 if 'pull' not in enabled_hook_classes:
401 if 'pull' not in enabled_hook_classes:
402 skip_entries.append(('hooks', RhodeCodeUi.HOOK_PRE_PULL))
402 skip_entries.append(('hooks', RhodeCodeUi.HOOK_PRE_PULL))
403 if 'push' not in enabled_hook_classes:
403 if 'push' not in enabled_hook_classes:
404 skip_entries.append(('hooks', RhodeCodeUi.HOOK_PRE_PUSH))
404 skip_entries.append(('hooks', RhodeCodeUi.HOOK_PRE_PUSH))
405 skip_entries.append(('hooks', RhodeCodeUi.HOOK_PRETX_PUSH))
405 skip_entries.append(('hooks', RhodeCodeUi.HOOK_PRETX_PUSH))
406 skip_entries.append(('hooks', RhodeCodeUi.HOOK_PUSH_KEY))
406 skip_entries.append(('hooks', RhodeCodeUi.HOOK_PUSH_KEY))
407
407
408 config = [entry for entry in config if entry[:2] not in skip_entries]
408 config = [entry for entry in config if entry[:2] not in skip_entries]
409
409
410 return config
410 return config
411
411
412
412
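The returned value is a flat list of (section, key, value) tuples ready to be fed into a Config object; the rows below are purely illustrative, but they show how the forced push_ssl override is appended right after the original entry:

    [('web', 'push_ssl', 'true'),
     ('web', 'push_ssl', False),          # forced off, RhodeCode handles ssl itself
     ('extensions', 'largefiles', '')]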
413 def make_db_config(clear_session=True, repo=None):
413 def make_db_config(clear_session=True, repo=None):
414 """
414 """
415 Create a :class:`Config` instance based on the values in the database.
415 Create a :class:`Config` instance based on the values in the database.
416 """
416 """
417 config = Config()
417 config = Config()
418 config_data = config_data_from_db(clear_session=clear_session, repo=repo)
418 config_data = config_data_from_db(clear_session=clear_session, repo=repo)
419 for section, option, value in config_data:
419 for section, option, value in config_data:
420 config.set(section, option, value)
420 config.set(section, option, value)
421 return config
421 return config
422
422
423
423
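A minimal sketch of how the two helpers compose, assuming the Config class exposes a get(section, option) accessor (the repo name is hypothetical):

    config = make_db_config(clear_session=False, repo='project/backend')
    largefiles = config.get('extensions', 'largefiles')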
424 def get_enabled_hook_classes(ui_settings):
424 def get_enabled_hook_classes(ui_settings):
425 """
425 """
426 Return the enabled hook classes.
426 Return the enabled hook classes.
427
427
428 :param ui_settings: List of ui_settings as returned
428 :param ui_settings: List of ui_settings as returned
429 by :meth:`VcsSettingsModel.get_ui_settings`
429 by :meth:`VcsSettingsModel.get_ui_settings`
430
430
431 :return: a list with the enabled hook classes. The order is not guaranteed.
431 :return: a list with the enabled hook classes. The order is not guaranteed.
432 :rtype: list
432 :rtype: list
433 """
433 """
434 enabled_hooks = []
434 enabled_hooks = []
435 active_hook_keys = [
435 active_hook_keys = [
436 key for section, key, value, active in ui_settings
436 key for section, key, value, active in ui_settings
437 if section == 'hooks' and active]
437 if section == 'hooks' and active]
438
438
439 hook_names = {
439 hook_names = {
440 RhodeCodeUi.HOOK_PUSH: 'push',
440 RhodeCodeUi.HOOK_PUSH: 'push',
441 RhodeCodeUi.HOOK_PULL: 'pull',
441 RhodeCodeUi.HOOK_PULL: 'pull',
442 RhodeCodeUi.HOOK_REPO_SIZE: 'repo_size'
442 RhodeCodeUi.HOOK_REPO_SIZE: 'repo_size'
443 }
443 }
444
444
445 for key in active_hook_keys:
445 for key in active_hook_keys:
446 hook = hook_names.get(key)
446 hook = hook_names.get(key)
447 if hook:
447 if hook:
448 enabled_hooks.append(hook)
448 enabled_hooks.append(hook)
449
449
450 return enabled_hooks
450 return enabled_hooks
451
451
452
452
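For illustration, with made-up ui_settings rows (4-tuples of section, key, value, active, matching the unpacking above):

    ui_settings = [
        ('hooks', RhodeCodeUi.HOOK_PUSH, 'python:...', True),
        ('hooks', RhodeCodeUi.HOOK_PULL, 'python:...', False),
        ('web', 'push_ssl', 'false', True),
    ]
    get_enabled_hook_classes(ui_settings)   # -> ['push']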
453 def set_rhodecode_config(config):
453 def set_rhodecode_config(config):
454 """
454 """
455 Updates pyramid config with new settings from database
455 Updates pyramid config with new settings from database
456
456
457 :param config:
457 :param config:
458 """
458 """
459 from rhodecode.model.settings import SettingsModel
459 from rhodecode.model.settings import SettingsModel
460 app_settings = SettingsModel().get_all_settings()
460 app_settings = SettingsModel().get_all_settings()
461
461
462 for k, v in list(app_settings.items()):
462 for k, v in list(app_settings.items()):
463 config[k] = v
463 config[k] = v
464
464
465
465
466 def get_rhodecode_realm():
466 def get_rhodecode_realm():
467 """
467 """
468 Return the rhodecode realm from database.
468 Return the rhodecode realm from database.
469 """
469 """
470 from rhodecode.model.settings import SettingsModel
470 from rhodecode.model.settings import SettingsModel
471 realm = SettingsModel().get_setting_by_name('realm')
471 realm = SettingsModel().get_setting_by_name('realm')
472 return safe_str(realm.app_settings_value)
472 return safe_str(realm.app_settings_value)
473
473
474
474
475 def get_rhodecode_base_path():
475 def get_rhodecode_repo_store_path():
476 """
476 """
477 Returns the repository store path, i.e. the filesystem location
477 Returns the repository store path, i.e. the filesystem location
478 under which all repositories are kept.
478 under which all repositories are kept.
479 """
479 """
480
480
481 import rhodecode
481 import rhodecode
482 return rhodecode.CONFIG['default_base_path']
482 return rhodecode.CONFIG['repo_store.path']
483
483
484
484
485 def map_groups(path):
485 def map_groups(path):
486 """
486 """
487 Given a full path to a repository, create all nested groups that this
487 Given a full path to a repository, create all nested groups that this
488 repo is inside. This function creates parent-child relationships between
488 repo is inside. This function creates parent-child relationships between
489 groups and creates default perms for all new groups.
489 groups and creates default perms for all new groups.
490
490
491 :param path: full path to repository
491 :param path: full path to repository
492 """
492 """
493 from rhodecode.model.repo_group import RepoGroupModel
493 from rhodecode.model.repo_group import RepoGroupModel
494 sa = meta.Session()
494 sa = meta.Session()
495 groups = path.split(Repository.NAME_SEP)
495 groups = path.split(Repository.NAME_SEP)
496 parent = None
496 parent = None
497 group = None
497 group = None
498
498
499 # last element is repo in nested groups structure
499 # last element is repo in nested groups structure
500 groups = groups[:-1]
500 groups = groups[:-1]
501 rgm = RepoGroupModel(sa)
501 rgm = RepoGroupModel(sa)
502 owner = User.get_first_super_admin()
502 owner = User.get_first_super_admin()
503 for lvl, group_name in enumerate(groups):
503 for lvl, group_name in enumerate(groups):
504 group_name = '/'.join(groups[:lvl] + [group_name])
504 group_name = '/'.join(groups[:lvl] + [group_name])
505 group = RepoGroup.get_by_group_name(group_name)
505 group = RepoGroup.get_by_group_name(group_name)
506 desc = '%s group' % group_name
506 desc = '%s group' % group_name
507
507
508 # skip folders that are now removed repos
508 # skip folders that are now removed repos
509 if REMOVED_REPO_PAT.match(group_name):
509 if REMOVED_REPO_PAT.match(group_name):
510 break
510 break
511
511
512 if group is None:
512 if group is None:
513 log.debug('creating group level: %s group_name: %s',
513 log.debug('creating group level: %s group_name: %s',
514 lvl, group_name)
514 lvl, group_name)
515 group = RepoGroup(group_name, parent)
515 group = RepoGroup(group_name, parent)
516 group.group_description = desc
516 group.group_description = desc
517 group.user = owner
517 group.user = owner
518 sa.add(group)
518 sa.add(group)
519 perm_obj = rgm._create_default_perms(group)
519 perm_obj = rgm._create_default_perms(group)
520 sa.add(perm_obj)
520 sa.add(perm_obj)
521 sa.flush()
521 sa.flush()
522
522
523 parent = group
523 parent = group
524 return group
524 return group
525
525
526
526
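To make the nesting behaviour concrete: for a scanned repo path such as 'team/project/backend' (hypothetical), the last segment is treated as the repository and the groups are created top-down:

    # 'team'          -> fetched or created first, parent=None
    # 'team/project'  -> created next, with 'team' as parent
    # return value    -> the RepoGroup for 'team/project' (or None when the repo sits at the root)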
527 def repo2db_mapper(initial_repo_list, remove_obsolete=False, force_hooks_rebuild=False):
527 def repo2db_mapper(initial_repo_list, remove_obsolete=False, force_hooks_rebuild=False):
528 """
528 """
529 Maps all repos given in initial_repo_list; non-existing repositories
529 Maps all repos given in initial_repo_list; non-existing repositories
530 are created. If remove_obsolete is True it also checks for db entries
530 are created. If remove_obsolete is True it also checks for db entries
531 that are not in initial_repo_list and removes them.
531 that are not in initial_repo_list and removes them.
532
532
533 :param initial_repo_list: list of repositories found by scanning methods
533 :param initial_repo_list: list of repositories found by scanning methods
534 :param remove_obsolete: check for obsolete entries in database
534 :param remove_obsolete: check for obsolete entries in database
535 """
535 """
536 from rhodecode.model.repo import RepoModel
536 from rhodecode.model.repo import RepoModel
537 from rhodecode.model.repo_group import RepoGroupModel
537 from rhodecode.model.repo_group import RepoGroupModel
538 from rhodecode.model.settings import SettingsModel
538 from rhodecode.model.settings import SettingsModel
539
539
540 sa = meta.Session()
540 sa = meta.Session()
541 repo_model = RepoModel()
541 repo_model = RepoModel()
542 user = User.get_first_super_admin()
542 user = User.get_first_super_admin()
543 added = []
543 added = []
544
544
545 # creation defaults
545 # creation defaults
546 defs = SettingsModel().get_default_repo_settings(strip_prefix=True)
546 defs = SettingsModel().get_default_repo_settings(strip_prefix=True)
547 enable_statistics = defs.get('repo_enable_statistics')
547 enable_statistics = defs.get('repo_enable_statistics')
548 enable_locking = defs.get('repo_enable_locking')
548 enable_locking = defs.get('repo_enable_locking')
549 enable_downloads = defs.get('repo_enable_downloads')
549 enable_downloads = defs.get('repo_enable_downloads')
550 private = defs.get('repo_private')
550 private = defs.get('repo_private')
551
551
552 for name, repo in list(initial_repo_list.items()):
552 for name, repo in list(initial_repo_list.items()):
553 group = map_groups(name)
553 group = map_groups(name)
554 str_name = safe_str(name)
554 str_name = safe_str(name)
555 db_repo = repo_model.get_by_repo_name(str_name)
555 db_repo = repo_model.get_by_repo_name(str_name)
556
556
557 # found repo that is on filesystem not in RhodeCode database
557 # found repo that is on filesystem not in RhodeCode database
558 if not db_repo:
558 if not db_repo:
559 log.info('repository `%s` not found in the database, creating now', name)
559 log.info('repository `%s` not found in the database, creating now', name)
560 added.append(name)
560 added.append(name)
561 desc = (repo.description
561 desc = (repo.description
562 if repo.description != 'unknown'
562 if repo.description != 'unknown'
563 else '%s repository' % name)
563 else '%s repository' % name)
564
564
565 db_repo = repo_model._create_repo(
565 db_repo = repo_model._create_repo(
566 repo_name=name,
566 repo_name=name,
567 repo_type=repo.alias,
567 repo_type=repo.alias,
568 description=desc,
568 description=desc,
569 repo_group=getattr(group, 'group_id', None),
569 repo_group=getattr(group, 'group_id', None),
570 owner=user,
570 owner=user,
571 enable_locking=enable_locking,
571 enable_locking=enable_locking,
572 enable_downloads=enable_downloads,
572 enable_downloads=enable_downloads,
573 enable_statistics=enable_statistics,
573 enable_statistics=enable_statistics,
574 private=private,
574 private=private,
575 state=Repository.STATE_CREATED
575 state=Repository.STATE_CREATED
576 )
576 )
577 sa.commit()
577 sa.commit()
578 # we just added that repo, so make sure we update the server info
578 # we just added that repo, so make sure we update the server info
579 if db_repo.repo_type == 'git':
579 if db_repo.repo_type == 'git':
580 git_repo = db_repo.scm_instance()
580 git_repo = db_repo.scm_instance()
581 # update repository server-info
581 # update repository server-info
582 log.debug('Running update server info')
582 log.debug('Running update server info')
583 git_repo._update_server_info(force=True)
583 git_repo._update_server_info(force=True)
584
584
585 db_repo.update_commit_cache()
585 db_repo.update_commit_cache()
586
586
587 config = db_repo._config
587 config = db_repo._config
588 config.set('extensions', 'largefiles', '')
588 config.set('extensions', 'largefiles', '')
589 repo = db_repo.scm_instance(config=config)
589 repo = db_repo.scm_instance(config=config)
590 repo.install_hooks(force=force_hooks_rebuild)
590 repo.install_hooks(force=force_hooks_rebuild)
591
591
592 removed = []
592 removed = []
593 if remove_obsolete:
593 if remove_obsolete:
594 # remove from database those repositories that are not in the filesystem
594 # remove from database those repositories that are not in the filesystem
595 for repo in sa.query(Repository).all():
595 for repo in sa.query(Repository).all():
596 if repo.repo_name not in list(initial_repo_list.keys()):
596 if repo.repo_name not in list(initial_repo_list.keys()):
597 log.debug("Removing non-existing repository found in db `%s`",
597 log.debug("Removing non-existing repository found in db `%s`",
598 repo.repo_name)
598 repo.repo_name)
599 try:
599 try:
600 RepoModel(sa).delete(repo, forks='detach', fs_remove=False)
600 RepoModel(sa).delete(repo, forks='detach', fs_remove=False)
601 sa.commit()
601 sa.commit()
602 removed.append(repo.repo_name)
602 removed.append(repo.repo_name)
603 except Exception:
603 except Exception:
604 # don't hold further removals on error
604 # don't hold further removals on error
605 log.error(traceback.format_exc())
605 log.error(traceback.format_exc())
606 sa.rollback()
606 sa.rollback()
607
607
608 def splitter(full_repo_name):
608 def splitter(full_repo_name):
609 _parts = full_repo_name.rsplit(RepoGroup.url_sep(), 1)
609 _parts = full_repo_name.rsplit(RepoGroup.url_sep(), 1)
610 gr_name = None
610 gr_name = None
611 if len(_parts) == 2:
611 if len(_parts) == 2:
612 gr_name = _parts[0]
612 gr_name = _parts[0]
613 return gr_name
613 return gr_name
614
614
615 initial_repo_group_list = [splitter(x) for x in
615 initial_repo_group_list = [splitter(x) for x in
616 list(initial_repo_list.keys()) if splitter(x)]
616 list(initial_repo_list.keys()) if splitter(x)]
617
617
618 # remove from database those repository groups that are not in the
618 # remove from database those repository groups that are not in the
619 # filesystem; due to parent-child relationships we need to delete them
619 # filesystem; due to parent-child relationships we need to delete them
620 # in a specific order, most nested first
620 # in a specific order, most nested first
621 all_groups = [x.group_name for x in sa.query(RepoGroup).all()]
621 all_groups = [x.group_name for x in sa.query(RepoGroup).all()]
622 def nested_sort(gr):
622 def nested_sort(gr):
623 return len(gr.split('/'))
623 return len(gr.split('/'))
624 for group_name in sorted(all_groups, key=nested_sort, reverse=True):
624 for group_name in sorted(all_groups, key=nested_sort, reverse=True):
625 if group_name not in initial_repo_group_list:
625 if group_name not in initial_repo_group_list:
626 repo_group = RepoGroup.get_by_group_name(group_name)
626 repo_group = RepoGroup.get_by_group_name(group_name)
627 if (repo_group.children.all() or
627 if (repo_group.children.all() or
628 not RepoGroupModel().check_exist_filesystem(
628 not RepoGroupModel().check_exist_filesystem(
629 group_name=group_name, exc_on_failure=False)):
629 group_name=group_name, exc_on_failure=False)):
630 continue
630 continue
631
631
632 log.info(
632 log.info(
633 'Removing non-existing repository group found in db `%s`',
633 'Removing non-existing repository group found in db `%s`',
634 group_name)
634 group_name)
635 try:
635 try:
636 RepoGroupModel(sa).delete(group_name, fs_remove=False)
636 RepoGroupModel(sa).delete(group_name, fs_remove=False)
637 sa.commit()
637 sa.commit()
638 removed.append(group_name)
638 removed.append(group_name)
639 except Exception:
639 except Exception:
640 # don't hold further removals on error
640 # don't hold further removals on error
641 log.exception(
641 log.exception(
642 'Unable to remove repository group `%s`',
642 'Unable to remove repository group `%s`',
643 group_name)
643 group_name)
644 sa.rollback()
644 sa.rollback()
645 raise
645 raise
646
646
647 return added, removed
647 return added, removed
648
648
649
649
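For illustration, initial_repo_list is expected to be a mapping of repo name to a vcs instance exposing .alias and .description (the variable name below is made up):

    added, removed = repo2db_mapper(scanned_repos, remove_obsolete=True)
    # added   -> repo names found on disk but missing from the database
    # removed -> repo and repo-group names deleted from the database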
650 def load_rcextensions(root_path):
650 def load_rcextensions(root_path):
651 import rhodecode
651 import rhodecode
652 from rhodecode.config import conf
652 from rhodecode.config import conf
653
653
654 path = os.path.join(root_path)
654 path = os.path.join(root_path)
655 sys.path.append(path)
655 sys.path.append(path)
656
656
657 try:
657 try:
658 rcextensions = __import__('rcextensions')
658 rcextensions = __import__('rcextensions')
659 except ImportError:
659 except ImportError:
660 if os.path.isdir(os.path.join(path, 'rcextensions')):
660 if os.path.isdir(os.path.join(path, 'rcextensions')):
661 log.warning('Unable to load rcextensions from %s', path)
661 log.warning('Unable to load rcextensions from %s', path)
662 rcextensions = None
662 rcextensions = None
663
663
664 if rcextensions:
664 if rcextensions:
665 log.info('Loaded rcextensions from %s...', rcextensions)
665 log.info('Loaded rcextensions from %s...', rcextensions)
666 rhodecode.EXTENSIONS = rcextensions
666 rhodecode.EXTENSIONS = rcextensions
667
667
668 # Additional mappings that are not present in the pygments lexers
668 # Additional mappings that are not present in the pygments lexers
669 conf.LANGUAGES_EXTENSIONS_MAP.update(
669 conf.LANGUAGES_EXTENSIONS_MAP.update(
670 getattr(rhodecode.EXTENSIONS, 'EXTRA_MAPPINGS', {}))
670 getattr(rhodecode.EXTENSIONS, 'EXTRA_MAPPINGS', {}))
671
671
672
672
673 def get_custom_lexer(extension):
673 def get_custom_lexer(extension):
674 """
674 """
675 returns a custom lexer if it is defined in rcextensions module, or None
675 returns a custom lexer if it is defined in rcextensions module, or None
676 if there's no custom lexer defined
676 if there's no custom lexer defined
677 """
677 """
678 import rhodecode
678 import rhodecode
679 from pygments import lexers
679 from pygments import lexers
680
680
681 # custom override made by RhodeCode
681 # custom override made by RhodeCode
682 if extension in ['mako']:
682 if extension in ['mako']:
683 return lexers.get_lexer_by_name('html+mako')
683 return lexers.get_lexer_by_name('html+mako')
684
684
685 # check if we didn't define this extension as other lexer
685 # check if we didn't define this extension as other lexer
686 extensions = rhodecode.EXTENSIONS and getattr(rhodecode.EXTENSIONS, 'EXTRA_LEXERS', None)
686 extensions = rhodecode.EXTENSIONS and getattr(rhodecode.EXTENSIONS, 'EXTRA_LEXERS', None)
687 if extensions and extension in rhodecode.EXTENSIONS.EXTRA_LEXERS:
687 if extensions and extension in rhodecode.EXTENSIONS.EXTRA_LEXERS:
688 _lexer_name = rhodecode.EXTENSIONS.EXTRA_LEXERS[extension]
688 _lexer_name = rhodecode.EXTENSIONS.EXTRA_LEXERS[extension]
689 return lexers.get_lexer_by_name(_lexer_name)
689 return lexers.get_lexer_by_name(_lexer_name)
690
690
691
691
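As an example, an rcextensions package could map an extra extension onto an existing pygments lexer name (the mapping below is hypothetical):

    # rcextensions/__init__.py
    EXTRA_LEXERS = {
        'jinja': 'html+django',
    }
    # get_custom_lexer('jinja') would then return the 'html+django' pygments lexer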
692 #==============================================================================
692 #==============================================================================
693 # TEST FUNCTIONS AND CREATORS
693 # TEST FUNCTIONS AND CREATORS
694 #==============================================================================
694 #==============================================================================
695 def create_test_index(repo_location, config):
695 def create_test_index(repo_location, config):
696 """
696 """
697 Makes default test index.
697 Makes default test index.
698 """
698 """
699 try:
699 try:
700 import rc_testdata
700 import rc_testdata
701 except ImportError:
701 except ImportError:
702 raise ImportError('Failed to import rc_testdata, '
702 raise ImportError('Failed to import rc_testdata, '
703 'please make sure this package is installed from requirements_test.txt')
703 'please make sure this package is installed from requirements_test.txt')
704 rc_testdata.extract_search_index(
704 rc_testdata.extract_search_index(
705 'vcs_search_index', os.path.dirname(config['search.location']))
705 'vcs_search_index', os.path.dirname(config['search.location']))
706
706
707
707
708 def create_test_directory(test_path):
708 def create_test_directory(test_path):
709 """
709 """
710 Create test directory if it doesn't exist.
710 Create test directory if it doesn't exist.
711 """
711 """
712 if not os.path.isdir(test_path):
712 if not os.path.isdir(test_path):
713 log.debug('Creating testdir %s', test_path)
713 log.debug('Creating testdir %s', test_path)
714 os.makedirs(test_path)
714 os.makedirs(test_path)
715
715
716
716
717 def create_test_database(test_path, config):
717 def create_test_database(test_path, config):
718 """
718 """
719 Makes a fresh database.
719 Makes a fresh database.
720 """
720 """
721 from rhodecode.lib.db_manage import DbManage
721 from rhodecode.lib.db_manage import DbManage
722 from rhodecode.lib.utils2 import get_encryption_key
722 from rhodecode.lib.utils2 import get_encryption_key
723
723
724 # PART ONE create db
724 # PART ONE create db
725 dbconf = config['sqlalchemy.db1.url']
725 dbconf = config['sqlalchemy.db1.url']
726 enc_key = get_encryption_key(config)
726 enc_key = get_encryption_key(config)
727
727
728 log.debug('making test db %s', dbconf)
728 log.debug('making test db %s', dbconf)
729
729
730 dbmanage = DbManage(log_sql=False, dbconf=dbconf, root=config['here'],
730 dbmanage = DbManage(log_sql=False, dbconf=dbconf, root=config['here'],
731 tests=True, cli_args={'force_ask': True}, enc_key=enc_key)
731 tests=True, cli_args={'force_ask': True}, enc_key=enc_key)
732 dbmanage.create_tables(override=True)
732 dbmanage.create_tables(override=True)
733 dbmanage.set_db_version()
733 dbmanage.set_db_version()
734 # for tests dynamically set new root paths based on generated content
734 # for tests dynamically set new root paths based on generated content
735 dbmanage.create_settings(dbmanage.config_prompt(test_path))
735 dbmanage.create_settings(dbmanage.config_prompt(test_path))
736 dbmanage.create_default_user()
736 dbmanage.create_default_user()
737 dbmanage.create_test_admin_and_users()
737 dbmanage.create_test_admin_and_users()
738 dbmanage.create_permissions()
738 dbmanage.create_permissions()
739 dbmanage.populate_default_permissions()
739 dbmanage.populate_default_permissions()
740 Session().commit()
740 Session().commit()
741
741
742
742
743 def create_test_repositories(test_path, config):
743 def create_test_repositories(test_path, config):
744 """
744 """
745 Creates test repositories in the temporary directory. Repositories are
745 Creates test repositories in the temporary directory. Repositories are
746 extracted from archives within the rc_testdata package.
746 extracted from archives within the rc_testdata package.
747 """
747 """
748 import rc_testdata
748 import rc_testdata
749 from rhodecode.tests import HG_REPO, GIT_REPO, SVN_REPO
749 from rhodecode.tests import HG_REPO, GIT_REPO, SVN_REPO
750
750
751 log.debug('making test vcs repositories')
751 log.debug('making test vcs repositories')
752
752
753 idx_path = config['search.location']
753 idx_path = config['search.location']
754 data_path = config['cache_dir']
754 data_path = config['cache_dir']
755
755
756 # clean index and data
756 # clean index and data
757 if idx_path and os.path.exists(idx_path):
757 if idx_path and os.path.exists(idx_path):
758 log.debug('remove %s', idx_path)
758 log.debug('remove %s', idx_path)
759 shutil.rmtree(idx_path)
759 shutil.rmtree(idx_path)
760
760
761 if data_path and os.path.exists(data_path):
761 if data_path and os.path.exists(data_path):
762 log.debug('remove %s', data_path)
762 log.debug('remove %s', data_path)
763 shutil.rmtree(data_path)
763 shutil.rmtree(data_path)
764
764
765 rc_testdata.extract_hg_dump('vcs_test_hg', jn(test_path, HG_REPO))
765 rc_testdata.extract_hg_dump('vcs_test_hg', jn(test_path, HG_REPO))
766 rc_testdata.extract_git_dump('vcs_test_git', jn(test_path, GIT_REPO))
766 rc_testdata.extract_git_dump('vcs_test_git', jn(test_path, GIT_REPO))
767
767
768 # Note: Subversion is in the process of being integrated with the system;
768 # Note: Subversion is in the process of being integrated with the system;
769 # until we have a properly packed version of the test svn repository, this
769 # until we have a properly packed version of the test svn repository, this
770 # copies the repo over from the "rc_testdata" package
770 # copies the repo over from the "rc_testdata" package
771 svn_repo_path = rc_testdata.get_svn_repo_archive()
771 svn_repo_path = rc_testdata.get_svn_repo_archive()
772 with tarfile.open(svn_repo_path) as tar:
772 with tarfile.open(svn_repo_path) as tar:
773 tar.extractall(jn(test_path, SVN_REPO))
773 tar.extractall(jn(test_path, SVN_REPO))
774
774
775
775
776 def password_changed(auth_user, session):
776 def password_changed(auth_user, session):
777 # Never report password change in case of default user or anonymous user.
777 # Never report password change in case of default user or anonymous user.
778 if auth_user.username == User.DEFAULT_USER or auth_user.user_id is None:
778 if auth_user.username == User.DEFAULT_USER or auth_user.user_id is None:
779 return False
779 return False
780
780
781 password_hash = md5(safe_bytes(auth_user.password)) if auth_user.password else None
781 password_hash = md5(safe_bytes(auth_user.password)) if auth_user.password else None
782 rhodecode_user = session.get('rhodecode_user', {})
782 rhodecode_user = session.get('rhodecode_user', {})
783 session_password_hash = rhodecode_user.get('password', '')
783 session_password_hash = rhodecode_user.get('password', '')
784 return password_hash != session_password_hash
784 return password_hash != session_password_hash
785
785
786
786
787 def read_opensource_licenses():
787 def read_opensource_licenses():
788 global _license_cache
788 global _license_cache
789
789
790 if not _license_cache:
790 if not _license_cache:
791 licenses = pkg_resources.resource_string(
791 licenses = pkg_resources.resource_string(
792 'rhodecode', 'config/licenses.json')
792 'rhodecode', 'config/licenses.json')
793 _license_cache = json.loads(licenses)
793 _license_cache = json.loads(licenses)
794
794
795 return _license_cache
795 return _license_cache
796
796
797
797
798 def generate_platform_uuid():
798 def generate_platform_uuid():
799 """
799 """
800 Generates a platform UUID based on its name
800 Generates a platform UUID based on its name
801 """
801 """
802 import platform
802 import platform
803
803
804 try:
804 try:
805 uuid_list = [platform.platform()]
805 uuid_list = [platform.platform()]
806 return sha256_safe(':'.join(uuid_list))
806 return sha256_safe(':'.join(uuid_list))
807 except Exception as e:
807 except Exception as e:
808 log.error('Failed to generate host uuid: %s', e)
808 log.error('Failed to generate host uuid: %s', e)
809 return 'UNDEFINED'
809 return 'UNDEFINED'
810
810
811
811
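In effect the uuid is just a sha256 of the platform string, so it stays stable for a given host/OS build; the output below is illustrative:

    platform.platform()        # e.g. 'Linux-5.15.0-86-generic-x86_64-with-glibc2.35'
    generate_platform_uuid()   # sha256 of that string, or 'UNDEFINED' on failure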
812 def send_test_email(recipients, email_body='TEST EMAIL'):
812 def send_test_email(recipients, email_body='TEST EMAIL'):
813 """
813 """
814 Simple code for generating test emails.
814 Simple code for generating test emails.
815 Usage::
815 Usage::
816
816
817 from rhodecode.lib import utils
817 from rhodecode.lib import utils
818 utils.send_test_email()
818 utils.send_test_email()
819 """
819 """
820 from rhodecode.lib.celerylib import tasks, run_task
820 from rhodecode.lib.celerylib import tasks, run_task
821
821
822 email_body = email_body_plaintext = email_body
822 email_body = email_body_plaintext = email_body
823 subject = f'SUBJECT FROM: {socket.gethostname()}'
823 subject = f'SUBJECT FROM: {socket.gethostname()}'
824 tasks.send_email(recipients, subject, email_body_plaintext, email_body)
824 tasks.send_email(recipients, subject, email_body_plaintext, email_body)
@@ -1,140 +1,149 b''
1 # Copyright (C) 2010-2023 RhodeCode GmbH
1 # Copyright (C) 2010-2023 RhodeCode GmbH
2 #
2 #
3 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
4 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
5 # (only), as published by the Free Software Foundation.
5 # (only), as published by the Free Software Foundation.
6 #
6 #
7 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
10 # GNU General Public License for more details.
11 #
11 #
12 # You should have received a copy of the GNU Affero General Public License
12 # You should have received a copy of the GNU Affero General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 #
14 #
15 # This program is dual-licensed. If you wish to learn more about the
15 # This program is dual-licensed. If you wish to learn more about the
16 # RhodeCode Enterprise Edition, including its added features, Support services,
16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18
18
19
19
20 import logging
20 import logging
21
21
22 import rhodecode
22 import rhodecode
23 from rhodecode.model import meta, db
23 from rhodecode.model import meta, db
24 from rhodecode.lib.utils import get_rhodecode_repo_store_path
24 from rhodecode.lib.utils2 import obfuscate_url_pw, get_encryption_key
25 from rhodecode.lib.utils2 import obfuscate_url_pw, get_encryption_key
25
26
26 log = logging.getLogger(__name__)
27 log = logging.getLogger(__name__)
27
28
28
29
29 def init_model(engine, encryption_key: bytes = b''):
30 def init_model(engine, encryption_key: bytes = b''):
30 """
31 """
31 Initializes the db session and binds the engine to the metadata.
32 Initializes the db session and binds the engine to the metadata.
32 Call this before using any of the tables or classes in the model,
33 Call this before using any of the tables or classes in the model,
33 preferably once at application start.
34 preferably once at application start.
34
35
35 :param engine: engine to bind to
36 :param engine: engine to bind to
36 :param encryption_key: key used for encryption
37 :param encryption_key: key used for encryption
37 """
38 """
38
39
39 engine_str = obfuscate_url_pw(str(engine.url))
40 engine_str = obfuscate_url_pw(str(engine.url))
40 log.info("RhodeCode %s initializing db for %s", rhodecode.__version__, engine_str)
41 log.info("RhodeCode %s initializing db for %s", rhodecode.__version__, engine_str)
41
42
42 meta.bind_engine_to_session(engine)
43 meta.bind_engine_to_session(engine)
43 init_model_encryption(db, enc_key=encryption_key)
44 init_model_encryption(db, enc_key=encryption_key)
44
45
45
46
46 def init_model_encryption(*db_models, enc_key: bytes = b'', config=None):
47 def init_model_encryption(*db_models, enc_key: bytes = b'', config=None):
47 if not enc_key:
48 if not enc_key:
48 from pyramid.threadlocal import get_current_registry
49 from pyramid.threadlocal import get_current_registry
49 config = config or get_current_registry().settings
50 config = config or get_current_registry().settings
50 enc_key = get_encryption_key(config)
51 enc_key = get_encryption_key(config)
51
52
52 for db_model in db_models:
53 for db_model in db_models:
53 log.debug('setting encryption key for model %s', db_model)
54 log.debug('setting encryption key for model %s', db_model)
54 db_model.ENCRYPTION_KEY = enc_key
55 db_model.ENCRYPTION_KEY = enc_key
55
56
56
57
57 class BaseModel(object):
58 class BaseModel(object):
58 """
59 """
59 Base model for all RhodeCode models; it adds a SQLAlchemy session
60 Base model for all RhodeCode models; it adds a SQLAlchemy session
60 to the model instance
61 to the model instance
61
62
62 :param sa: If passed it reuses this session instead of creating a new one
63 :param sa: If passed it reuses this session instead of creating a new one
63 """
64 """
64
65
65 cls = None # override in child class
66 cls = None # override in child class
66
67
67 def __init__(self, sa=None):
68 def __init__(self, sa=None):
68 if sa is not None:
69 if sa is not None:
69 self.sa = sa
70 self.sa = sa
70 else:
71 else:
71 self.sa = meta.Session()
72 self.sa = meta.Session()
72
73
73 def _get_instance(self, cls, instance, callback=None):
74 def _get_instance(self, cls, instance, callback=None):
74 """
75 """
75 Gets instance of given cls using some simple lookup mechanism.
76 Gets instance of given cls using some simple lookup mechanism.
76
77
77 :param cls: classes to fetch
78 :param cls: classes to fetch
78 :param instance: int or Instance
79 :param instance: int or Instance
79 :param callback: callback to call if all lookups failed
80 :param callback: callback to call if all lookups failed
80 """
81 """
81
82
82 if isinstance(instance, cls):
83 if isinstance(instance, cls):
83 return instance
84 return instance
84 elif isinstance(instance, int):
85 elif isinstance(instance, int):
85 if isinstance(cls, tuple):
86 if isinstance(cls, tuple):
86 # if we pass multi instances we pick first to .get()
87 # if we pass multi instances we pick first to .get()
87 cls = cls[0]
88 cls = cls[0]
88 return cls.get(instance)
89 return cls.get(instance)
89 else:
90 else:
90 if instance:
91 if instance:
91 if callback is None:
92 if callback is None:
92 raise Exception(
93 raise Exception(
93 'given object must be int or Instance of %s '
94 'given object must be int or Instance of %s '
94 'got %s, no callback provided' % (cls, type(instance))
95 'got %s, no callback provided' % (cls, type(instance))
95 )
96 )
96 else:
97 else:
97 return callback(instance)
98 return callback(instance)
98
99
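A hedged illustration of the lookup order (`model` stands for any instantiated BaseModel subclass; ids and names are made up):

    model._get_instance(db.User, user_obj, callback=db.User.get_by_username)   # instance -> returned as-is
    model._get_instance(db.User, 42, callback=db.User.get_by_username)         # int -> db.User.get(42)
    model._get_instance(db.User, 'admin', callback=db.User.get_by_username)    # fallback -> callback('admin')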
99 def _get_user(self, user):
100 def _get_user(self, user):
100 """
101 """
101 Helper method to get user by ID, or username fallback
102 Helper method to get user by ID, or username fallback
102
103
103 :param user: UserID, username, or User instance
104 :param user: UserID, username, or User instance
104 """
105 """
105 return self._get_instance(
106 return self._get_instance(
106 db.User, user, callback=db.User.get_by_username)
107 db.User, user, callback=db.User.get_by_username)
107
108
108 def _get_user_group(self, user_group):
109 def _get_user_group(self, user_group):
109 """
110 """
110 Helper method to get user group by ID, or user group name fallback
111 Helper method to get user group by ID, or user group name fallback
111
112
112 :param user_group: UserGroupID, user_group_name, or UserGroup instance
113 :param user_group: UserGroupID, user_group_name, or UserGroup instance
113 """
114 """
114 return self._get_instance(
115 return self._get_instance(
115 db.UserGroup, user_group, callback=db.UserGroup.get_by_group_name)
116 db.UserGroup, user_group, callback=db.UserGroup.get_by_group_name)
116
117
117 def _get_repo(self, repository):
118 def _get_repo(self, repository):
118 """
119 """
119 Helper method to get repository by ID, or repository name
120 Helper method to get repository by ID, or repository name
120
121
121 :param repository: RepoID, repository name or Repository Instance
122 :param repository: RepoID, repository name or Repository Instance
122 """
123 """
123 return self._get_instance(
124 return self._get_instance(
124 db.Repository, repository, callback=db.Repository.get_by_repo_name)
125 db.Repository, repository, callback=db.Repository.get_by_repo_name)
125
126
126 def _get_perm(self, permission):
127 def _get_perm(self, permission):
127 """
128 """
128 Helper method to get permission by ID, or permission name
129 Helper method to get permission by ID, or permission name
129
130
130 :param permission: PermissionID, permission_name or Permission instance
131 :param permission: PermissionID, permission_name or Permission instance
131 """
132 """
132 return self._get_instance(
133 return self._get_instance(
133 db.Permission, permission, callback=db.Permission.get_by_key)
134 db.Permission, permission, callback=db.Permission.get_by_key)
134
135
135 @classmethod
136 @classmethod
136 def get_all(cls):
137 def get_all(cls):
137 """
138 """
138 Returns all instances of what is defined in `cls` class variable
139 Returns all instances of what is defined in `cls` class variable
139 """
140 """
140 return cls.cls.getAll()
141 return cls.cls.getAll()
142
143 @property
144 def repos_path(self):
145 """
146 Gets the repositories root path from *ini file
147 """
148
149 return get_rhodecode_repo_store_path()
@@ -1,5887 +1,5884 b''
1 # Copyright (C) 2010-2023 RhodeCode GmbH
1 # Copyright (C) 2010-2023 RhodeCode GmbH
2 #
2 #
3 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
4 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
5 # (only), as published by the Free Software Foundation.
5 # (only), as published by the Free Software Foundation.
6 #
6 #
7 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
10 # GNU General Public License for more details.
11 #
11 #
12 # You should have received a copy of the GNU Affero General Public License
12 # You should have received a copy of the GNU Affero General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 #
14 #
15 # This program is dual-licensed. If you wish to learn more about the
15 # This program is dual-licensed. If you wish to learn more about the
16 # RhodeCode Enterprise Edition, including its added features, Support services,
16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18
18
19 """
19 """
20 Database Models for RhodeCode Enterprise
20 Database Models for RhodeCode Enterprise
21 """
21 """
22
22
23 import re
23 import re
24 import os
24 import os
25 import time
25 import time
26 import string
26 import string
27 import logging
27 import logging
28 import datetime
28 import datetime
29 import uuid
29 import uuid
30 import warnings
30 import warnings
31 import ipaddress
31 import ipaddress
32 import functools
32 import functools
33 import traceback
33 import traceback
34 import collections
34 import collections
35
35
36 from sqlalchemy import (
36 from sqlalchemy import (
37 or_, and_, not_, func, cast, TypeDecorator, event, select,
37 or_, and_, not_, func, cast, TypeDecorator, event, select,
38 true, false, null,
38 true, false, null,
39 Index, Sequence, UniqueConstraint, ForeignKey, CheckConstraint, Column,
39 Index, Sequence, UniqueConstraint, ForeignKey, CheckConstraint, Column,
40 Boolean, String, Unicode, UnicodeText, DateTime, Integer, LargeBinary,
40 Boolean, String, Unicode, UnicodeText, DateTime, Integer, LargeBinary,
41 Text, Float, PickleType, BigInteger)
41 Text, Float, PickleType, BigInteger)
42 from sqlalchemy.sql.expression import case
42 from sqlalchemy.sql.expression import case
43 from sqlalchemy.sql.functions import coalesce, count # pragma: no cover
43 from sqlalchemy.sql.functions import coalesce, count # pragma: no cover
44 from sqlalchemy.orm import (
44 from sqlalchemy.orm import (
45 relationship, lazyload, joinedload, class_mapper, validates, aliased, load_only)
45 relationship, lazyload, joinedload, class_mapper, validates, aliased, load_only)
46 from sqlalchemy.ext.declarative import declared_attr
46 from sqlalchemy.ext.declarative import declared_attr
47 from sqlalchemy.ext.hybrid import hybrid_property
47 from sqlalchemy.ext.hybrid import hybrid_property
48 from sqlalchemy.exc import IntegrityError # pragma: no cover
48 from sqlalchemy.exc import IntegrityError # pragma: no cover
49 from sqlalchemy.dialects.mysql import LONGTEXT
49 from sqlalchemy.dialects.mysql import LONGTEXT
50 from zope.cachedescriptors.property import Lazy as LazyProperty
50 from zope.cachedescriptors.property import Lazy as LazyProperty
51 from pyramid.threadlocal import get_current_request
51 from pyramid.threadlocal import get_current_request
52 from webhelpers2.text import remove_formatting
52 from webhelpers2.text import remove_formatting
53
53
54 from rhodecode.lib.str_utils import safe_bytes
54 from rhodecode.lib.str_utils import safe_bytes
55 from rhodecode.translation import _
55 from rhodecode.translation import _
56 from rhodecode.lib.vcs import get_vcs_instance, VCSError
56 from rhodecode.lib.vcs import get_vcs_instance, VCSError
57 from rhodecode.lib.vcs.backends.base import (
57 from rhodecode.lib.vcs.backends.base import (
58 EmptyCommit, Reference, unicode_to_reference, reference_to_unicode)
58 EmptyCommit, Reference, unicode_to_reference, reference_to_unicode)
59 from rhodecode.lib.utils2 import (
59 from rhodecode.lib.utils2 import (
60 str2bool, safe_str, get_commit_safe, sha1_safe,
60 str2bool, safe_str, get_commit_safe, sha1_safe,
61 time_to_datetime, aslist, Optional, safe_int, get_clone_url, AttributeDict,
61 time_to_datetime, aslist, Optional, safe_int, get_clone_url, AttributeDict,
62 glob2re, StrictAttributeDict, cleaned_uri, datetime_to_time)
62 glob2re, StrictAttributeDict, cleaned_uri, datetime_to_time)
63 from rhodecode.lib.jsonalchemy import (
63 from rhodecode.lib.jsonalchemy import (
64 MutationObj, MutationList, JsonType, JsonRaw)
64 MutationObj, MutationList, JsonType, JsonRaw)
65 from rhodecode.lib.hash_utils import sha1
65 from rhodecode.lib.hash_utils import sha1
66 from rhodecode.lib import ext_json
66 from rhodecode.lib import ext_json
67 from rhodecode.lib import enc_utils
67 from rhodecode.lib import enc_utils
68 from rhodecode.lib.ext_json import json, str_json
68 from rhodecode.lib.ext_json import json, str_json
69 from rhodecode.lib.caching_query import FromCache
69 from rhodecode.lib.caching_query import FromCache
70 from rhodecode.lib.exceptions import (
70 from rhodecode.lib.exceptions import (
71 ArtifactMetadataDuplicate, ArtifactMetadataBadValueType)
71 ArtifactMetadataDuplicate, ArtifactMetadataBadValueType)
72 from rhodecode.model.meta import Base, Session
72 from rhodecode.model.meta import Base, Session
73
73
74 URL_SEP = '/'
74 URL_SEP = '/'
75 log = logging.getLogger(__name__)
75 log = logging.getLogger(__name__)
76
76
77 # =============================================================================
77 # =============================================================================
78 # BASE CLASSES
78 # BASE CLASSES
79 # =============================================================================
79 # =============================================================================
80
80
81 # this is propagated from .ini file rhodecode.encrypted_values.secret or
81 # this is propagated from .ini file rhodecode.encrypted_values.secret or
82 # beaker.session.secret if first is not set.
82 # beaker.session.secret if first is not set.
83 # and initialized at environment.py
83 # and initialized at environment.py
84 ENCRYPTION_KEY: bytes = b''
84 ENCRYPTION_KEY: bytes = b''
85
85
86 # used to sort permissions by types, '#' used here is not allowed to be in
86 # used to sort permissions by types, '#' used here is not allowed to be in
87 # usernames, and it's very early in sorted string.printable table.
87 # usernames, and it's very early in sorted string.printable table.
88 PERMISSION_TYPE_SORT = {
88 PERMISSION_TYPE_SORT = {
89 'admin': '####',
89 'admin': '####',
90 'write': '###',
90 'write': '###',
91 'read': '##',
91 'read': '##',
92 'none': '#',
92 'none': '#',
93 }
93 }
94
94
95
95
96 def display_user_sort(obj):
96 def display_user_sort(obj):
97 """
97 """
98 Sort function used to sort permissions in .permissions() function of
98 Sort function used to sort permissions in .permissions() function of
99 Repository, RepoGroup, UserGroup. Also it put the default user in front
99 Repository, RepoGroup, UserGroup. Also it put the default user in front
100 of all other resources
100 of all other resources
101 """
101 """
102
102
103 if obj.username == User.DEFAULT_USER:
103 if obj.username == User.DEFAULT_USER:
104 return '#####'
104 return '#####'
105 prefix = PERMISSION_TYPE_SORT.get(obj.permission.split('.')[-1], '')
105 prefix = PERMISSION_TYPE_SORT.get(obj.permission.split('.')[-1], '')
106 extra_sort_num = '1' # default
106 extra_sort_num = '1' # default
107
107
108 # NOTE(dan): inactive duplicates goes last
108 # NOTE(dan): inactive duplicates goes last
109 if getattr(obj, 'duplicate_perm', None):
109 if getattr(obj, 'duplicate_perm', None):
110 extra_sort_num = '9'
110 extra_sort_num = '9'
111 return prefix + extra_sort_num + obj.username
111 return prefix + extra_sort_num + obj.username
112
112
113
113
114 def display_user_group_sort(obj):
114 def display_user_group_sort(obj):
115 """
115 """
116 Sort function used to sort permissions in .permissions() function of
116 Sort function used to sort permissions in .permissions() function of
117 Repository, RepoGroup, UserGroup. Also it put the default user in front
117 Repository, RepoGroup, UserGroup. Also it put the default user in front
118 of all other resources
118 of all other resources
119 """
119 """
120
120
121 prefix = PERMISSION_TYPE_SORT.get(obj.permission.split('.')[-1], '')
121 prefix = PERMISSION_TYPE_SORT.get(obj.permission.split('.')[-1], '')
122 return prefix + obj.users_group_name
122 return prefix + obj.users_group_name
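
# NOTE(editor): hedged usage sketch, not part of the original module. It only
# illustrates how the two sort keys above are meant to be consumed; `perms`
# and `group_perms` are hypothetical iterables of permission rows as returned
# by a .permissions() call::
#
#     sorted_users = sorted(perms, key=display_user_sort)
#     sorted_groups = sorted(group_perms, key=display_user_group_sort)
#
# The '#'-based prefixes make the default user and admin entries sort first.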


def _hash_key(k):
    return sha1_safe(k)


def in_filter_generator(qry, items, limit=500):
    """
    Splits a large IN() clause into multiple chunks combined with OR,
    e.g.::
        cnt = Repository.query().filter(
            or_(
                *in_filter_generator(Repository.repo_id, range(100000))
            )).count()
    """
    if not items:
        # an empty list would produce an empty IN() query, which might cause
        # security issues and lead to hidden, unpleasant results
        items = [-1]

    parts = []
    for chunk in range(0, len(items), limit):
        parts.append(
            qry.in_(items[chunk: chunk + limit])
        )

    return parts


base_table_args = {
    'extend_existing': True,
    'mysql_engine': 'InnoDB',
    'mysql_charset': 'utf8',
    'sqlite_autoincrement': True
}
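
# NOTE(editor): hedged example, not part of the original module. The
# `base_table_args` dict above is meant to be appended to each model's
# `__table_args__`, as the models below actually do::
#
#     __table_args__ = (
#         UniqueConstraint('ui_key'),
#         base_table_args
#     )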


class EncryptedTextValue(TypeDecorator):
    """
    Special column for encrypted long text data, use like::

        value = Column("encrypted_value", EncryptedTextValue(), nullable=False)

    This column is intelligent: if the value is in unencrypted form it returns
    the unencrypted form, but on save it always encrypts.
    """
    cache_ok = True
    impl = Text

    def process_bind_param(self, value, dialect):
        """
        Setter for storing value
        """
        import rhodecode
        if not value:
            return value

        # protect against double encrypting if the value is already encrypted
        if value.startswith('enc$aes$') \
                or value.startswith('enc$aes_hmac$') \
                or value.startswith('enc2$'):
            raise ValueError('value needs to be in unencrypted format, '
                             'ie. not starting with enc$ or enc2$')

        algo = rhodecode.CONFIG.get('rhodecode.encrypted_values.algorithm') or 'aes'
        bytes_val = enc_utils.encrypt_value(value, enc_key=ENCRYPTION_KEY, algo=algo)
        return safe_str(bytes_val)

    def process_result_value(self, value, dialect):
        """
        Getter for retrieving value
        """

        import rhodecode
        if not value:
            return value

        enc_strict_mode = rhodecode.ConfigGet().get_bool('rhodecode.encrypted_values.strict', missing=True)

        bytes_val = enc_utils.decrypt_value(value, enc_key=ENCRYPTION_KEY, strict_mode=enc_strict_mode)

        return safe_str(bytes_val)
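
# NOTE(editor): hedged usage sketch, not part of the original module. A model
# would declare an encrypted column with the type above, e.g. (column name is
# a placeholder)::
#
#     secret = Column("secret", EncryptedTextValue(), nullable=True)
#
# Writes then pass through process_bind_param() (always encrypting) and reads
# through process_result_value() (decrypting, honoring the strict-mode setting).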


class BaseModel(object):
    """
    Base Model for all classes
    """

    @classmethod
    def _get_keys(cls):
        """return column names for this model"""
        return class_mapper(cls).c.keys()

    def get_dict(self):
        """
        return dict with keys and values corresponding
        to this model data """

        d = {}
        for k in self._get_keys():
            d[k] = getattr(self, k)

        # also use __json__() if present to get additional fields
        _json_attr = getattr(self, '__json__', None)
        if _json_attr:
            # update with attributes from __json__
            if callable(_json_attr):
                _json_attr = _json_attr()
            for k, val in _json_attr.items():
                d[k] = val
        return d

    def get_appstruct(self):
        """return list with keys and values tuples corresponding
        to this model data """

        lst = []
        for k in self._get_keys():
            lst.append((k, getattr(self, k),))
        return lst

    def populate_obj(self, populate_dict):
        """populate model with data from given populate_dict"""

        for k in self._get_keys():
            if k in populate_dict:
                setattr(self, k, populate_dict[k])

    @classmethod
    def query(cls):
        return Session().query(cls)

    @classmethod
    def select(cls, custom_cls=None):
        """
        Returns a SELECT statement for this model, e.g.::

            stmt = cls.select().where(cls.user_id==1)
            # optionally select specific columns/entities
            stmt = cls.select(User.user_id).where(cls.user_id==1)
            result = cls.execute(stmt)  # or cls.scalars(stmt)
        """

        if custom_cls:
            stmt = select(custom_cls)
        else:
            stmt = select(cls)
        return stmt

    @classmethod
    def execute(cls, stmt):
        return Session().execute(stmt)

    @classmethod
    def scalars(cls, stmt):
        return Session().scalars(stmt)
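
    # NOTE(editor): hedged usage sketch, not part of the original class. The
    # three helpers above combine into the 2.0-style query pattern used by the
    # models in this module::
    #
    #     stmt = User.select().where(User.username == User.DEFAULT_USER)
    #     default_user = User.scalars(stmt).first()   # ORM objects
    #     rows = User.execute(stmt).all()             # Row results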

    @classmethod
    def get(cls, id_):
        if id_:
            return cls.query().get(id_)

    @classmethod
    def get_or_404(cls, id_):
        from pyramid.httpexceptions import HTTPNotFound

        try:
            id_ = int(id_)
        except (TypeError, ValueError):
            raise HTTPNotFound()

        res = cls.query().get(id_)
        if not res:
            raise HTTPNotFound()
        return res

    @classmethod
    def getAll(cls):
        # deprecated and left for backward compatibility
        return cls.get_all()

    @classmethod
    def get_all(cls):
        return cls.query().all()

    @classmethod
    def delete(cls, id_):
        obj = cls.query().get(id_)
        Session().delete(obj)

    @classmethod
    def identity_cache(cls, session, attr_name, value):
        exist_in_session = []
        for (item_cls, pkey), instance in session.identity_map.items():
            if cls == item_cls and getattr(instance, attr_name) == value:
                exist_in_session.append(instance)
        if exist_in_session:
            if len(exist_in_session) == 1:
                return exist_in_session[0]
            log.exception(
                'multiple objects with attr %s and '
                'value %s found with same name: %r',
                attr_name, value, exist_in_session)

    @property
    def cls_name(self):
        return self.__class__.__name__

    def __repr__(self):
        return f'<DB:{self.cls_name}>'


class RhodeCodeSetting(Base, BaseModel):
    __tablename__ = 'rhodecode_settings'
    __table_args__ = (
        UniqueConstraint('app_settings_name'),
        base_table_args
    )

    SETTINGS_TYPES = {
        'str': safe_str,
        'int': safe_int,
        'unicode': safe_str,
        'bool': str2bool,
        'list': functools.partial(aslist, sep=',')
    }
    DEFAULT_UPDATE_URL = 'https://rhodecode.com/api/v1/info/versions'
    GLOBAL_CONF_KEY = 'app_settings'

    app_settings_id = Column("app_settings_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    app_settings_name = Column("app_settings_name", String(255), nullable=True, unique=None, default=None)
    _app_settings_value = Column("app_settings_value", String(4096), nullable=True, unique=None, default=None)
    _app_settings_type = Column("app_settings_type", String(255), nullable=True, unique=None, default=None)

    def __init__(self, key='', val='', type='unicode'):
        self.app_settings_name = key
        self.app_settings_type = type
        self.app_settings_value = val

    @validates('_app_settings_value')
    def validate_settings_value(self, key, val):
        assert type(val) == str
        return val

    @hybrid_property
    def app_settings_value(self):
        v = self._app_settings_value
        _type = self.app_settings_type
        if _type:
            _type = self.app_settings_type.split('.')[0]
        # decode the encrypted value
        if 'encrypted' in self.app_settings_type:
            cipher = EncryptedTextValue()
            v = safe_str(cipher.process_result_value(v, None))

        converter = self.SETTINGS_TYPES.get(_type) or \
            self.SETTINGS_TYPES['unicode']
        return converter(v)

    @app_settings_value.setter
    def app_settings_value(self, val):
        """
        Setter that will always make sure we use unicode in app_settings_value

        :param val:
        """
        val = safe_str(val)
        # encode the encrypted value
        if 'encrypted' in self.app_settings_type:
            cipher = EncryptedTextValue()
            val = safe_str(cipher.process_bind_param(val, None))
        self._app_settings_value = val

    @hybrid_property
    def app_settings_type(self):
        return self._app_settings_type

    @app_settings_type.setter
    def app_settings_type(self, val):
        if val.split('.')[0] not in self.SETTINGS_TYPES:
            raise Exception('type must be one of %s got %s'
                            % (self.SETTINGS_TYPES.keys(), val))
        self._app_settings_type = val

    @classmethod
    def get_by_prefix(cls, prefix):
        return RhodeCodeSetting.query()\
            .filter(RhodeCodeSetting.app_settings_name.startswith(prefix))\
            .all()

    def __repr__(self):
        return "<%s('%s:%s[%s]')>" % (
            self.cls_name,
            self.app_settings_name, self.app_settings_value,
            self.app_settings_type
        )
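
# NOTE(editor): hedged example, not part of the original module. Settings are
# stored as strings plus a type tag from SETTINGS_TYPES; a typed setting could
# be created like this (setting names/values are placeholders)::
#
#     setting = RhodeCodeSetting('update_url', RhodeCodeSetting.DEFAULT_UPDATE_URL, 'unicode')
#     flag = RhodeCodeSetting('show_public_icon', 'True', 'bool')
#
# Reading .app_settings_value converts back through SETTINGS_TYPES, so
# `flag.app_settings_value` would come back as a real bool.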


class RhodeCodeUi(Base, BaseModel):
    __tablename__ = 'rhodecode_ui'
    __table_args__ = (
        UniqueConstraint('ui_key'),
        base_table_args
    )
    # Sync those values with vcsserver.config.hooks

    HOOK_REPO_SIZE = 'changegroup.repo_size'
    # HG
    HOOK_PRE_PULL = 'preoutgoing.pre_pull'
    HOOK_PULL = 'outgoing.pull_logger'
    HOOK_PRE_PUSH = 'prechangegroup.pre_push'
    HOOK_PRETX_PUSH = 'pretxnchangegroup.pre_push'
    HOOK_PUSH = 'changegroup.push_logger'
    HOOK_PUSH_KEY = 'pushkey.key_push'

    HOOKS_BUILTIN = [
        HOOK_PRE_PULL,
        HOOK_PULL,
        HOOK_PRE_PUSH,
        HOOK_PRETX_PUSH,
        HOOK_PUSH,
        HOOK_PUSH_KEY,
    ]

    # TODO: johbo: Unify the way hooks are configured for git and hg,
    # the git part is currently hardcoded.

    # SVN PATTERNS
    SVN_BRANCH_ID = 'vcs_svn_branch'
    SVN_TAG_ID = 'vcs_svn_tag'

    ui_id = Column(
        "ui_id", Integer(), nullable=False, unique=True, default=None,
        primary_key=True)
    ui_section = Column(
        "ui_section", String(255), nullable=True, unique=None, default=None)
    ui_key = Column(
        "ui_key", String(255), nullable=True, unique=None, default=None)
    ui_value = Column(
        "ui_value", String(255), nullable=True, unique=None, default=None)
    ui_active = Column(
        "ui_active", Boolean(), nullable=True, unique=None, default=True)

    def __repr__(self):
        return '<%s[%s]%s=>%s]>' % (self.cls_name, self.ui_section,
                                    self.ui_key, self.ui_value)


class RepoRhodeCodeSetting(Base, BaseModel):
    __tablename__ = 'repo_rhodecode_settings'
    __table_args__ = (
        UniqueConstraint(
            'app_settings_name', 'repository_id',
            name='uq_repo_rhodecode_setting_name_repo_id'),
        base_table_args
    )

    repository_id = Column(
        "repository_id", Integer(), ForeignKey('repositories.repo_id'),
        nullable=False)
    app_settings_id = Column(
        "app_settings_id", Integer(), nullable=False, unique=True,
        default=None, primary_key=True)
    app_settings_name = Column(
        "app_settings_name", String(255), nullable=True, unique=None,
        default=None)
    _app_settings_value = Column(
        "app_settings_value", String(4096), nullable=True, unique=None,
        default=None)
    _app_settings_type = Column(
        "app_settings_type", String(255), nullable=True, unique=None,
        default=None)

    repository = relationship('Repository', viewonly=True)

    def __init__(self, repository_id, key='', val='', type='unicode'):
        self.repository_id = repository_id
        self.app_settings_name = key
        self.app_settings_type = type
        self.app_settings_value = val

    @validates('_app_settings_value')
    def validate_settings_value(self, key, val):
        assert type(val) == str
        return val

    @hybrid_property
    def app_settings_value(self):
        v = self._app_settings_value
        type_ = self.app_settings_type
        SETTINGS_TYPES = RhodeCodeSetting.SETTINGS_TYPES
        converter = SETTINGS_TYPES.get(type_) or SETTINGS_TYPES['unicode']
        return converter(v)

    @app_settings_value.setter
    def app_settings_value(self, val):
        """
        Setter that will always make sure we use unicode in app_settings_value

        :param val:
        """
        self._app_settings_value = safe_str(val)

    @hybrid_property
    def app_settings_type(self):
        return self._app_settings_type

    @app_settings_type.setter
    def app_settings_type(self, val):
        SETTINGS_TYPES = RhodeCodeSetting.SETTINGS_TYPES
        if val not in SETTINGS_TYPES:
            raise Exception('type must be one of %s got %s'
                            % (SETTINGS_TYPES.keys(), val))
        self._app_settings_type = val

    def __repr__(self):
        return "<%s('%s:%s:%s[%s]')>" % (
            self.cls_name, self.repository.repo_name,
            self.app_settings_name, self.app_settings_value,
            self.app_settings_type
        )


class RepoRhodeCodeUi(Base, BaseModel):
    __tablename__ = 'repo_rhodecode_ui'
    __table_args__ = (
        UniqueConstraint(
            'repository_id', 'ui_section', 'ui_key',
            name='uq_repo_rhodecode_ui_repository_id_section_key'),
        base_table_args
    )

    repository_id = Column(
        "repository_id", Integer(), ForeignKey('repositories.repo_id'),
        nullable=False)
    ui_id = Column(
        "ui_id", Integer(), nullable=False, unique=True, default=None,
        primary_key=True)
    ui_section = Column(
        "ui_section", String(255), nullable=True, unique=None, default=None)
    ui_key = Column(
        "ui_key", String(255), nullable=True, unique=None, default=None)
    ui_value = Column(
        "ui_value", String(255), nullable=True, unique=None, default=None)
    ui_active = Column(
        "ui_active", Boolean(), nullable=True, unique=None, default=True)

    repository = relationship('Repository', viewonly=True)

    def __repr__(self):
        return '<%s[%s:%s]%s=>%s]>' % (
            self.cls_name, self.repository.repo_name,
            self.ui_section, self.ui_key, self.ui_value)


class User(Base, BaseModel):
    __tablename__ = 'users'
    __table_args__ = (
        UniqueConstraint('username'), UniqueConstraint('email'),
        Index('u_username_idx', 'username'),
        Index('u_email_idx', 'email'),
        base_table_args
    )

    DEFAULT_USER = 'default'
    DEFAULT_USER_EMAIL = 'anonymous@rhodecode.org'
    DEFAULT_GRAVATAR_URL = 'https://secure.gravatar.com/avatar/{md5email}?d=identicon&s={size}'

    user_id = Column("user_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    username = Column("username", String(255), nullable=True, unique=None, default=None)
    password = Column("password", String(255), nullable=True, unique=None, default=None)
    active = Column("active", Boolean(), nullable=True, unique=None, default=True)
    admin = Column("admin", Boolean(), nullable=True, unique=None, default=False)
    name = Column("firstname", String(255), nullable=True, unique=None, default=None)
    lastname = Column("lastname", String(255), nullable=True, unique=None, default=None)
    _email = Column("email", String(255), nullable=True, unique=None, default=None)
    last_login = Column("last_login", DateTime(timezone=False), nullable=True, unique=None, default=None)
    last_activity = Column('last_activity', DateTime(timezone=False), nullable=True, unique=None, default=None)
    description = Column('description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))

    extern_type = Column("extern_type", String(255), nullable=True, unique=None, default=None)
    extern_name = Column("extern_name", String(255), nullable=True, unique=None, default=None)
    _api_key = Column("api_key", String(255), nullable=True, unique=None, default=None)
    inherit_default_permissions = Column("inherit_default_permissions", Boolean(), nullable=False, unique=None, default=True)
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    _user_data = Column("user_data", LargeBinary(), nullable=True)  # JSON data

    user_log = relationship('UserLog', back_populates='user')
    user_perms = relationship('UserToPerm', primaryjoin="User.user_id==UserToPerm.user_id", cascade='all, delete-orphan')

    repositories = relationship('Repository', back_populates='user')
    repository_groups = relationship('RepoGroup', back_populates='user')
    user_groups = relationship('UserGroup', back_populates='user')

    user_followers = relationship('UserFollowing', primaryjoin='UserFollowing.follows_user_id==User.user_id', cascade='all', back_populates='follows_user')
    followings = relationship('UserFollowing', primaryjoin='UserFollowing.user_id==User.user_id', cascade='all', back_populates='user')

    repo_to_perm = relationship('UserRepoToPerm', primaryjoin='UserRepoToPerm.user_id==User.user_id', cascade='all, delete-orphan')
    repo_group_to_perm = relationship('UserRepoGroupToPerm', primaryjoin='UserRepoGroupToPerm.user_id==User.user_id', cascade='all, delete-orphan', back_populates='user')
    user_group_to_perm = relationship('UserUserGroupToPerm', primaryjoin='UserUserGroupToPerm.user_id==User.user_id', cascade='all, delete-orphan', back_populates='user')

    group_member = relationship('UserGroupMember', cascade='all', back_populates='user')

    notifications = relationship('UserNotification', cascade='all', back_populates='user')
    # notifications assigned to this user
    user_created_notifications = relationship('Notification', cascade='all', back_populates='created_by_user')
    # comments created by this user
    user_comments = relationship('ChangesetComment', cascade='all', back_populates='author')
    # user profile extra info
    user_emails = relationship('UserEmailMap', cascade='all', back_populates='user')
    user_ip_map = relationship('UserIpMap', cascade='all', back_populates='user')
    user_auth_tokens = relationship('UserApiKeys', cascade='all', back_populates='user')
    user_ssh_keys = relationship('UserSshKeys', cascade='all', back_populates='user')

    # gists
    user_gists = relationship('Gist', cascade='all', back_populates='owner')
    # user pull requests
    user_pull_requests = relationship('PullRequest', cascade='all', back_populates='author')

    # external identities
    external_identities = relationship('ExternalIdentity', primaryjoin="User.user_id==ExternalIdentity.local_user_id", cascade='all')
    # review rules
    user_review_rules = relationship('RepoReviewRuleUser', cascade='all', back_populates='user')

    # artifacts owned
    artifacts = relationship('FileStore', primaryjoin='FileStore.user_id==User.user_id', back_populates='upload_user')

    # no cascade, set NULL
    scope_artifacts = relationship('FileStore', primaryjoin='FileStore.scope_user_id==User.user_id', cascade='', back_populates='user')

    def __repr__(self):
        return f"<{self.cls_name}('id={self.user_id}, username={self.username}')>"

    @hybrid_property
    def email(self):
        return self._email

    @email.setter
    def email(self, val):
        self._email = val.lower() if val else None

    @hybrid_property
    def first_name(self):
        from rhodecode.lib import helpers as h
        if self.name:
            return h.escape(self.name)
        return self.name

    @hybrid_property
    def last_name(self):
        from rhodecode.lib import helpers as h
        if self.lastname:
            return h.escape(self.lastname)
        return self.lastname

    @hybrid_property
    def api_key(self):
        """
        Fetch an auth-token with role ALL connected to this user, if one exists
        """
        user_auth_token = UserApiKeys.query()\
            .filter(UserApiKeys.user_id == self.user_id)\
            .filter(or_(UserApiKeys.expires == -1,
                        UserApiKeys.expires >= time.time()))\
            .filter(UserApiKeys.role == UserApiKeys.ROLE_ALL).first()
        if user_auth_token:
            user_auth_token = user_auth_token.api_key

        return user_auth_token

    @api_key.setter
    def api_key(self, val):
        # setting the API key directly is deprecated for now, so don't allow it
        self._api_key = None

    @property
    def reviewer_pull_requests(self):
        return PullRequestReviewers.query() \
            .options(joinedload(PullRequestReviewers.pull_request)) \
            .filter(PullRequestReviewers.user_id == self.user_id) \
            .all()

    @property
    def firstname(self):
        # alias for future
        return self.name

    @property
    def emails(self):
        other = UserEmailMap.query()\
            .filter(UserEmailMap.user == self) \
            .order_by(UserEmailMap.email_id.asc()) \
            .all()
        return [self.email] + [x.email for x in other]

    def emails_cached(self):
        emails = []
        if self.user_id != self.get_default_user_id():
            emails = UserEmailMap.query()\
                .filter(UserEmailMap.user == self) \
                .order_by(UserEmailMap.email_id.asc())

            emails = emails.options(
                FromCache("sql_cache_short", f"get_user_{self.user_id}_emails")
            )

        return [self.email] + [x.email for x in emails]

    @property
    def auth_tokens(self):
        auth_tokens = self.get_auth_tokens()
        return [x.api_key for x in auth_tokens]

    def get_auth_tokens(self):
        return UserApiKeys.query()\
            .filter(UserApiKeys.user == self)\
            .order_by(UserApiKeys.user_api_key_id.asc())\
            .all()

    @LazyProperty
    def feed_token(self):
        return self.get_feed_token()

    def get_feed_token(self, cache=True):
        feed_tokens = UserApiKeys.query()\
            .filter(UserApiKeys.user == self)\
            .filter(UserApiKeys.role == UserApiKeys.ROLE_FEED)
        if cache:
            feed_tokens = feed_tokens.options(
                FromCache("sql_cache_short", f"get_user_feed_token_{self.user_id}"))

        feed_tokens = feed_tokens.all()
        if feed_tokens:
            return feed_tokens[0].api_key
        return 'NO_FEED_TOKEN_AVAILABLE'

    @LazyProperty
    def artifact_token(self):
        return self.get_artifact_token()

    def get_artifact_token(self, cache=True):
        artifacts_tokens = UserApiKeys.query()\
            .filter(UserApiKeys.user == self) \
            .filter(or_(UserApiKeys.expires == -1,
                        UserApiKeys.expires >= time.time())) \
            .filter(UserApiKeys.role == UserApiKeys.ROLE_ARTIFACT_DOWNLOAD)

        if cache:
            artifacts_tokens = artifacts_tokens.options(
                FromCache("sql_cache_short", f"get_user_artifact_token_{self.user_id}"))

        artifacts_tokens = artifacts_tokens.all()
        if artifacts_tokens:
            return artifacts_tokens[0].api_key
        return 'NO_ARTIFACT_TOKEN_AVAILABLE'

    def get_or_create_artifact_token(self):
        artifacts_tokens = UserApiKeys.query()\
            .filter(UserApiKeys.user == self) \
            .filter(or_(UserApiKeys.expires == -1,
                        UserApiKeys.expires >= time.time())) \
            .filter(UserApiKeys.role == UserApiKeys.ROLE_ARTIFACT_DOWNLOAD)

        artifacts_tokens = artifacts_tokens.all()
        if artifacts_tokens:
            return artifacts_tokens[0].api_key
        else:
            from rhodecode.model.auth_token import AuthTokenModel
            artifact_token = AuthTokenModel().create(
                self, 'auto-generated-artifact-token',
                lifetime=-1, role=UserApiKeys.ROLE_ARTIFACT_DOWNLOAD)
            Session.commit()
            return artifact_token.api_key

    @classmethod
    def get(cls, user_id, cache=False):
        if not user_id:
            return

        user = cls.query()
        if cache:
            user = user.options(
                FromCache("sql_cache_short", f"get_users_{user_id}"))
        return user.get(user_id)

    @classmethod
    def extra_valid_auth_tokens(cls, user, role=None):
        tokens = UserApiKeys.query().filter(UserApiKeys.user == user)\
            .filter(or_(UserApiKeys.expires == -1,
                        UserApiKeys.expires >= time.time()))
        if role:
            tokens = tokens.filter(or_(UserApiKeys.role == role,
                                       UserApiKeys.role == UserApiKeys.ROLE_ALL))
        return tokens.all()

    def authenticate_by_token(self, auth_token, roles=None, scope_repo_id=None):
        from rhodecode.lib import auth

        log.debug('Trying to authenticate user: %s via auth-token, '
                  'and roles: %s', self, roles)

        if not auth_token:
            return False

        roles = (roles or []) + [UserApiKeys.ROLE_ALL]
        tokens_q = UserApiKeys.query()\
            .filter(UserApiKeys.user_id == self.user_id)\
            .filter(or_(UserApiKeys.expires == -1,
                        UserApiKeys.expires >= time.time()))

        tokens_q = tokens_q.filter(UserApiKeys.role.in_(roles))

        crypto_backend = auth.crypto_backend()
        enc_token_map = {}
        plain_token_map = {}
        for token in tokens_q:
            if token.api_key.startswith(crypto_backend.ENC_PREF):
                enc_token_map[token.api_key] = token
            else:
                plain_token_map[token.api_key] = token
        log.debug(
            'Found %s plain and %s encrypted tokens to check for authentication for this user',
            len(plain_token_map), len(enc_token_map))

        # plain token match comes first
        match = plain_token_map.get(auth_token)

        # check encrypted tokens now
        if not match:
            for token_hash, token in enc_token_map.items():
                # NOTE(marcink): this is expensive to calculate, but most secure
                if crypto_backend.hash_check(auth_token, token_hash):
                    match = token
                    break

        if match:
            log.debug('Found matching token %s', match)
            if match.repo_id:
                log.debug('Found scope, checking for scope match of token %s', match)
                if match.repo_id == scope_repo_id:
                    return True
                else:
                    log.debug(
                        'AUTH_TOKEN: scope mismatch, token has a set repo scope: %s, '
                        'and calling scope is:%s, skipping further checks',
                        match.repo, scope_repo_id)
                    return False
            else:
                return True

        return False
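
    # NOTE(editor): hedged usage sketch, not part of the original class. A
    # typical call checks a raw token against this user for a given role
    # (the token value below is a placeholder)::
    #
    #     if user.authenticate_by_token('secret-token-value', roles=[UserApiKeys.ROLE_FEED]):
    #         ...  # token is valid and, if repo-scoped, matches scope_repo_id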

    @property
    def ip_addresses(self):
        ret = UserIpMap.query().filter(UserIpMap.user == self).all()
        return [x.ip_addr for x in ret]

    @property
    def username_and_name(self):
        return f'{self.username} ({self.first_name} {self.last_name})'

    @property
    def username_or_name_or_email(self):
        full_name = self.full_name if self.full_name != ' ' else None
        return self.username or full_name or self.email

    @property
    def full_name(self):
        return f'{self.first_name} {self.last_name}'

    @property
    def full_name_or_username(self):
        return (f'{self.first_name} {self.last_name}'
                if (self.first_name and self.last_name) else self.username)

    @property
    def full_contact(self):
        return f'{self.first_name} {self.last_name} <{self.email}>'

    @property
    def short_contact(self):
        return f'{self.first_name} {self.last_name}'

    @property
    def is_admin(self):
        return self.admin

    @property
    def language(self):
        return self.user_data.get('language')

    def AuthUser(self, **kwargs):
        """
        Returns instance of AuthUser for this user
        """
        from rhodecode.lib.auth import AuthUser
        return AuthUser(user_id=self.user_id, username=self.username, **kwargs)

    @hybrid_property
    def user_data(self):
        if not self._user_data:
            return {}

        try:
            return json.loads(self._user_data) or {}
        except TypeError:
            return {}

    @user_data.setter
    def user_data(self, val):
        if not isinstance(val, dict):
            raise Exception('user_data must be dict, got %s' % type(val))
        try:
            self._user_data = safe_bytes(json.dumps(val))
        except Exception:
            log.error(traceback.format_exc())

    @classmethod
    def get_by_username(cls, username, case_insensitive=False,
                        cache=False):

        if case_insensitive:
            q = cls.select().where(
                func.lower(cls.username) == func.lower(username))
        else:
            q = cls.select().where(cls.username == username)

        if cache:
            hash_key = _hash_key(username)
            q = q.options(
                FromCache("sql_cache_short", f"get_user_by_name_{hash_key}"))

        return cls.execute(q).scalar_one_or_none()
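
    # NOTE(editor): hedged example, not part of the original class; the
    # username below is a placeholder::
    #
    #     user = User.get_by_username('some-user', case_insensitive=True, cache=True)
    #
    # With cache=True the lookup is memoized in the "sql_cache_short" region,
    # keyed by a hash of the username.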
955
955
956 @classmethod
956 @classmethod
957 def get_by_auth_token(cls, auth_token, cache=False):
957 def get_by_auth_token(cls, auth_token, cache=False):
958
958
959 q = cls.select(User)\
959 q = cls.select(User)\
960 .join(UserApiKeys)\
960 .join(UserApiKeys)\
961 .where(UserApiKeys.api_key == auth_token)\
961 .where(UserApiKeys.api_key == auth_token)\
962 .where(or_(UserApiKeys.expires == -1,
962 .where(or_(UserApiKeys.expires == -1,
963 UserApiKeys.expires >= time.time()))
963 UserApiKeys.expires >= time.time()))
964
964
965 if cache:
965 if cache:
966 q = q.options(
966 q = q.options(
967 FromCache("sql_cache_short", f"get_auth_token_{auth_token}"))
967 FromCache("sql_cache_short", f"get_auth_token_{auth_token}"))
968
968
969 matched_user = cls.execute(q).scalar_one_or_none()
969 matched_user = cls.execute(q).scalar_one_or_none()
970
970
971 return matched_user
971 return matched_user
972
972
973 @classmethod
973 @classmethod
974 def get_by_email(cls, email, case_insensitive=False, cache=False):
974 def get_by_email(cls, email, case_insensitive=False, cache=False):
975
975
976 if case_insensitive:
976 if case_insensitive:
977 q = cls.select().where(func.lower(cls.email) == func.lower(email))
977 q = cls.select().where(func.lower(cls.email) == func.lower(email))
978 else:
978 else:
979 q = cls.select().where(cls.email == email)
979 q = cls.select().where(cls.email == email)
980
980
981 if cache:
981 if cache:
982 email_key = _hash_key(email)
982 email_key = _hash_key(email)
983 q = q.options(
983 q = q.options(
984 FromCache("sql_cache_short", f"get_email_key_{email_key}"))
984 FromCache("sql_cache_short", f"get_email_key_{email_key}"))
985
985
986 ret = cls.execute(q).scalar_one_or_none()
986 ret = cls.execute(q).scalar_one_or_none()
987
987
988 if ret is None:
988 if ret is None:
989 q = cls.select(UserEmailMap)
989 q = cls.select(UserEmailMap)
990 # try fetching in alternate email map
990 # try fetching in alternate email map
991 if case_insensitive:
991 if case_insensitive:
992 q = q.where(func.lower(UserEmailMap.email) == func.lower(email))
992 q = q.where(func.lower(UserEmailMap.email) == func.lower(email))
993 else:
993 else:
994 q = q.where(UserEmailMap.email == email)
994 q = q.where(UserEmailMap.email == email)
995 q = q.options(joinedload(UserEmailMap.user))
995 q = q.options(joinedload(UserEmailMap.user))
996 if cache:
996 if cache:
997 q = q.options(
997 q = q.options(
998 FromCache("sql_cache_short", f"get_email_map_key_{email_key}"))
998 FromCache("sql_cache_short", f"get_email_map_key_{email_key}"))
999
999
1000 result = cls.execute(q).scalar_one_or_none()
1000 result = cls.execute(q).scalar_one_or_none()
1001 ret = getattr(result, 'user', None)
1001 ret = getattr(result, 'user', None)
1002
1002
1003 return ret
1003 return ret
1004
1004
1005 @classmethod
1005 @classmethod
1006 def get_from_cs_author(cls, author):
1006 def get_from_cs_author(cls, author):
1007 """
1007 """
1008 Tries to get User objects out of commit author string
1008 Tries to get User objects out of commit author string
1009
1009
1010 :param author:
1010 :param author:
1011 """
1011 """
1012 from rhodecode.lib.helpers import email, author_name
1012 from rhodecode.lib.helpers import email, author_name
1013 # If the author string contains a valid email, see if it belongs to a user in the system
1013 # If the author string contains a valid email, see if it belongs to a user in the system
1014 _email = email(author)
1014 _email = email(author)
1015 if _email:
1015 if _email:
1016 user = cls.get_by_email(_email, case_insensitive=True)
1016 user = cls.get_by_email(_email, case_insensitive=True)
1017 if user:
1017 if user:
1018 return user
1018 return user
1019 # Maybe we can match by username?
1019 # Maybe we can match by username?
1020 _author = author_name(author)
1020 _author = author_name(author)
1021 user = cls.get_by_username(_author, case_insensitive=True)
1021 user = cls.get_by_username(_author, case_insensitive=True)
1022 if user:
1022 if user:
1023 return user
1023 return user
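# Usage sketch (illustrative only; the author string is hypothetical):
#
#   user = User.get_from_cs_author('Full Name <someone@example.com>')
#   # falls back to username matching, or returns None if nothing matches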
1024
1024
1025 def update_userdata(self, **kwargs):
1025 def update_userdata(self, **kwargs):
1026 usr = self
1026 usr = self
1027 old = usr.user_data
1027 old = usr.user_data
1028 old.update(**kwargs)
1028 old.update(**kwargs)
1029 usr.user_data = old
1029 usr.user_data = old
1030 Session().add(usr)
1030 Session().add(usr)
1031 log.debug('updated userdata with %s', kwargs)
1031 log.debug('updated userdata with %s', kwargs)
1032
1032
1033 def update_lastlogin(self):
1033 def update_lastlogin(self):
1034 """Update user lastlogin"""
1034 """Update user lastlogin"""
1035 self.last_login = datetime.datetime.now()
1035 self.last_login = datetime.datetime.now()
1036 Session().add(self)
1036 Session().add(self)
1037 log.debug('updated user %s lastlogin', self.username)
1037 log.debug('updated user %s lastlogin', self.username)
1038
1038
1039 def update_password(self, new_password):
1039 def update_password(self, new_password):
1040 from rhodecode.lib.auth import get_crypt_password
1040 from rhodecode.lib.auth import get_crypt_password
1041
1041
1042 self.password = get_crypt_password(new_password)
1042 self.password = get_crypt_password(new_password)
1043 Session().add(self)
1043 Session().add(self)
1044
1044
1045 @classmethod
1045 @classmethod
1046 def get_first_super_admin(cls):
1046 def get_first_super_admin(cls):
1047 stmt = cls.select().where(User.admin == true()).order_by(User.user_id.asc())
1047 stmt = cls.select().where(User.admin == true()).order_by(User.user_id.asc())
1048 user = cls.scalars(stmt).first()
1048 user = cls.scalars(stmt).first()
1049
1049
1050 if user is None:
1050 if user is None:
1051 raise Exception('FATAL: Missing administrative account!')
1051 raise Exception('FATAL: Missing administrative account!')
1052 return user
1052 return user
1053
1053
1054 @classmethod
1054 @classmethod
1055 def get_all_super_admins(cls, only_active=False):
1055 def get_all_super_admins(cls, only_active=False):
1056 """
1056 """
1057 Returns all admin accounts sorted by username
1057 Returns all admin accounts sorted by username
1058 """
1058 """
1059 qry = User.query().filter(User.admin == true()).order_by(User.username.asc())
1059 qry = User.query().filter(User.admin == true()).order_by(User.username.asc())
1060 if only_active:
1060 if only_active:
1061 qry = qry.filter(User.active == true())
1061 qry = qry.filter(User.active == true())
1062 return qry.all()
1062 return qry.all()
1063
1063
1064 @classmethod
1064 @classmethod
1065 def get_all_user_ids(cls, only_active=True):
1065 def get_all_user_ids(cls, only_active=True):
1066 """
1066 """
1067 Returns all users IDs
1067 Returns all users IDs
1068 """
1068 """
1069 qry = Session().query(User.user_id)
1069 qry = Session().query(User.user_id)
1070
1070
1071 if only_active:
1071 if only_active:
1072 qry = qry.filter(User.active == true())
1072 qry = qry.filter(User.active == true())
1073 return [x.user_id for x in qry]
1073 return [x.user_id for x in qry]
1074
1074
1075 @classmethod
1075 @classmethod
1076 def get_default_user(cls, cache=False, refresh=False):
1076 def get_default_user(cls, cache=False, refresh=False):
1077 user = User.get_by_username(User.DEFAULT_USER, cache=cache)
1077 user = User.get_by_username(User.DEFAULT_USER, cache=cache)
1078 if user is None:
1078 if user is None:
1079 raise Exception('FATAL: Missing default account!')
1079 raise Exception('FATAL: Missing default account!')
1080 if refresh:
1080 if refresh:
1081 # The default user might be based on outdated state which
1081 # The default user might be based on outdated state which
1082 # has been loaded from the cache.
1082 # has been loaded from the cache.
1083 # A call to refresh() ensures that the
1083 # A call to refresh() ensures that the
1084 # latest state from the database is used.
1084 # latest state from the database is used.
1085 Session().refresh(user)
1085 Session().refresh(user)
1086
1086
1087 return user
1087 return user
1088
1088
1089 @classmethod
1089 @classmethod
1090 def get_default_user_id(cls):
1090 def get_default_user_id(cls):
1091 import rhodecode
1091 import rhodecode
1092 return rhodecode.CONFIG['default_user_id']
1092 return rhodecode.CONFIG['default_user_id']
1093
1093
1094 def _get_default_perms(self, user, suffix=''):
1094 def _get_default_perms(self, user, suffix=''):
1095 from rhodecode.model.permission import PermissionModel
1095 from rhodecode.model.permission import PermissionModel
1096 return PermissionModel().get_default_perms(user.user_perms, suffix)
1096 return PermissionModel().get_default_perms(user.user_perms, suffix)
1097
1097
1098 def get_default_perms(self, suffix=''):
1098 def get_default_perms(self, suffix=''):
1099 return self._get_default_perms(self, suffix)
1099 return self._get_default_perms(self, suffix)
1100
1100
1101 def get_api_data(self, include_secrets=False, details='full'):
1101 def get_api_data(self, include_secrets=False, details='full'):
1102 """
1102 """
1103 Common function for generating user related data for API
1103 Common function for generating user related data for API
1104
1104
1105 :param include_secrets: By default, secrets in the API data are replaced
1105 :param include_secrets: By default, secrets in the API data are replaced
1106 by a placeholder value to prevent exposing them by accident. If this
1106 by a placeholder value to prevent exposing them by accident. If this
1107 data needs to be exposed, set this flag to ``True``.
1107 data needs to be exposed, set this flag to ``True``.
1108
1108
1109 :param details: either 'basic' or 'full'. 'basic' returns only a subset of
1109 :param details: either 'basic' or 'full'. 'basic' returns only a subset of
1110 the available user information: user_id, name and emails.
1110 the available user information: user_id, name and emails.
1111 """
1111 """
1112 user = self
1112 user = self
1113 user_data = self.user_data
1113 user_data = self.user_data
1114 data = {
1114 data = {
1115 'user_id': user.user_id,
1115 'user_id': user.user_id,
1116 'username': user.username,
1116 'username': user.username,
1117 'firstname': user.name,
1117 'firstname': user.name,
1118 'lastname': user.lastname,
1118 'lastname': user.lastname,
1119 'description': user.description,
1119 'description': user.description,
1120 'email': user.email,
1120 'email': user.email,
1121 'emails': user.emails,
1121 'emails': user.emails,
1122 }
1122 }
1123 if details == 'basic':
1123 if details == 'basic':
1124 return data
1124 return data
1125
1125
1126 auth_token_length = 40
1126 auth_token_length = 40
1127 auth_token_replacement = '*' * auth_token_length
1127 auth_token_replacement = '*' * auth_token_length
1128
1128
1129 extras = {
1129 extras = {
1130 'auth_tokens': [auth_token_replacement],
1130 'auth_tokens': [auth_token_replacement],
1131 'active': user.active,
1131 'active': user.active,
1132 'admin': user.admin,
1132 'admin': user.admin,
1133 'extern_type': user.extern_type,
1133 'extern_type': user.extern_type,
1134 'extern_name': user.extern_name,
1134 'extern_name': user.extern_name,
1135 'last_login': user.last_login,
1135 'last_login': user.last_login,
1136 'last_activity': user.last_activity,
1136 'last_activity': user.last_activity,
1137 'ip_addresses': user.ip_addresses,
1137 'ip_addresses': user.ip_addresses,
1138 'language': user_data.get('language')
1138 'language': user_data.get('language')
1139 }
1139 }
1140 data.update(extras)
1140 data.update(extras)
1141
1141
1142 if include_secrets:
1142 if include_secrets:
1143 data['auth_tokens'] = user.auth_tokens
1143 data['auth_tokens'] = user.auth_tokens
1144 return data
1144 return data
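# Usage sketch (illustrative only): 'basic' returns just the identity fields,
# while the default 'full' adds account status, auth token and activity fields,
# with auth tokens replaced by a placeholder unless include_secrets=True:
#
#   basic_data = user.get_api_data(details='basic')
#   full_data = user.get_api_data(include_secrets=False)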
1145
1145
1146 def __json__(self):
1146 def __json__(self):
1147 data = {
1147 data = {
1148 'full_name': self.full_name,
1148 'full_name': self.full_name,
1149 'full_name_or_username': self.full_name_or_username,
1149 'full_name_or_username': self.full_name_or_username,
1150 'short_contact': self.short_contact,
1150 'short_contact': self.short_contact,
1151 'full_contact': self.full_contact,
1151 'full_contact': self.full_contact,
1152 }
1152 }
1153 data.update(self.get_api_data())
1153 data.update(self.get_api_data())
1154 return data
1154 return data
1155
1155
1156
1156
1157 class UserApiKeys(Base, BaseModel):
1157 class UserApiKeys(Base, BaseModel):
1158 __tablename__ = 'user_api_keys'
1158 __tablename__ = 'user_api_keys'
1159 __table_args__ = (
1159 __table_args__ = (
1160 Index('uak_api_key_idx', 'api_key'),
1160 Index('uak_api_key_idx', 'api_key'),
1161 Index('uak_api_key_expires_idx', 'api_key', 'expires'),
1161 Index('uak_api_key_expires_idx', 'api_key', 'expires'),
1162 base_table_args
1162 base_table_args
1163 )
1163 )
1164
1164
1165 # ApiKey role
1165 # ApiKey role
1166 ROLE_ALL = 'token_role_all'
1166 ROLE_ALL = 'token_role_all'
1167 ROLE_VCS = 'token_role_vcs'
1167 ROLE_VCS = 'token_role_vcs'
1168 ROLE_API = 'token_role_api'
1168 ROLE_API = 'token_role_api'
1169 ROLE_HTTP = 'token_role_http'
1169 ROLE_HTTP = 'token_role_http'
1170 ROLE_FEED = 'token_role_feed'
1170 ROLE_FEED = 'token_role_feed'
1171 ROLE_ARTIFACT_DOWNLOAD = 'role_artifact_download'
1171 ROLE_ARTIFACT_DOWNLOAD = 'role_artifact_download'
1172 # The last role is left out of the ROLES list below, as it is only
1172 # The last role is left out of the ROLES list below, as it is only
1173 # used for a single action and cannot be created by users
1173 # used for a single action and cannot be created by users
1174 ROLE_PASSWORD_RESET = 'token_password_reset'
1174 ROLE_PASSWORD_RESET = 'token_password_reset'
1175
1175
1176 ROLES = [ROLE_ALL, ROLE_VCS, ROLE_API, ROLE_HTTP, ROLE_FEED, ROLE_ARTIFACT_DOWNLOAD]
1176 ROLES = [ROLE_ALL, ROLE_VCS, ROLE_API, ROLE_HTTP, ROLE_FEED, ROLE_ARTIFACT_DOWNLOAD]
1177
1177
1178 user_api_key_id = Column("user_api_key_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1178 user_api_key_id = Column("user_api_key_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1179 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
1179 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
1180 api_key = Column("api_key", String(255), nullable=False, unique=True)
1180 api_key = Column("api_key", String(255), nullable=False, unique=True)
1181 description = Column('description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))
1181 description = Column('description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))
1182 expires = Column('expires', Float(53), nullable=False)
1182 expires = Column('expires', Float(53), nullable=False)
1183 role = Column('role', String(255), nullable=True)
1183 role = Column('role', String(255), nullable=True)
1184 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
1184 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
1185
1185
1186 # scope columns
1186 # scope columns
1187 repo_id = Column(
1187 repo_id = Column(
1188 'repo_id', Integer(), ForeignKey('repositories.repo_id'),
1188 'repo_id', Integer(), ForeignKey('repositories.repo_id'),
1189 nullable=True, unique=None, default=None)
1189 nullable=True, unique=None, default=None)
1190 repo = relationship('Repository', lazy='joined', back_populates='scoped_tokens')
1190 repo = relationship('Repository', lazy='joined', back_populates='scoped_tokens')
1191
1191
1192 repo_group_id = Column(
1192 repo_group_id = Column(
1193 'repo_group_id', Integer(), ForeignKey('groups.group_id'),
1193 'repo_group_id', Integer(), ForeignKey('groups.group_id'),
1194 nullable=True, unique=None, default=None)
1194 nullable=True, unique=None, default=None)
1195 repo_group = relationship('RepoGroup', lazy='joined')
1195 repo_group = relationship('RepoGroup', lazy='joined')
1196
1196
1197 user = relationship('User', lazy='joined', back_populates='user_auth_tokens')
1197 user = relationship('User', lazy='joined', back_populates='user_auth_tokens')
1198
1198
1199 def __repr__(self):
1199 def __repr__(self):
1200 return f"<{self.cls_name}('{self.role}')>"
1200 return f"<{self.cls_name}('{self.role}')>"
1201
1201
1202 def __json__(self):
1202 def __json__(self):
1203 data = {
1203 data = {
1204 'auth_token': self.api_key,
1204 'auth_token': self.api_key,
1205 'role': self.role,
1205 'role': self.role,
1206 'scope': self.scope_humanized,
1206 'scope': self.scope_humanized,
1207 'expired': self.expired
1207 'expired': self.expired
1208 }
1208 }
1209 return data
1209 return data
1210
1210
1211 def get_api_data(self, include_secrets=False):
1211 def get_api_data(self, include_secrets=False):
1212 data = self.__json__()
1212 data = self.__json__()
1213 if include_secrets:
1213 if include_secrets:
1214 return data
1214 return data
1215 else:
1215 else:
1216 data['auth_token'] = self.token_obfuscated
1216 data['auth_token'] = self.token_obfuscated
1217 return data
1217 return data
1218
1218
1219 @hybrid_property
1219 @hybrid_property
1220 def description_safe(self):
1220 def description_safe(self):
1221 from rhodecode.lib import helpers as h
1221 from rhodecode.lib import helpers as h
1222 return h.escape(self.description)
1222 return h.escape(self.description)
1223
1223
1224 @property
1224 @property
1225 def expired(self):
1225 def expired(self):
1226 if self.expires == -1:
1226 if self.expires == -1:
1227 return False
1227 return False
1228 return time.time() > self.expires
1228 return time.time() > self.expires
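# Sketch of the expiry semantics used above (values are illustrative):
# expires == -1 means the token never expires; any other value is compared
# against the current UNIX timestamp:
#
#   token.expires = -1                 # never expires -> token.expired is False
#   token.expires = time.time() - 1    # in the past   -> token.expired is True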
1229
1229
1230 @classmethod
1230 @classmethod
1231 def _get_role_name(cls, role):
1231 def _get_role_name(cls, role):
1232 return {
1232 return {
1233 cls.ROLE_ALL: _('all'),
1233 cls.ROLE_ALL: _('all'),
1234 cls.ROLE_HTTP: _('http/web interface'),
1234 cls.ROLE_HTTP: _('http/web interface'),
1235 cls.ROLE_VCS: _('vcs (git/hg/svn protocol)'),
1235 cls.ROLE_VCS: _('vcs (git/hg/svn protocol)'),
1236 cls.ROLE_API: _('api calls'),
1236 cls.ROLE_API: _('api calls'),
1237 cls.ROLE_FEED: _('feed access'),
1237 cls.ROLE_FEED: _('feed access'),
1238 cls.ROLE_ARTIFACT_DOWNLOAD: _('artifacts downloads'),
1238 cls.ROLE_ARTIFACT_DOWNLOAD: _('artifacts downloads'),
1239 }.get(role, role)
1239 }.get(role, role)
1240
1240
1241 @classmethod
1241 @classmethod
1242 def _get_role_description(cls, role):
1242 def _get_role_description(cls, role):
1243 return {
1243 return {
1244 cls.ROLE_ALL: _('Token for all actions.'),
1244 cls.ROLE_ALL: _('Token for all actions.'),
1245 cls.ROLE_HTTP: _('Token to access RhodeCode pages via web interface without '
1245 cls.ROLE_HTTP: _('Token to access RhodeCode pages via web interface without '
1246 'login using `api_access_controllers_whitelist` functionality.'),
1246 'login using `api_access_controllers_whitelist` functionality.'),
1247 cls.ROLE_VCS: _('Token to interact over git/hg/svn protocols. '
1247 cls.ROLE_VCS: _('Token to interact over git/hg/svn protocols. '
1248 'Requires the auth_token authentication plugin to be active. <br/>'
1248 'Requires the auth_token authentication plugin to be active. <br/>'
1249 'Such a token should then be used instead of a password to '
1249 'Such a token should then be used instead of a password to '
1250 'interact with a repository, and can additionally be '
1250 'interact with a repository, and can additionally be '
1251 'limited to a single repository using the repo scope.'),
1251 'limited to a single repository using the repo scope.'),
1252 cls.ROLE_API: _('Token limited to api calls.'),
1252 cls.ROLE_API: _('Token limited to api calls.'),
1253 cls.ROLE_FEED: _('Token to read RSS/ATOM feed.'),
1253 cls.ROLE_FEED: _('Token to read RSS/ATOM feed.'),
1254 cls.ROLE_ARTIFACT_DOWNLOAD: _('Token for artifacts downloads.'),
1254 cls.ROLE_ARTIFACT_DOWNLOAD: _('Token for artifacts downloads.'),
1255 }.get(role, role)
1255 }.get(role, role)
1256
1256
1257 @property
1257 @property
1258 def role_humanized(self):
1258 def role_humanized(self):
1259 return self._get_role_name(self.role)
1259 return self._get_role_name(self.role)
1260
1260
1261 def _get_scope(self):
1261 def _get_scope(self):
1262 if self.repo:
1262 if self.repo:
1263 return 'Repository: {}'.format(self.repo.repo_name)
1263 return 'Repository: {}'.format(self.repo.repo_name)
1264 if self.repo_group:
1264 if self.repo_group:
1265 return 'RepositoryGroup: {} (recursive)'.format(self.repo_group.group_name)
1265 return 'RepositoryGroup: {} (recursive)'.format(self.repo_group.group_name)
1266 return 'Global'
1266 return 'Global'
1267
1267
1268 @property
1268 @property
1269 def scope_humanized(self):
1269 def scope_humanized(self):
1270 return self._get_scope()
1270 return self._get_scope()
1271
1271
1272 @property
1272 @property
1273 def token_obfuscated(self):
1273 def token_obfuscated(self):
1274 if self.api_key:
1274 if self.api_key:
1275 return self.api_key[:4] + "****"
1275 return self.api_key[:4] + "****"
1276
1276
1277
1277
1278 class UserEmailMap(Base, BaseModel):
1278 class UserEmailMap(Base, BaseModel):
1279 __tablename__ = 'user_email_map'
1279 __tablename__ = 'user_email_map'
1280 __table_args__ = (
1280 __table_args__ = (
1281 Index('uem_email_idx', 'email'),
1281 Index('uem_email_idx', 'email'),
1282 Index('uem_user_id_idx', 'user_id'),
1282 Index('uem_user_id_idx', 'user_id'),
1283 UniqueConstraint('email'),
1283 UniqueConstraint('email'),
1284 base_table_args
1284 base_table_args
1285 )
1285 )
1286
1286
1287 email_id = Column("email_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1287 email_id = Column("email_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1288 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
1288 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
1289 _email = Column("email", String(255), nullable=True, unique=False, default=None)
1289 _email = Column("email", String(255), nullable=True, unique=False, default=None)
1290 user = relationship('User', lazy='joined', back_populates='user_emails')
1290 user = relationship('User', lazy='joined', back_populates='user_emails')
1291
1291
1292 @validates('_email')
1292 @validates('_email')
1293 def validate_email(self, key, email):
1293 def validate_email(self, key, email):
1294 # check that this email is not already a user's main email
1294 # check that this email is not already a user's main email
1295 main_email = Session().query(User).filter(User.email == email).scalar()
1295 main_email = Session().query(User).filter(User.email == email).scalar()
1296 if main_email is not None:
1296 if main_email is not None:
1297 raise AttributeError('email %s is already present in the user table' % email)
1297 raise AttributeError('email %s is already present in the user table' % email)
1298 return email
1298 return email
1299
1299
1300 @hybrid_property
1300 @hybrid_property
1301 def email(self):
1301 def email(self):
1302 return self._email
1302 return self._email
1303
1303
1304 @email.setter
1304 @email.setter
1305 def email(self, val):
1305 def email(self, val):
1306 self._email = val.lower() if val else None
1306 self._email = val.lower() if val else None
1307
1307
1308
1308
1309 class UserIpMap(Base, BaseModel):
1309 class UserIpMap(Base, BaseModel):
1310 __tablename__ = 'user_ip_map'
1310 __tablename__ = 'user_ip_map'
1311 __table_args__ = (
1311 __table_args__ = (
1312 UniqueConstraint('user_id', 'ip_addr'),
1312 UniqueConstraint('user_id', 'ip_addr'),
1313 base_table_args
1313 base_table_args
1314 )
1314 )
1315
1315
1316 ip_id = Column("ip_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1316 ip_id = Column("ip_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1317 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
1317 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
1318 ip_addr = Column("ip_addr", String(255), nullable=True, unique=False, default=None)
1318 ip_addr = Column("ip_addr", String(255), nullable=True, unique=False, default=None)
1319 active = Column("active", Boolean(), nullable=True, unique=None, default=True)
1319 active = Column("active", Boolean(), nullable=True, unique=None, default=True)
1320 description = Column("description", String(10000), nullable=True, unique=None, default=None)
1320 description = Column("description", String(10000), nullable=True, unique=None, default=None)
1321 user = relationship('User', lazy='joined', back_populates='user_ip_map')
1321 user = relationship('User', lazy='joined', back_populates='user_ip_map')
1322
1322
1323 @hybrid_property
1323 @hybrid_property
1324 def description_safe(self):
1324 def description_safe(self):
1325 from rhodecode.lib import helpers as h
1325 from rhodecode.lib import helpers as h
1326 return h.escape(self.description)
1326 return h.escape(self.description)
1327
1327
1328 @classmethod
1328 @classmethod
1329 def _get_ip_range(cls, ip_addr):
1329 def _get_ip_range(cls, ip_addr):
1330 net = ipaddress.ip_network(safe_str(ip_addr), strict=False)
1330 net = ipaddress.ip_network(safe_str(ip_addr), strict=False)
1331 return [str(net.network_address), str(net.broadcast_address)]
1331 return [str(net.network_address), str(net.broadcast_address)]
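# Sketch of the range expansion above (illustrative input):
#
#   UserIpMap._get_ip_range('192.168.1.0/24')
#   # -> ['192.168.1.0', '192.168.1.255']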
1332
1332
1333 def __json__(self):
1333 def __json__(self):
1334 return {
1334 return {
1335 'ip_addr': self.ip_addr,
1335 'ip_addr': self.ip_addr,
1336 'ip_range': self._get_ip_range(self.ip_addr),
1336 'ip_range': self._get_ip_range(self.ip_addr),
1337 }
1337 }
1338
1338
1339 def __repr__(self):
1339 def __repr__(self):
1340 return f"<{self.cls_name}('user_id={self.user_id} => ip={self.ip_addr}')>"
1340 return f"<{self.cls_name}('user_id={self.user_id} => ip={self.ip_addr}')>"
1341
1341
1342
1342
1343 class UserSshKeys(Base, BaseModel):
1343 class UserSshKeys(Base, BaseModel):
1344 __tablename__ = 'user_ssh_keys'
1344 __tablename__ = 'user_ssh_keys'
1345 __table_args__ = (
1345 __table_args__ = (
1346 Index('usk_ssh_key_fingerprint_idx', 'ssh_key_fingerprint'),
1346 Index('usk_ssh_key_fingerprint_idx', 'ssh_key_fingerprint'),
1347
1347
1348 UniqueConstraint('ssh_key_fingerprint'),
1348 UniqueConstraint('ssh_key_fingerprint'),
1349
1349
1350 base_table_args
1350 base_table_args
1351 )
1351 )
1352
1352
1353 ssh_key_id = Column('ssh_key_id', Integer(), nullable=False, unique=True, default=None, primary_key=True)
1353 ssh_key_id = Column('ssh_key_id', Integer(), nullable=False, unique=True, default=None, primary_key=True)
1354 ssh_key_data = Column('ssh_key_data', String(10240), nullable=False, unique=None, default=None)
1354 ssh_key_data = Column('ssh_key_data', String(10240), nullable=False, unique=None, default=None)
1355 ssh_key_fingerprint = Column('ssh_key_fingerprint', String(255), nullable=False, unique=None, default=None)
1355 ssh_key_fingerprint = Column('ssh_key_fingerprint', String(255), nullable=False, unique=None, default=None)
1356
1356
1357 description = Column('description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))
1357 description = Column('description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))
1358
1358
1359 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
1359 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
1360 accessed_on = Column('accessed_on', DateTime(timezone=False), nullable=True, default=None)
1360 accessed_on = Column('accessed_on', DateTime(timezone=False), nullable=True, default=None)
1361 user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
1361 user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
1362
1362
1363 user = relationship('User', lazy='joined', back_populates='user_ssh_keys')
1363 user = relationship('User', lazy='joined', back_populates='user_ssh_keys')
1364
1364
1365 def __json__(self):
1365 def __json__(self):
1366 data = {
1366 data = {
1367 'ssh_fingerprint': self.ssh_key_fingerprint,
1367 'ssh_fingerprint': self.ssh_key_fingerprint,
1368 'description': self.description,
1368 'description': self.description,
1369 'created_on': self.created_on
1369 'created_on': self.created_on
1370 }
1370 }
1371 return data
1371 return data
1372
1372
1373 def get_api_data(self):
1373 def get_api_data(self):
1374 data = self.__json__()
1374 data = self.__json__()
1375 return data
1375 return data
1376
1376
1377
1377
1378 class UserLog(Base, BaseModel):
1378 class UserLog(Base, BaseModel):
1379 __tablename__ = 'user_logs'
1379 __tablename__ = 'user_logs'
1380 __table_args__ = (
1380 __table_args__ = (
1381 base_table_args,
1381 base_table_args,
1382 )
1382 )
1383
1383
1384 VERSION_1 = 'v1'
1384 VERSION_1 = 'v1'
1385 VERSION_2 = 'v2'
1385 VERSION_2 = 'v2'
1386 VERSIONS = [VERSION_1, VERSION_2]
1386 VERSIONS = [VERSION_1, VERSION_2]
1387
1387
1388 user_log_id = Column("user_log_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1388 user_log_id = Column("user_log_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1389 user_id = Column("user_id", Integer(), ForeignKey('users.user_id',ondelete='SET NULL'), nullable=True, unique=None, default=None)
1389 user_id = Column("user_id", Integer(), ForeignKey('users.user_id',ondelete='SET NULL'), nullable=True, unique=None, default=None)
1390 username = Column("username", String(255), nullable=True, unique=None, default=None)
1390 username = Column("username", String(255), nullable=True, unique=None, default=None)
1391 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id', ondelete='SET NULL'), nullable=True, unique=None, default=None)
1391 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id', ondelete='SET NULL'), nullable=True, unique=None, default=None)
1392 repository_name = Column("repository_name", String(255), nullable=True, unique=None, default=None)
1392 repository_name = Column("repository_name", String(255), nullable=True, unique=None, default=None)
1393 user_ip = Column("user_ip", String(255), nullable=True, unique=None, default=None)
1393 user_ip = Column("user_ip", String(255), nullable=True, unique=None, default=None)
1394 action = Column("action", Text().with_variant(Text(1200000), 'mysql'), nullable=True, unique=None, default=None)
1394 action = Column("action", Text().with_variant(Text(1200000), 'mysql'), nullable=True, unique=None, default=None)
1395 action_date = Column("action_date", DateTime(timezone=False), nullable=True, unique=None, default=None)
1395 action_date = Column("action_date", DateTime(timezone=False), nullable=True, unique=None, default=None)
1396
1396
1397 version = Column("version", String(255), nullable=True, default=VERSION_1)
1397 version = Column("version", String(255), nullable=True, default=VERSION_1)
1398 user_data = Column('user_data_json', MutationObj.as_mutable(JsonType(dialect_map=dict(mysql=LONGTEXT()))))
1398 user_data = Column('user_data_json', MutationObj.as_mutable(JsonType(dialect_map=dict(mysql=LONGTEXT()))))
1399 action_data = Column('action_data_json', MutationObj.as_mutable(JsonType(dialect_map=dict(mysql=LONGTEXT()))))
1399 action_data = Column('action_data_json', MutationObj.as_mutable(JsonType(dialect_map=dict(mysql=LONGTEXT()))))
1400 user = relationship('User', cascade='', back_populates='user_log')
1400 user = relationship('User', cascade='', back_populates='user_log')
1401 repository = relationship('Repository', cascade='', back_populates='logs')
1401 repository = relationship('Repository', cascade='', back_populates='logs')
1402
1402
1403 def __repr__(self):
1403 def __repr__(self):
1404 return f"<{self.cls_name}('id:{self.repository_name}:{self.action}')>"
1404 return f"<{self.cls_name}('id:{self.repository_name}:{self.action}')>"
1405
1405
1406 def __json__(self):
1406 def __json__(self):
1407 return {
1407 return {
1408 'user_id': self.user_id,
1408 'user_id': self.user_id,
1409 'username': self.username,
1409 'username': self.username,
1410 'repository_id': self.repository_id,
1410 'repository_id': self.repository_id,
1411 'repository_name': self.repository_name,
1411 'repository_name': self.repository_name,
1412 'user_ip': self.user_ip,
1412 'user_ip': self.user_ip,
1413 'action_date': self.action_date,
1413 'action_date': self.action_date,
1414 'action': self.action,
1414 'action': self.action,
1415 }
1415 }
1416
1416
1417 @hybrid_property
1417 @hybrid_property
1418 def entry_id(self):
1418 def entry_id(self):
1419 return self.user_log_id
1419 return self.user_log_id
1420
1420
1421 @property
1421 @property
1422 def action_as_day(self):
1422 def action_as_day(self):
1423 return datetime.date(*self.action_date.timetuple()[:3])
1423 return datetime.date(*self.action_date.timetuple()[:3])
1424
1424
1425
1425
1426 class UserGroup(Base, BaseModel):
1426 class UserGroup(Base, BaseModel):
1427 __tablename__ = 'users_groups'
1427 __tablename__ = 'users_groups'
1428 __table_args__ = (
1428 __table_args__ = (
1429 base_table_args,
1429 base_table_args,
1430 )
1430 )
1431
1431
1432 users_group_id = Column("users_group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1432 users_group_id = Column("users_group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1433 users_group_name = Column("users_group_name", String(255), nullable=False, unique=True, default=None)
1433 users_group_name = Column("users_group_name", String(255), nullable=False, unique=True, default=None)
1434 user_group_description = Column("user_group_description", String(10000), nullable=True, unique=None, default=None)
1434 user_group_description = Column("user_group_description", String(10000), nullable=True, unique=None, default=None)
1435 users_group_active = Column("users_group_active", Boolean(), nullable=True, unique=None, default=None)
1435 users_group_active = Column("users_group_active", Boolean(), nullable=True, unique=None, default=None)
1436 inherit_default_permissions = Column("users_group_inherit_default_permissions", Boolean(), nullable=False, unique=None, default=True)
1436 inherit_default_permissions = Column("users_group_inherit_default_permissions", Boolean(), nullable=False, unique=None, default=True)
1437 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None)
1437 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None)
1438 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
1438 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
1439 _group_data = Column("group_data", LargeBinary(), nullable=True) # JSON data
1439 _group_data = Column("group_data", LargeBinary(), nullable=True) # JSON data
1440
1440
1441 members = relationship('UserGroupMember', cascade="all, delete-orphan", lazy="joined", back_populates='users_group')
1441 members = relationship('UserGroupMember', cascade="all, delete-orphan", lazy="joined", back_populates='users_group')
1442 users_group_to_perm = relationship('UserGroupToPerm', cascade='all', back_populates='users_group')
1442 users_group_to_perm = relationship('UserGroupToPerm', cascade='all', back_populates='users_group')
1443 users_group_repo_to_perm = relationship('UserGroupRepoToPerm', cascade='all', back_populates='users_group')
1443 users_group_repo_to_perm = relationship('UserGroupRepoToPerm', cascade='all', back_populates='users_group')
1444 users_group_repo_group_to_perm = relationship('UserGroupRepoGroupToPerm', cascade='all', back_populates='users_group')
1444 users_group_repo_group_to_perm = relationship('UserGroupRepoGroupToPerm', cascade='all', back_populates='users_group')
1445 user_user_group_to_perm = relationship('UserUserGroupToPerm', cascade='all', back_populates='user_group')
1445 user_user_group_to_perm = relationship('UserUserGroupToPerm', cascade='all', back_populates='user_group')
1446
1446
1447 user_group_user_group_to_perm = relationship('UserGroupUserGroupToPerm', primaryjoin="UserGroupUserGroupToPerm.target_user_group_id==UserGroup.users_group_id", cascade='all', back_populates='target_user_group')
1447 user_group_user_group_to_perm = relationship('UserGroupUserGroupToPerm', primaryjoin="UserGroupUserGroupToPerm.target_user_group_id==UserGroup.users_group_id", cascade='all', back_populates='target_user_group')
1448
1448
1449 user_group_review_rules = relationship('RepoReviewRuleUserGroup', cascade='all', back_populates='users_group')
1449 user_group_review_rules = relationship('RepoReviewRuleUserGroup', cascade='all', back_populates='users_group')
1450 user = relationship('User', primaryjoin="User.user_id==UserGroup.user_id", back_populates='user_groups')
1450 user = relationship('User', primaryjoin="User.user_id==UserGroup.user_id", back_populates='user_groups')
1451
1451
1452 @classmethod
1452 @classmethod
1453 def _load_group_data(cls, column):
1453 def _load_group_data(cls, column):
1454 if not column:
1454 if not column:
1455 return {}
1455 return {}
1456
1456
1457 try:
1457 try:
1458 return json.loads(column) or {}
1458 return json.loads(column) or {}
1459 except TypeError:
1459 except TypeError:
1460 return {}
1460 return {}
1461
1461
1462 @hybrid_property
1462 @hybrid_property
1463 def description_safe(self):
1463 def description_safe(self):
1464 from rhodecode.lib import helpers as h
1464 from rhodecode.lib import helpers as h
1465 return h.escape(self.user_group_description)
1465 return h.escape(self.user_group_description)
1466
1466
1467 @hybrid_property
1467 @hybrid_property
1468 def group_data(self):
1468 def group_data(self):
1469 return self._load_group_data(self._group_data)
1469 return self._load_group_data(self._group_data)
1470
1470
1471 @group_data.expression
1471 @group_data.expression
1472 def group_data(self, **kwargs):
1472 def group_data(self, **kwargs):
1473 return self._group_data
1473 return self._group_data
1474
1474
1475 @group_data.setter
1475 @group_data.setter
1476 def group_data(self, val):
1476 def group_data(self, val):
1477 try:
1477 try:
1478 self._group_data = json.dumps(val)
1478 self._group_data = json.dumps(val)
1479 except Exception:
1479 except Exception:
1480 log.error(traceback.format_exc())
1480 log.error(traceback.format_exc())
1481
1481
1482 @classmethod
1482 @classmethod
1483 def _load_sync(cls, group_data):
1483 def _load_sync(cls, group_data):
1484 if group_data:
1484 if group_data:
1485 return group_data.get('extern_type')
1485 return group_data.get('extern_type')
1486
1486
1487 @property
1487 @property
1488 def sync(self):
1488 def sync(self):
1489 return self._load_sync(self.group_data)
1489 return self._load_sync(self.group_data)
1490
1490
1491 def __repr__(self):
1491 def __repr__(self):
1492 return f"<{self.cls_name}('id:{self.users_group_id}:{self.users_group_name}')>"
1492 return f"<{self.cls_name}('id:{self.users_group_id}:{self.users_group_name}')>"
1493
1493
1494 @classmethod
1494 @classmethod
1495 def get_by_group_name(cls, group_name, cache=False,
1495 def get_by_group_name(cls, group_name, cache=False,
1496 case_insensitive=False):
1496 case_insensitive=False):
1497 if case_insensitive:
1497 if case_insensitive:
1498 q = cls.query().filter(func.lower(cls.users_group_name) ==
1498 q = cls.query().filter(func.lower(cls.users_group_name) ==
1499 func.lower(group_name))
1499 func.lower(group_name))
1500
1500
1501 else:
1501 else:
1502 q = cls.query().filter(cls.users_group_name == group_name)
1502 q = cls.query().filter(cls.users_group_name == group_name)
1503 if cache:
1503 if cache:
1504 name_key = _hash_key(group_name)
1504 name_key = _hash_key(group_name)
1505 q = q.options(
1505 q = q.options(
1506 FromCache("sql_cache_short", f"get_group_{name_key}"))
1506 FromCache("sql_cache_short", f"get_group_{name_key}"))
1507 return q.scalar()
1507 return q.scalar()
1508
1508
1509 @classmethod
1509 @classmethod
1510 def get(cls, user_group_id, cache=False):
1510 def get(cls, user_group_id, cache=False):
1511 if not user_group_id:
1511 if not user_group_id:
1512 return
1512 return
1513
1513
1514 user_group = cls.query()
1514 user_group = cls.query()
1515 if cache:
1515 if cache:
1516 user_group = user_group.options(
1516 user_group = user_group.options(
1517 FromCache("sql_cache_short", "get_users_group_%s" % user_group_id))
1517 FromCache("sql_cache_short", "get_users_group_%s" % user_group_id))
1518 return user_group.get(user_group_id)
1518 return user_group.get(user_group_id)
1519
1519
1520 def permissions(self, with_admins=True, with_owner=True,
1520 def permissions(self, with_admins=True, with_owner=True,
1521 expand_from_user_groups=False):
1521 expand_from_user_groups=False):
1522 """
1522 """
1523 Permissions for user groups
1523 Permissions for user groups
1524 """
1524 """
1525 _admin_perm = 'usergroup.admin'
1525 _admin_perm = 'usergroup.admin'
1526
1526
1527 owner_row = []
1527 owner_row = []
1528 if with_owner:
1528 if with_owner:
1529 usr = AttributeDict(self.user.get_dict())
1529 usr = AttributeDict(self.user.get_dict())
1530 usr.owner_row = True
1530 usr.owner_row = True
1531 usr.permission = _admin_perm
1531 usr.permission = _admin_perm
1532 owner_row.append(usr)
1532 owner_row.append(usr)
1533
1533
1534 super_admin_ids = []
1534 super_admin_ids = []
1535 super_admin_rows = []
1535 super_admin_rows = []
1536 if with_admins:
1536 if with_admins:
1537 for usr in User.get_all_super_admins():
1537 for usr in User.get_all_super_admins():
1538 super_admin_ids.append(usr.user_id)
1538 super_admin_ids.append(usr.user_id)
1539 # if this admin is also owner, don't double the record
1539 # if this admin is also owner, don't double the record
1540 if owner_row and usr.user_id == owner_row[0].user_id:
1540 if owner_row and usr.user_id == owner_row[0].user_id:
1541 owner_row[0].admin_row = True
1541 owner_row[0].admin_row = True
1542 else:
1542 else:
1543 usr = AttributeDict(usr.get_dict())
1543 usr = AttributeDict(usr.get_dict())
1544 usr.admin_row = True
1544 usr.admin_row = True
1545 usr.permission = _admin_perm
1545 usr.permission = _admin_perm
1546 super_admin_rows.append(usr)
1546 super_admin_rows.append(usr)
1547
1547
1548 q = UserUserGroupToPerm.query().filter(UserUserGroupToPerm.user_group == self)
1548 q = UserUserGroupToPerm.query().filter(UserUserGroupToPerm.user_group == self)
1549 q = q.options(joinedload(UserUserGroupToPerm.user_group),
1549 q = q.options(joinedload(UserUserGroupToPerm.user_group),
1550 joinedload(UserUserGroupToPerm.user),
1550 joinedload(UserUserGroupToPerm.user),
1551 joinedload(UserUserGroupToPerm.permission),)
1551 joinedload(UserUserGroupToPerm.permission),)
1552
1552
1553 # get owners, admins and their permissions. We rewrite the objects
1553 # get owners, admins and their permissions. We rewrite the objects
1554 # from sqlalchemy models into plain AttributeDicts because the
1554 # from sqlalchemy models into plain AttributeDicts because the
1555 # sqlalchemy session holds a global reference and changing one object
1555 # sqlalchemy session holds a global reference and changing one object
1556 # would propagate to all others. This means that if an admin is also
1556 # would propagate to all others. This means that if an admin is also
1557 # an owner, setting admin_row would otherwise affect both entries.
1557 # an owner, setting admin_row would otherwise affect both entries.
1558 perm_rows = []
1558 perm_rows = []
1559 for _usr in q.all():
1559 for _usr in q.all():
1560 usr = AttributeDict(_usr.user.get_dict())
1560 usr = AttributeDict(_usr.user.get_dict())
1561 # if this user is also owner/admin, mark as duplicate record
1561 # if this user is also owner/admin, mark as duplicate record
1562 if (owner_row and usr.user_id == owner_row[0].user_id) or usr.user_id in super_admin_ids:
1562 if (owner_row and usr.user_id == owner_row[0].user_id) or usr.user_id in super_admin_ids:
1563 usr.duplicate_perm = True
1563 usr.duplicate_perm = True
1564 usr.permission = _usr.permission.permission_name
1564 usr.permission = _usr.permission.permission_name
1565 perm_rows.append(usr)
1565 perm_rows.append(usr)
1566
1566
1567 # sort the perm rows: the 'default' user first, then by admin, write,
1567 # sort the perm rows: the 'default' user first, then by admin, write,
1568 # read, none permission, and alphabetically within each permission
1568 # read, none permission, and alphabetically within each permission
1569 # group
1569 # group
1570 perm_rows = sorted(perm_rows, key=display_user_sort)
1570 perm_rows = sorted(perm_rows, key=display_user_sort)
1571
1571
1572 user_groups_rows = []
1572 user_groups_rows = []
1573 if expand_from_user_groups:
1573 if expand_from_user_groups:
1574 for ug in self.permission_user_groups(with_members=True):
1574 for ug in self.permission_user_groups(with_members=True):
1575 for user_data in ug.members:
1575 for user_data in ug.members:
1576 user_groups_rows.append(user_data)
1576 user_groups_rows.append(user_data)
1577
1577
1578 return super_admin_rows + owner_row + perm_rows + user_groups_rows
1578 return super_admin_rows + owner_row + perm_rows + user_groups_rows
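# Sketch of a returned row (field values are hypothetical): each entry is an
# AttributeDict of the user's fields plus a 'permission' key and optional
# 'owner_row' / 'admin_row' / 'duplicate_perm' flags, e.g.:
#
#   AttributeDict({'user_id': 2, 'username': 'some-admin', ...,
#                  'permission': 'usergroup.admin', 'admin_row': True})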
1579
1579
1580 def permission_user_groups(self, with_members=False):
1580 def permission_user_groups(self, with_members=False):
1581 q = UserGroupUserGroupToPerm.query()\
1581 q = UserGroupUserGroupToPerm.query()\
1582 .filter(UserGroupUserGroupToPerm.target_user_group == self)
1582 .filter(UserGroupUserGroupToPerm.target_user_group == self)
1583 q = q.options(joinedload(UserGroupUserGroupToPerm.user_group),
1583 q = q.options(joinedload(UserGroupUserGroupToPerm.user_group),
1584 joinedload(UserGroupUserGroupToPerm.target_user_group),
1584 joinedload(UserGroupUserGroupToPerm.target_user_group),
1585 joinedload(UserGroupUserGroupToPerm.permission),)
1585 joinedload(UserGroupUserGroupToPerm.permission),)
1586
1586
1587 perm_rows = []
1587 perm_rows = []
1588 for _user_group in q.all():
1588 for _user_group in q.all():
1589 entry = AttributeDict(_user_group.user_group.get_dict())
1589 entry = AttributeDict(_user_group.user_group.get_dict())
1590 entry.permission = _user_group.permission.permission_name
1590 entry.permission = _user_group.permission.permission_name
1591 if with_members:
1591 if with_members:
1592 entry.members = [x.user.get_dict()
1592 entry.members = [x.user.get_dict()
1593 for x in _user_group.user_group.members]
1593 for x in _user_group.user_group.members]
1594 perm_rows.append(entry)
1594 perm_rows.append(entry)
1595
1595
1596 perm_rows = sorted(perm_rows, key=display_user_group_sort)
1596 perm_rows = sorted(perm_rows, key=display_user_group_sort)
1597 return perm_rows
1597 return perm_rows
1598
1598
1599 def _get_default_perms(self, user_group, suffix=''):
1599 def _get_default_perms(self, user_group, suffix=''):
1600 from rhodecode.model.permission import PermissionModel
1600 from rhodecode.model.permission import PermissionModel
1601 return PermissionModel().get_default_perms(user_group.users_group_to_perm, suffix)
1601 return PermissionModel().get_default_perms(user_group.users_group_to_perm, suffix)
1602
1602
1603 def get_default_perms(self, suffix=''):
1603 def get_default_perms(self, suffix=''):
1604 return self._get_default_perms(self, suffix)
1604 return self._get_default_perms(self, suffix)
1605
1605
1606 def get_api_data(self, with_group_members=True, include_secrets=False):
1606 def get_api_data(self, with_group_members=True, include_secrets=False):
1607 """
1607 """
1608 :param include_secrets: forwarded to :meth:`User.get_api_data` for each
1608 :param include_secrets: forwarded to :meth:`User.get_api_data` for each
1609 group member; see that method for details.
1609 group member; see that method for details.
1610
1610
1611 """
1611 """
1612 user_group = self
1612 user_group = self
1613 data = {
1613 data = {
1614 'users_group_id': user_group.users_group_id,
1614 'users_group_id': user_group.users_group_id,
1615 'group_name': user_group.users_group_name,
1615 'group_name': user_group.users_group_name,
1616 'group_description': user_group.user_group_description,
1616 'group_description': user_group.user_group_description,
1617 'active': user_group.users_group_active,
1617 'active': user_group.users_group_active,
1618 'owner': user_group.user.username,
1618 'owner': user_group.user.username,
1619 'sync': user_group.sync,
1619 'sync': user_group.sync,
1620 'owner_email': user_group.user.email,
1620 'owner_email': user_group.user.email,
1621 }
1621 }
1622
1622
1623 if with_group_members:
1623 if with_group_members:
1624 users = []
1624 users = []
1625 for user in user_group.members:
1625 for user in user_group.members:
1626 user = user.user
1626 user = user.user
1627 users.append(user.get_api_data(include_secrets=include_secrets))
1627 users.append(user.get_api_data(include_secrets=include_secrets))
1628 data['users'] = users
1628 data['users'] = users
1629
1629
1630 return data
1630 return data
1631
1631
1632
1632
1633 class UserGroupMember(Base, BaseModel):
1633 class UserGroupMember(Base, BaseModel):
1634 __tablename__ = 'users_groups_members'
1634 __tablename__ = 'users_groups_members'
1635 __table_args__ = (
1635 __table_args__ = (
1636 base_table_args,
1636 base_table_args,
1637 )
1637 )
1638
1638
1639 users_group_member_id = Column("users_group_member_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1639 users_group_member_id = Column("users_group_member_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1640 users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
1640 users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
1641 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
1641 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
1642
1642
1643 user = relationship('User', lazy='joined', back_populates='group_member')
1643 user = relationship('User', lazy='joined', back_populates='group_member')
1644 users_group = relationship('UserGroup', back_populates='members')
1644 users_group = relationship('UserGroup', back_populates='members')
1645
1645
1646 def __init__(self, gr_id='', u_id=''):
1646 def __init__(self, gr_id='', u_id=''):
1647 self.users_group_id = gr_id
1647 self.users_group_id = gr_id
1648 self.user_id = u_id
1648 self.user_id = u_id
1649
1649
1650
1650
1651 class RepositoryField(Base, BaseModel):
1651 class RepositoryField(Base, BaseModel):
1652 __tablename__ = 'repositories_fields'
1652 __tablename__ = 'repositories_fields'
1653 __table_args__ = (
1653 __table_args__ = (
1654 UniqueConstraint('repository_id', 'field_key'), # no-multi field
1654 UniqueConstraint('repository_id', 'field_key'), # no-multi field
1655 base_table_args,
1655 base_table_args,
1656 )
1656 )
1657
1657
1658 PREFIX = 'ex_'  # prefix used in forms to avoid conflicts with already existing fields
1658 PREFIX = 'ex_'  # prefix used in forms to avoid conflicts with already existing fields
1659
1659
1660 repo_field_id = Column("repo_field_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1660 repo_field_id = Column("repo_field_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1661 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
1661 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
1662 field_key = Column("field_key", String(250))
1662 field_key = Column("field_key", String(250))
1663 field_label = Column("field_label", String(1024), nullable=False)
1663 field_label = Column("field_label", String(1024), nullable=False)
1664 field_value = Column("field_value", String(10000), nullable=False)
1664 field_value = Column("field_value", String(10000), nullable=False)
1665 field_desc = Column("field_desc", String(1024), nullable=False)
1665 field_desc = Column("field_desc", String(1024), nullable=False)
1666 field_type = Column("field_type", String(255), nullable=False, unique=None)
1666 field_type = Column("field_type", String(255), nullable=False, unique=None)
1667 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
1667 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
1668
1668
1669 repository = relationship('Repository', back_populates='extra_fields')
1669 repository = relationship('Repository', back_populates='extra_fields')
1670
1670
1671 @property
1671 @property
1672 def field_key_prefixed(self):
1672 def field_key_prefixed(self):
1673 return f'{self.PREFIX}{self.field_key}'
1673 return f'{self.PREFIX}{self.field_key}'
1674
1674
1675 @classmethod
1675 @classmethod
1676 def un_prefix_key(cls, key):
1676 def un_prefix_key(cls, key):
1677 if key.startswith(cls.PREFIX):
1677 if key.startswith(cls.PREFIX):
1678 return key[len(cls.PREFIX):]
1678 return key[len(cls.PREFIX):]
1679 return key
1679 return key
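# Sketch of the key (un)prefixing (illustrative key name):
#
#   RepositoryField.un_prefix_key('ex_some_field')   # -> 'some_field'
#   RepositoryField.un_prefix_key('some_field')      # -> 'some_field' (unchanged)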
1680
1680
1681 @classmethod
1681 @classmethod
1682 def get_by_key_name(cls, key, repo):
1682 def get_by_key_name(cls, key, repo):
1683 row = cls.query()\
1683 row = cls.query()\
1684 .filter(cls.repository == repo)\
1684 .filter(cls.repository == repo)\
1685 .filter(cls.field_key == key).scalar()
1685 .filter(cls.field_key == key).scalar()
1686 return row
1686 return row
1687
1687
1688
1688
1689 class Repository(Base, BaseModel):
1689 class Repository(Base, BaseModel):
1690 __tablename__ = 'repositories'
1690 __tablename__ = 'repositories'
1691 __table_args__ = (
1691 __table_args__ = (
1692 Index('r_repo_name_idx', 'repo_name', mysql_length=255),
1692 Index('r_repo_name_idx', 'repo_name', mysql_length=255),
1693 base_table_args,
1693 base_table_args,
1694 )
1694 )
1695 DEFAULT_CLONE_URI = '{scheme}://{user}@{netloc}/{repo}'
1695 DEFAULT_CLONE_URI = '{scheme}://{user}@{netloc}/{repo}'
1696 DEFAULT_CLONE_URI_ID = '{scheme}://{user}@{netloc}/_{repoid}'
1696 DEFAULT_CLONE_URI_ID = '{scheme}://{user}@{netloc}/_{repoid}'
1697 DEFAULT_CLONE_URI_SSH = 'ssh://{sys_user}@{hostname}/{repo}'
1697 DEFAULT_CLONE_URI_SSH = 'ssh://{sys_user}@{hostname}/{repo}'
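# Sketch of how these templates expand (values are hypothetical):
#
#   Repository.DEFAULT_CLONE_URI.format(
#       scheme='https', user='some-user', netloc='code.example.com',
#       repo='group/some-repo')
#   # -> 'https://some-user@code.example.com/group/some-repo'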
1698
1698
1699 STATE_CREATED = 'repo_state_created'
1699 STATE_CREATED = 'repo_state_created'
1700 STATE_PENDING = 'repo_state_pending'
1700 STATE_PENDING = 'repo_state_pending'
1701 STATE_ERROR = 'repo_state_error'
1701 STATE_ERROR = 'repo_state_error'
1702
1702
1703 LOCK_AUTOMATIC = 'lock_auto'
1703 LOCK_AUTOMATIC = 'lock_auto'
1704 LOCK_API = 'lock_api'
1704 LOCK_API = 'lock_api'
1705 LOCK_WEB = 'lock_web'
1705 LOCK_WEB = 'lock_web'
1706 LOCK_PULL = 'lock_pull'
1706 LOCK_PULL = 'lock_pull'
1707
1707
1708 NAME_SEP = URL_SEP
1708 NAME_SEP = URL_SEP
1709
1709
1710 repo_id = Column(
1710 repo_id = Column(
1711 "repo_id", Integer(), nullable=False, unique=True, default=None,
1711 "repo_id", Integer(), nullable=False, unique=True, default=None,
1712 primary_key=True)
1712 primary_key=True)
1713 _repo_name = Column(
1713 _repo_name = Column(
1714 "repo_name", Text(), nullable=False, default=None)
1714 "repo_name", Text(), nullable=False, default=None)
1715 repo_name_hash = Column(
1715 repo_name_hash = Column(
1716 "repo_name_hash", String(255), nullable=False, unique=True)
1716 "repo_name_hash", String(255), nullable=False, unique=True)
1717 repo_state = Column("repo_state", String(255), nullable=True)
1717 repo_state = Column("repo_state", String(255), nullable=True)
1718
1718
1719 clone_uri = Column(
1719 clone_uri = Column(
1720 "clone_uri", EncryptedTextValue(), nullable=True, unique=False,
1720 "clone_uri", EncryptedTextValue(), nullable=True, unique=False,
1721 default=None)
1721 default=None)
1722 push_uri = Column(
1722 push_uri = Column(
1723 "push_uri", EncryptedTextValue(), nullable=True, unique=False,
1723 "push_uri", EncryptedTextValue(), nullable=True, unique=False,
1724 default=None)
1724 default=None)
1725 repo_type = Column(
1725 repo_type = Column(
1726 "repo_type", String(255), nullable=False, unique=False, default=None)
1726 "repo_type", String(255), nullable=False, unique=False, default=None)
1727 user_id = Column(
1727 user_id = Column(
1728 "user_id", Integer(), ForeignKey('users.user_id'), nullable=False,
1728 "user_id", Integer(), ForeignKey('users.user_id'), nullable=False,
1729 unique=False, default=None)
1729 unique=False, default=None)
1730 private = Column(
1730 private = Column(
1731 "private", Boolean(), nullable=True, unique=None, default=None)
1731 "private", Boolean(), nullable=True, unique=None, default=None)
1732 archived = Column(
1732 archived = Column(
1733 "archived", Boolean(), nullable=True, unique=None, default=None)
1733 "archived", Boolean(), nullable=True, unique=None, default=None)
1734 enable_statistics = Column(
1734 enable_statistics = Column(
1735 "statistics", Boolean(), nullable=True, unique=None, default=True)
1735 "statistics", Boolean(), nullable=True, unique=None, default=True)
1736 enable_downloads = Column(
1736 enable_downloads = Column(
1737 "downloads", Boolean(), nullable=True, unique=None, default=True)
1737 "downloads", Boolean(), nullable=True, unique=None, default=True)
1738 description = Column(
1738 description = Column(
1739 "description", String(10000), nullable=True, unique=None, default=None)
1739 "description", String(10000), nullable=True, unique=None, default=None)
1740 created_on = Column(
1740 created_on = Column(
1741 'created_on', DateTime(timezone=False), nullable=True, unique=None,
1741 'created_on', DateTime(timezone=False), nullable=True, unique=None,
1742 default=datetime.datetime.now)
1742 default=datetime.datetime.now)
1743 updated_on = Column(
1743 updated_on = Column(
1744 'updated_on', DateTime(timezone=False), nullable=True, unique=None,
1744 'updated_on', DateTime(timezone=False), nullable=True, unique=None,
1745 default=datetime.datetime.now)
1745 default=datetime.datetime.now)
1746 _landing_revision = Column(
1746 _landing_revision = Column(
1747 "landing_revision", String(255), nullable=False, unique=False,
1747 "landing_revision", String(255), nullable=False, unique=False,
1748 default=None)
1748 default=None)
1749 enable_locking = Column(
1749 enable_locking = Column(
1750 "enable_locking", Boolean(), nullable=False, unique=None,
1750 "enable_locking", Boolean(), nullable=False, unique=None,
1751 default=False)
1751 default=False)
1752 _locked = Column(
1752 _locked = Column(
1753 "locked", String(255), nullable=True, unique=False, default=None)
1753 "locked", String(255), nullable=True, unique=False, default=None)
1754 _changeset_cache = Column(
1754 _changeset_cache = Column(
1755 "changeset_cache", LargeBinary(), nullable=True) # JSON data
1755 "changeset_cache", LargeBinary(), nullable=True) # JSON data
1756
1756
1757 fork_id = Column(
1757 fork_id = Column(
1758 "fork_id", Integer(), ForeignKey('repositories.repo_id'),
1758 "fork_id", Integer(), ForeignKey('repositories.repo_id'),
1759 nullable=True, unique=False, default=None)
1759 nullable=True, unique=False, default=None)
1760 group_id = Column(
1760 group_id = Column(
1761 "group_id", Integer(), ForeignKey('groups.group_id'), nullable=True,
1761 "group_id", Integer(), ForeignKey('groups.group_id'), nullable=True,
1762 unique=False, default=None)
1762 unique=False, default=None)
1763
1763
1764 user = relationship('User', lazy='joined', back_populates='repositories')
1764 user = relationship('User', lazy='joined', back_populates='repositories')
1765 fork = relationship('Repository', remote_side=repo_id, lazy='joined')
1765 fork = relationship('Repository', remote_side=repo_id, lazy='joined')
1766 group = relationship('RepoGroup', lazy='joined')
1766 group = relationship('RepoGroup', lazy='joined')
1767 repo_to_perm = relationship('UserRepoToPerm', cascade='all', order_by='UserRepoToPerm.repo_to_perm_id')
1767 repo_to_perm = relationship('UserRepoToPerm', cascade='all', order_by='UserRepoToPerm.repo_to_perm_id')
1768 users_group_to_perm = relationship('UserGroupRepoToPerm', cascade='all', back_populates='repository')
1768 users_group_to_perm = relationship('UserGroupRepoToPerm', cascade='all', back_populates='repository')
1769 stats = relationship('Statistics', cascade='all', uselist=False)
1769 stats = relationship('Statistics', cascade='all', uselist=False)
1770
1770
1771 followers = relationship('UserFollowing', primaryjoin='UserFollowing.follows_repo_id==Repository.repo_id', cascade='all', back_populates='follows_repository')
1771 followers = relationship('UserFollowing', primaryjoin='UserFollowing.follows_repo_id==Repository.repo_id', cascade='all', back_populates='follows_repository')
1772 extra_fields = relationship('RepositoryField', cascade="all, delete-orphan", back_populates='repository')
1772 extra_fields = relationship('RepositoryField', cascade="all, delete-orphan", back_populates='repository')
1773
1773
1774 logs = relationship('UserLog', back_populates='repository')
1774 logs = relationship('UserLog', back_populates='repository')
1775
1775
1776 comments = relationship('ChangesetComment', cascade="all, delete-orphan", back_populates='repo')
1776 comments = relationship('ChangesetComment', cascade="all, delete-orphan", back_populates='repo')
1777
1777
1778 pull_requests_source = relationship(
1778 pull_requests_source = relationship(
1779 'PullRequest',
1779 'PullRequest',
1780 primaryjoin='PullRequest.source_repo_id==Repository.repo_id',
1780 primaryjoin='PullRequest.source_repo_id==Repository.repo_id',
1781 cascade="all, delete-orphan",
1781 cascade="all, delete-orphan",
1782 overlaps="source_repo"
1782 overlaps="source_repo"
1783 )
1783 )
1784 pull_requests_target = relationship(
1784 pull_requests_target = relationship(
1785 'PullRequest',
1785 'PullRequest',
1786 primaryjoin='PullRequest.target_repo_id==Repository.repo_id',
1786 primaryjoin='PullRequest.target_repo_id==Repository.repo_id',
1787 cascade="all, delete-orphan",
1787 cascade="all, delete-orphan",
1788 overlaps="target_repo"
1788 overlaps="target_repo"
1789 )
1789 )
1790
1790
1791 ui = relationship('RepoRhodeCodeUi', cascade="all")
1791 ui = relationship('RepoRhodeCodeUi', cascade="all")
1792 settings = relationship('RepoRhodeCodeSetting', cascade="all")
1792 settings = relationship('RepoRhodeCodeSetting', cascade="all")
1793 integrations = relationship('Integration', cascade="all, delete-orphan", back_populates='repo')
1793 integrations = relationship('Integration', cascade="all, delete-orphan", back_populates='repo')
1794
1794
1795 scoped_tokens = relationship('UserApiKeys', cascade="all", back_populates='repo')
1795 scoped_tokens = relationship('UserApiKeys', cascade="all", back_populates='repo')
1796
1796
1797 # no cascade, set NULL
1797 # no cascade, set NULL
1798 artifacts = relationship('FileStore', primaryjoin='FileStore.scope_repo_id==Repository.repo_id', viewonly=True)
1798 artifacts = relationship('FileStore', primaryjoin='FileStore.scope_repo_id==Repository.repo_id', viewonly=True)
1799
1799
1800 review_rules = relationship('RepoReviewRule')
1800 review_rules = relationship('RepoReviewRule')
1801 user_branch_perms = relationship('UserToRepoBranchPermission')
1801 user_branch_perms = relationship('UserToRepoBranchPermission')
1802 user_group_branch_perms = relationship('UserGroupToRepoBranchPermission')
1802 user_group_branch_perms = relationship('UserGroupToRepoBranchPermission')
1803
1803
1804 def __repr__(self):
1804 def __repr__(self):
1805 return "<%s('%s:%s')>" % (self.cls_name, self.repo_id, self.repo_name)
1805 return "<%s('%s:%s')>" % (self.cls_name, self.repo_id, self.repo_name)
1806
1806
1807 @hybrid_property
1807 @hybrid_property
1808 def description_safe(self):
1808 def description_safe(self):
1809 from rhodecode.lib import helpers as h
1809 from rhodecode.lib import helpers as h
1810 return h.escape(self.description)
1810 return h.escape(self.description)
1811
1811
1812 @hybrid_property
1812 @hybrid_property
1813 def landing_rev(self):
1813 def landing_rev(self):
1814 # should always return [rev_type, rev], e.g. ['branch', 'master']
1815 if self._landing_revision:
1815 if self._landing_revision:
1816 _rev_info = self._landing_revision.split(':')
1816 _rev_info = self._landing_revision.split(':')
1817 if len(_rev_info) < 2:
1817 if len(_rev_info) < 2:
1818 _rev_info.insert(0, 'rev')
1818 _rev_info.insert(0, 'rev')
1819 return [_rev_info[0], _rev_info[1]]
1819 return [_rev_info[0], _rev_info[1]]
1820 return [None, None]
1820 return [None, None]
1821
1821
1822 @property
1822 @property
1823 def landing_ref_type(self):
1823 def landing_ref_type(self):
1824 return self.landing_rev[0]
1824 return self.landing_rev[0]
1825
1825
1826 @property
1826 @property
1827 def landing_ref_name(self):
1827 def landing_ref_name(self):
1828 return self.landing_rev[1]
1828 return self.landing_rev[1]
1829
1829
1830 @landing_rev.setter
1830 @landing_rev.setter
1831 def landing_rev(self, val):
1831 def landing_rev(self, val):
1832 if ':' not in val:
1832 if ':' not in val:
1833 raise ValueError('value must be delimited with `:` and consist '
1833 raise ValueError('value must be delimited with `:` and consist '
1834 'of <rev_type>:<rev>, got %s instead' % val)
1834 'of <rev_type>:<rev>, got %s instead' % val)
1835 self._landing_revision = val
1835 self._landing_revision = val
1836
1836
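# --- illustrative sketch, not part of the original module ---
# The `landing_rev` property above persists its value as a single
# "<rev_type>:<rev>" string in `_landing_revision`. A minimal standalone
# recreation of that round-trip (the helper name is hypothetical):

def _parse_landing_rev(raw: str) -> list:
    # mirrors the getter: prepend 'rev' when no explicit type is present
    parts = raw.split(':')
    if len(parts) < 2:
        parts.insert(0, 'rev')
    return [parts[0], parts[1]]

assert _parse_landing_rev('branch:master') == ['branch', 'master']
assert _parse_landing_rev('deadbeef') == ['rev', 'deadbeef']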
1837 @hybrid_property
1837 @hybrid_property
1838 def locked(self):
1838 def locked(self):
1839 if self._locked:
1839 if self._locked:
1840 user_id, timelocked, reason = self._locked.split(':')
1840 user_id, timelocked, reason = self._locked.split(':')
1841 lock_values = int(user_id), timelocked, reason
1841 lock_values = int(user_id), timelocked, reason
1842 else:
1842 else:
1843 lock_values = [None, None, None]
1843 lock_values = [None, None, None]
1844 return lock_values
1844 return lock_values
1845
1845
1846 @locked.setter
1846 @locked.setter
1847 def locked(self, val):
1847 def locked(self, val):
1848 if val and isinstance(val, (list, tuple)):
1848 if val and isinstance(val, (list, tuple)):
1849 self._locked = ':'.join(map(str, val))
1849 self._locked = ':'.join(map(str, val))
1850 else:
1850 else:
1851 self._locked = None
1851 self._locked = None
1852
1852
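# --- illustrative sketch, not part of the original module ---
# The `locked` property above serialises its value as a "user_id:time:reason"
# string in the `_locked` column. A standalone recreation of that encoding
# convention (helper names are hypothetical):

def _encode_lock(user_id, lock_time, reason):
    return ':'.join(map(str, (user_id, lock_time, reason)))

def _decode_lock(raw):
    if not raw:
        return [None, None, None]
    user_id, lock_time, reason = raw.split(':')
    return int(user_id), lock_time, reason

encoded = _encode_lock(2, 1700000000.0, 'lock_auto')
assert _decode_lock(encoded) == (2, '1700000000.0', 'lock_auto')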
1853 @classmethod
1853 @classmethod
1854 def _load_changeset_cache(cls, repo_id, changeset_cache_raw):
1854 def _load_changeset_cache(cls, repo_id, changeset_cache_raw):
1855 from rhodecode.lib.vcs.backends.base import EmptyCommit
1855 from rhodecode.lib.vcs.backends.base import EmptyCommit
1856 dummy = EmptyCommit().__json__()
1856 dummy = EmptyCommit().__json__()
1857 if not changeset_cache_raw:
1857 if not changeset_cache_raw:
1858 dummy['source_repo_id'] = repo_id
1858 dummy['source_repo_id'] = repo_id
1859 return json.loads(json.dumps(dummy))
1859 return json.loads(json.dumps(dummy))
1860
1860
1861 try:
1861 try:
1862 return json.loads(changeset_cache_raw)
1862 return json.loads(changeset_cache_raw)
1863 except TypeError:
1863 except TypeError:
1864 return dummy
1864 return dummy
1865 except Exception:
1865 except Exception:
1866 log.error(traceback.format_exc())
1866 log.error(traceback.format_exc())
1867 return dummy
1867 return dummy
1868
1868
1869 @hybrid_property
1869 @hybrid_property
1870 def changeset_cache(self):
1870 def changeset_cache(self):
1871 return self._load_changeset_cache(self.repo_id, self._changeset_cache)
1871 return self._load_changeset_cache(self.repo_id, self._changeset_cache)
1872
1872
1873 @changeset_cache.setter
1873 @changeset_cache.setter
1874 def changeset_cache(self, val):
1874 def changeset_cache(self, val):
1875 try:
1875 try:
1876 self._changeset_cache = json.dumps(val)
1876 self._changeset_cache = json.dumps(val)
1877 except Exception:
1877 except Exception:
1878 log.error(traceback.format_exc())
1878 log.error(traceback.format_exc())
1879
1879
1880 @hybrid_property
1880 @hybrid_property
1881 def repo_name(self):
1881 def repo_name(self):
1882 return self._repo_name
1882 return self._repo_name
1883
1883
1884 @repo_name.setter
1884 @repo_name.setter
1885 def repo_name(self, value):
1885 def repo_name(self, value):
1886 self._repo_name = value
1886 self._repo_name = value
1887 self.repo_name_hash = sha1(safe_bytes(value))
1887 self.repo_name_hash = sha1(safe_bytes(value))
1888
1888
1889 @classmethod
1889 @classmethod
1890 def normalize_repo_name(cls, repo_name):
1890 def normalize_repo_name(cls, repo_name):
1891 """
1891 """
1892 Normalizes an OS-specific repo_name to the format stored internally in the
1893 database, using URL_SEP
1894
1894
1895 :param cls:
1895 :param cls:
1896 :param repo_name:
1896 :param repo_name:
1897 """
1897 """
1898 return cls.NAME_SEP.join(repo_name.split(os.sep))
1898 return cls.NAME_SEP.join(repo_name.split(os.sep))
1899
1899
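# --- illustrative sketch, not part of the original module ---
# `normalize_repo_name` above swaps the platform path separator for the
# internal one. Assuming NAME_SEP is the URL-style '/', the behaviour is:
#
#     Repository.normalize_repo_name(r'group\repo')   # -> 'group/repo' on Windows
#     Repository.normalize_repo_name('group/repo')    # -> 'group/repo' on POSIX
#
# (The Windows example only applies where os.sep is '\\'.)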
1900 @classmethod
1900 @classmethod
1901 def get_by_repo_name(cls, repo_name, cache=False, identity_cache=False):
1901 def get_by_repo_name(cls, repo_name, cache=False, identity_cache=False):
1902 session = Session()
1902 session = Session()
1903 q = session.query(cls).filter(cls.repo_name == repo_name)
1903 q = session.query(cls).filter(cls.repo_name == repo_name)
1904
1904
1905 if cache:
1905 if cache:
1906 if identity_cache:
1906 if identity_cache:
1907 val = cls.identity_cache(session, 'repo_name', repo_name)
1907 val = cls.identity_cache(session, 'repo_name', repo_name)
1908 if val:
1908 if val:
1909 return val
1909 return val
1910 else:
1910 else:
1911 cache_key = "get_repo_by_name_%s" % _hash_key(repo_name)
1911 cache_key = "get_repo_by_name_%s" % _hash_key(repo_name)
1912 q = q.options(
1912 q = q.options(
1913 FromCache("sql_cache_short", cache_key))
1913 FromCache("sql_cache_short", cache_key))
1914
1914
1915 return q.scalar()
1915 return q.scalar()
1916
1916
1917 @classmethod
1917 @classmethod
1918 def get_by_id_or_repo_name(cls, repoid):
1918 def get_by_id_or_repo_name(cls, repoid):
1919 if isinstance(repoid, int):
1919 if isinstance(repoid, int):
1920 try:
1920 try:
1921 repo = cls.get(repoid)
1921 repo = cls.get(repoid)
1922 except ValueError:
1922 except ValueError:
1923 repo = None
1923 repo = None
1924 else:
1924 else:
1925 repo = cls.get_by_repo_name(repoid)
1925 repo = cls.get_by_repo_name(repoid)
1926 return repo
1926 return repo
1927
1927
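# --- illustrative sketch, not part of the original module ---
# `get_by_id_or_repo_name` above accepts either a numeric primary key or a
# repository name (hypothetical usage inside an application context):
#
#     repo = Repository.get_by_id_or_repo_name(42)            # lookup by repo_id
#     repo = Repository.get_by_id_or_repo_name('group/repo')  # lookup by repo_name
#
# Only a real `int` takes the id path; a numeric string such as '42' is
# treated as a repository name.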
1928 @classmethod
1928 @classmethod
1929 def get_by_full_path(cls, repo_full_path):
1929 def get_by_full_path(cls, repo_full_path):
1930 repo_name = repo_full_path.split(cls.base_path(), 1)[-1]
1930 repo_name = repo_full_path.split(cls.base_path(), 1)[-1]
1931 repo_name = cls.normalize_repo_name(repo_name)
1931 repo_name = cls.normalize_repo_name(repo_name)
1932 return cls.get_by_repo_name(repo_name.strip(URL_SEP))
1932 return cls.get_by_repo_name(repo_name.strip(URL_SEP))
1933
1933
1934 @classmethod
1934 @classmethod
1935 def get_repo_forks(cls, repo_id):
1935 def get_repo_forks(cls, repo_id):
1936 return cls.query().filter(Repository.fork_id == repo_id)
1936 return cls.query().filter(Repository.fork_id == repo_id)
1937
1937
1938 @classmethod
1938 @classmethod
1939 def base_path(cls):
1939 def base_path(cls):
1940 """
1940 """
1941 Returns the base path where all repos are stored
1942
1942
1943 :param cls:
1943 :param cls:
1944 """
1944 """
1945 from rhodecode.lib.utils import get_rhodecode_repo_store_path
1946 return get_rhodecode_repo_store_path()
1947
1947
1948 @classmethod
1948 @classmethod
1949 def get_all_repos(cls, user_id=Optional(None), group_id=Optional(None),
1949 def get_all_repos(cls, user_id=Optional(None), group_id=Optional(None),
1950 case_insensitive=True, archived=False):
1950 case_insensitive=True, archived=False):
1951 q = Repository.query()
1951 q = Repository.query()
1952
1952
1953 if not archived:
1953 if not archived:
1954 q = q.filter(Repository.archived.isnot(true()))
1954 q = q.filter(Repository.archived.isnot(true()))
1955
1955
1956 if not isinstance(user_id, Optional):
1956 if not isinstance(user_id, Optional):
1957 q = q.filter(Repository.user_id == user_id)
1957 q = q.filter(Repository.user_id == user_id)
1958
1958
1959 if not isinstance(group_id, Optional):
1959 if not isinstance(group_id, Optional):
1960 q = q.filter(Repository.group_id == group_id)
1960 q = q.filter(Repository.group_id == group_id)
1961
1961
1962 if case_insensitive:
1962 if case_insensitive:
1963 q = q.order_by(func.lower(Repository.repo_name))
1963 q = q.order_by(func.lower(Repository.repo_name))
1964 else:
1964 else:
1965 q = q.order_by(Repository.repo_name)
1965 q = q.order_by(Repository.repo_name)
1966
1966
1967 return q.all()
1967 return q.all()
1968
1968
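# --- illustrative sketch, not part of the original module ---
# Hypothetical calls to `get_all_repos` above; the user_id/group_id filters
# only apply when a concrete value (not the Optional marker) is passed:
#
#     Repository.get_all_repos()                          # all non-archived repos
#     Repository.get_all_repos(user_id=2)                 # owned by user 2
#     Repository.get_all_repos(group_id=7, archived=True) # include archived repos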
1969 @property
1969 @property
1970 def repo_uid(self):
1970 def repo_uid(self):
1971 return '_{}'.format(self.repo_id)
1971 return '_{}'.format(self.repo_id)
1972
1972
1973 @property
1973 @property
1974 def forks(self):
1974 def forks(self):
1975 """
1975 """
1976 Return forks of this repo
1976 Return forks of this repo
1977 """
1977 """
1978 return Repository.get_repo_forks(self.repo_id)
1978 return Repository.get_repo_forks(self.repo_id)
1979
1979
1980 @property
1980 @property
1981 def parent(self):
1981 def parent(self):
1982 """
1982 """
1983 Returns fork parent
1983 Returns fork parent
1984 """
1984 """
1985 return self.fork
1985 return self.fork
1986
1986
1987 @property
1987 @property
1988 def just_name(self):
1988 def just_name(self):
1989 return self.repo_name.split(self.NAME_SEP)[-1]
1989 return self.repo_name.split(self.NAME_SEP)[-1]
1990
1990
1991 @property
1991 @property
1992 def groups_with_parents(self):
1992 def groups_with_parents(self):
1993 groups = []
1993 groups = []
1994 if self.group is None:
1994 if self.group is None:
1995 return groups
1995 return groups
1996
1996
1997 cur_gr = self.group
1997 cur_gr = self.group
1998 groups.insert(0, cur_gr)
1998 groups.insert(0, cur_gr)
1999 while 1:
1999 while 1:
2000 gr = getattr(cur_gr, 'parent_group', None)
2000 gr = getattr(cur_gr, 'parent_group', None)
2001 cur_gr = cur_gr.parent_group
2001 cur_gr = cur_gr.parent_group
2002 if gr is None:
2002 if gr is None:
2003 break
2003 break
2004 groups.insert(0, gr)
2004 groups.insert(0, gr)
2005
2005
2006 return groups
2006 return groups
2007
2007
2008 @property
2008 @property
2009 def groups_and_repo(self):
2009 def groups_and_repo(self):
2010 return self.groups_with_parents, self
2010 return self.groups_with_parents, self
2011
2011
2012 @property
2013 def repo_path(self):
2014 """
2015 Returns the base full path for this repository, i.e. where it actually
2016 exists on the filesystem
2017 """
2018 return self.base_path()
2019
2023 @property
2020 @property
2024 def repo_full_path(self):
2021 def repo_full_path(self):
2025 p = [self.repo_path]
2022 p = [self.repo_path]
2026 # we need to split the name by / since this is how we store the
2023 # we need to split the name by / since this is how we store the
2027 # names in the database, but that eventually needs to be converted
2024 # names in the database, but that eventually needs to be converted
2028 # into a valid system path
2025 # into a valid system path
2029 p += self.repo_name.split(self.NAME_SEP)
2026 p += self.repo_name.split(self.NAME_SEP)
2030 return os.path.join(*map(safe_str, p))
2027 return os.path.join(*map(safe_str, p))
2031
2028
2032 @property
2029 @property
2033 def cache_keys(self):
2030 def cache_keys(self):
2034 """
2031 """
2035 Returns associated cache keys for that repo
2032 Returns associated cache keys for that repo
2036 """
2033 """
2037 repo_namespace_key = CacheKey.REPO_INVALIDATION_NAMESPACE.format(repo_id=self.repo_id)
2034 repo_namespace_key = CacheKey.REPO_INVALIDATION_NAMESPACE.format(repo_id=self.repo_id)
2038 return CacheKey.query()\
2035 return CacheKey.query()\
2039 .filter(CacheKey.cache_key == repo_namespace_key)\
2036 .filter(CacheKey.cache_key == repo_namespace_key)\
2040 .order_by(CacheKey.cache_key)\
2037 .order_by(CacheKey.cache_key)\
2041 .all()
2038 .all()
2042
2039
2043 @property
2040 @property
2044 def cached_diffs_relative_dir(self):
2041 def cached_diffs_relative_dir(self):
2045 """
2042 """
2043 Return the cached-diffs path relative to the repository store, used for
2044 safe display to users who shouldn't see the absolute store path
2049 """
2046 """
2050 return os.path.join(
2047 return os.path.join(
2051 os.path.dirname(self.repo_name),
2048 os.path.dirname(self.repo_name),
2052 self.cached_diffs_dir.split(os.path.sep)[-1])
2049 self.cached_diffs_dir.split(os.path.sep)[-1])
2053
2050
2054 @property
2051 @property
2055 def cached_diffs_dir(self):
2052 def cached_diffs_dir(self):
2056 path = self.repo_full_path
2053 path = self.repo_full_path
2057 return os.path.join(
2054 return os.path.join(
2058 os.path.dirname(path),
2055 os.path.dirname(path),
2059 f'.__shadow_diff_cache_repo_{self.repo_id}')
2056 f'.__shadow_diff_cache_repo_{self.repo_id}')
2060
2057
2061 def cached_diffs(self):
2058 def cached_diffs(self):
2062 diff_cache_dir = self.cached_diffs_dir
2059 diff_cache_dir = self.cached_diffs_dir
2063 if os.path.isdir(diff_cache_dir):
2060 if os.path.isdir(diff_cache_dir):
2064 return os.listdir(diff_cache_dir)
2061 return os.listdir(diff_cache_dir)
2065 return []
2062 return []
2066
2063
2067 def shadow_repos(self):
2064 def shadow_repos(self):
2068 shadow_repos_pattern = f'.__shadow_repo_{self.repo_id}'
2065 shadow_repos_pattern = f'.__shadow_repo_{self.repo_id}'
2069 return [
2066 return [
2070 x for x in os.listdir(os.path.dirname(self.repo_full_path))
2067 x for x in os.listdir(os.path.dirname(self.repo_full_path))
2071 if x.startswith(shadow_repos_pattern)
2068 if x.startswith(shadow_repos_pattern)
2072 ]
2069 ]
2073
2070
2074 def get_new_name(self, repo_name):
2071 def get_new_name(self, repo_name):
2075 """
2072 """
2073 returns the new full repository name based on the assigned group and new name
2077
2074
2078 :param repo_name:
2075 :param repo_name:
2079 """
2076 """
2080 path_prefix = self.group.full_path_splitted if self.group else []
2077 path_prefix = self.group.full_path_splitted if self.group else []
2081 return self.NAME_SEP.join(path_prefix + [repo_name])
2078 return self.NAME_SEP.join(path_prefix + [repo_name])
2082
2079
2083 @property
2080 @property
2084 def _config(self):
2081 def _config(self):
2085 """
2082 """
2086 Returns db based config object.
2083 Returns db based config object.
2087 """
2084 """
2088 from rhodecode.lib.utils import make_db_config
2085 from rhodecode.lib.utils import make_db_config
2089 return make_db_config(clear_session=False, repo=self)
2086 return make_db_config(clear_session=False, repo=self)
2090
2087
2091 def permissions(self, with_admins=True, with_owner=True,
2088 def permissions(self, with_admins=True, with_owner=True,
2092 expand_from_user_groups=False):
2089 expand_from_user_groups=False):
2093 """
2090 """
2094 Permissions for repositories
2091 Permissions for repositories
2095 """
2092 """
2096 _admin_perm = 'repository.admin'
2093 _admin_perm = 'repository.admin'
2097
2094
2098 owner_row = []
2095 owner_row = []
2099 if with_owner:
2096 if with_owner:
2100 usr = AttributeDict(self.user.get_dict())
2097 usr = AttributeDict(self.user.get_dict())
2101 usr.owner_row = True
2098 usr.owner_row = True
2102 usr.permission = _admin_perm
2099 usr.permission = _admin_perm
2103 usr.permission_id = None
2100 usr.permission_id = None
2104 owner_row.append(usr)
2101 owner_row.append(usr)
2105
2102
2106 super_admin_ids = []
2103 super_admin_ids = []
2107 super_admin_rows = []
2104 super_admin_rows = []
2108 if with_admins:
2105 if with_admins:
2109 for usr in User.get_all_super_admins():
2106 for usr in User.get_all_super_admins():
2110 super_admin_ids.append(usr.user_id)
2107 super_admin_ids.append(usr.user_id)
2111 # if this admin is also owner, don't double the record
2108 # if this admin is also owner, don't double the record
2112 if usr.user_id == owner_row[0].user_id:
2109 if usr.user_id == owner_row[0].user_id:
2113 owner_row[0].admin_row = True
2110 owner_row[0].admin_row = True
2114 else:
2111 else:
2115 usr = AttributeDict(usr.get_dict())
2112 usr = AttributeDict(usr.get_dict())
2116 usr.admin_row = True
2113 usr.admin_row = True
2117 usr.permission = _admin_perm
2114 usr.permission = _admin_perm
2118 usr.permission_id = None
2115 usr.permission_id = None
2119 super_admin_rows.append(usr)
2116 super_admin_rows.append(usr)
2120
2117
2121 q = UserRepoToPerm.query().filter(UserRepoToPerm.repository == self)
2118 q = UserRepoToPerm.query().filter(UserRepoToPerm.repository == self)
2122 q = q.options(joinedload(UserRepoToPerm.repository),
2119 q = q.options(joinedload(UserRepoToPerm.repository),
2123 joinedload(UserRepoToPerm.user),
2120 joinedload(UserRepoToPerm.user),
2124 joinedload(UserRepoToPerm.permission),)
2121 joinedload(UserRepoToPerm.permission),)
2125
2122
2123 # get owners, admins and permissions. We do a trick of re-writing
2124 # sqlalchemy objects into AttributeDicts because the sqlalchemy session
2125 # holds a global reference, so changing one object would propagate to all
2126 # others. This means that if an admin is also an owner, an admin_row change
2127 # would otherwise propagate to both objects
2131 perm_rows = []
2128 perm_rows = []
2132 for _usr in q.all():
2129 for _usr in q.all():
2133 usr = AttributeDict(_usr.user.get_dict())
2130 usr = AttributeDict(_usr.user.get_dict())
2134 # if this user is also owner/admin, mark as duplicate record
2131 # if this user is also owner/admin, mark as duplicate record
2135 if usr.user_id == owner_row[0].user_id or usr.user_id in super_admin_ids:
2132 if usr.user_id == owner_row[0].user_id or usr.user_id in super_admin_ids:
2136 usr.duplicate_perm = True
2133 usr.duplicate_perm = True
2137 # also check if this permission is maybe used by branch_permissions
2134 # also check if this permission is maybe used by branch_permissions
2138 if _usr.branch_perm_entry:
2135 if _usr.branch_perm_entry:
2139 usr.branch_rules = [x.branch_rule_id for x in _usr.branch_perm_entry]
2136 usr.branch_rules = [x.branch_rule_id for x in _usr.branch_perm_entry]
2140
2137
2141 usr.permission = _usr.permission.permission_name
2138 usr.permission = _usr.permission.permission_name
2142 usr.permission_id = _usr.repo_to_perm_id
2139 usr.permission_id = _usr.repo_to_perm_id
2143 perm_rows.append(usr)
2140 perm_rows.append(usr)
2144
2141
2145 # filter the perm rows by 'default' first and then sort them by
2142 # filter the perm rows by 'default' first and then sort them by
2146 # admin,write,read,none permissions sorted again alphabetically in
2143 # admin,write,read,none permissions sorted again alphabetically in
2147 # each group
2144 # each group
2148 perm_rows = sorted(perm_rows, key=display_user_sort)
2145 perm_rows = sorted(perm_rows, key=display_user_sort)
2149
2146
2150 user_groups_rows = []
2147 user_groups_rows = []
2151 if expand_from_user_groups:
2148 if expand_from_user_groups:
2152 for ug in self.permission_user_groups(with_members=True):
2149 for ug in self.permission_user_groups(with_members=True):
2153 for user_data in ug.members:
2150 for user_data in ug.members:
2154 user_groups_rows.append(user_data)
2151 user_groups_rows.append(user_data)
2155
2152
2156 return super_admin_rows + owner_row + perm_rows + user_groups_rows
2153 return super_admin_rows + owner_row + perm_rows + user_groups_rows
2157
2154
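# --- illustrative sketch, not part of the original module ---
# Hypothetical consumer of `permissions()` above; every row is an
# AttributeDict of user fields plus the flags the method attaches:
#
#     for row in repo.permissions(with_admins=True, with_owner=True):
#         print(row.username, row.permission,
#               getattr(row, 'owner_row', False),
#               getattr(row, 'duplicate_perm', False))
#
# Owner and super-admin rows carry permission 'repository.admin' with a
# permission_id of None; regular rows carry the repo_to_perm id.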
2158 def permission_user_groups(self, with_members=True):
2155 def permission_user_groups(self, with_members=True):
2159 q = UserGroupRepoToPerm.query()\
2156 q = UserGroupRepoToPerm.query()\
2160 .filter(UserGroupRepoToPerm.repository == self)
2157 .filter(UserGroupRepoToPerm.repository == self)
2161 q = q.options(joinedload(UserGroupRepoToPerm.repository),
2158 q = q.options(joinedload(UserGroupRepoToPerm.repository),
2162 joinedload(UserGroupRepoToPerm.users_group),
2159 joinedload(UserGroupRepoToPerm.users_group),
2163 joinedload(UserGroupRepoToPerm.permission),)
2160 joinedload(UserGroupRepoToPerm.permission),)
2164
2161
2165 perm_rows = []
2162 perm_rows = []
2166 for _user_group in q.all():
2163 for _user_group in q.all():
2167 entry = AttributeDict(_user_group.users_group.get_dict())
2164 entry = AttributeDict(_user_group.users_group.get_dict())
2168 entry.permission = _user_group.permission.permission_name
2165 entry.permission = _user_group.permission.permission_name
2169 if with_members:
2166 if with_members:
2170 entry.members = [x.user.get_dict()
2167 entry.members = [x.user.get_dict()
2171 for x in _user_group.users_group.members]
2168 for x in _user_group.users_group.members]
2172 perm_rows.append(entry)
2169 perm_rows.append(entry)
2173
2170
2174 perm_rows = sorted(perm_rows, key=display_user_group_sort)
2171 perm_rows = sorted(perm_rows, key=display_user_group_sort)
2175 return perm_rows
2172 return perm_rows
2176
2173
2177 def get_api_data(self, include_secrets=False):
2174 def get_api_data(self, include_secrets=False):
2178 """
2175 """
2179 Common function for generating repo api data
2176 Common function for generating repo api data
2180
2177
2181 :param include_secrets: See :meth:`User.get_api_data`.
2178 :param include_secrets: See :meth:`User.get_api_data`.
2182
2179
2183 """
2180 """
2181 # TODO: mikhail: there is an anti-pattern here; we probably need to
2182 # move these methods to the model level.
2186 from rhodecode.model.settings import SettingsModel
2183 from rhodecode.model.settings import SettingsModel
2187 from rhodecode.model.repo import RepoModel
2184 from rhodecode.model.repo import RepoModel
2188
2185
2189 repo = self
2186 repo = self
2190 _user_id, _time, _reason = self.locked
2187 _user_id, _time, _reason = self.locked
2191
2188
2192 data = {
2189 data = {
2193 'repo_id': repo.repo_id,
2190 'repo_id': repo.repo_id,
2194 'repo_name': repo.repo_name,
2191 'repo_name': repo.repo_name,
2195 'repo_type': repo.repo_type,
2192 'repo_type': repo.repo_type,
2196 'clone_uri': repo.clone_uri or '',
2193 'clone_uri': repo.clone_uri or '',
2197 'push_uri': repo.push_uri or '',
2194 'push_uri': repo.push_uri or '',
2198 'url': RepoModel().get_url(self),
2195 'url': RepoModel().get_url(self),
2199 'private': repo.private,
2196 'private': repo.private,
2200 'created_on': repo.created_on,
2197 'created_on': repo.created_on,
2201 'description': repo.description_safe,
2198 'description': repo.description_safe,
2202 'landing_rev': repo.landing_rev,
2199 'landing_rev': repo.landing_rev,
2203 'owner': repo.user.username,
2200 'owner': repo.user.username,
2204 'fork_of': repo.fork.repo_name if repo.fork else None,
2201 'fork_of': repo.fork.repo_name if repo.fork else None,
2205 'fork_of_id': repo.fork.repo_id if repo.fork else None,
2202 'fork_of_id': repo.fork.repo_id if repo.fork else None,
2206 'enable_statistics': repo.enable_statistics,
2203 'enable_statistics': repo.enable_statistics,
2207 'enable_locking': repo.enable_locking,
2204 'enable_locking': repo.enable_locking,
2208 'enable_downloads': repo.enable_downloads,
2205 'enable_downloads': repo.enable_downloads,
2209 'last_changeset': repo.changeset_cache,
2206 'last_changeset': repo.changeset_cache,
2210 'locked_by': User.get(_user_id).get_api_data(
2207 'locked_by': User.get(_user_id).get_api_data(
2211 include_secrets=include_secrets) if _user_id else None,
2208 include_secrets=include_secrets) if _user_id else None,
2212 'locked_date': time_to_datetime(_time) if _time else None,
2209 'locked_date': time_to_datetime(_time) if _time else None,
2213 'lock_reason': _reason if _reason else None,
2210 'lock_reason': _reason if _reason else None,
2214 }
2211 }
2215
2212
2216 # TODO: mikhail: should be per-repo settings here
2213 # TODO: mikhail: should be per-repo settings here
2217 rc_config = SettingsModel().get_all_settings()
2214 rc_config = SettingsModel().get_all_settings()
2218 repository_fields = str2bool(
2215 repository_fields = str2bool(
2219 rc_config.get('rhodecode_repository_fields'))
2216 rc_config.get('rhodecode_repository_fields'))
2220 if repository_fields:
2217 if repository_fields:
2221 for f in self.extra_fields:
2218 for f in self.extra_fields:
2222 data[f.field_key_prefixed] = f.field_value
2219 data[f.field_key_prefixed] = f.field_value
2223
2220
2224 return data
2221 return data
2225
2222
2226 @classmethod
2223 @classmethod
2227 def lock(cls, repo, user_id, lock_time=None, lock_reason=None):
2224 def lock(cls, repo, user_id, lock_time=None, lock_reason=None):
2228 if not lock_time:
2225 if not lock_time:
2229 lock_time = time.time()
2226 lock_time = time.time()
2230 if not lock_reason:
2227 if not lock_reason:
2231 lock_reason = cls.LOCK_AUTOMATIC
2228 lock_reason = cls.LOCK_AUTOMATIC
2232 repo.locked = [user_id, lock_time, lock_reason]
2229 repo.locked = [user_id, lock_time, lock_reason]
2233 Session().add(repo)
2230 Session().add(repo)
2234 Session().commit()
2231 Session().commit()
2235
2232
2236 @classmethod
2233 @classmethod
2237 def unlock(cls, repo):
2234 def unlock(cls, repo):
2238 repo.locked = None
2235 repo.locked = None
2239 Session().add(repo)
2236 Session().add(repo)
2240 Session().commit()
2237 Session().commit()
2241
2238
2242 @classmethod
2239 @classmethod
2243 def getlock(cls, repo):
2240 def getlock(cls, repo):
2244 return repo.locked
2241 return repo.locked
2245
2242
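# --- illustrative sketch, not part of the original module ---
# Hypothetical usage of the lock helpers above; `lock_time` defaults to
# time.time() and `lock_reason` to cls.LOCK_AUTOMATIC when omitted:
#
#     Repository.lock(repo, user_id=2, lock_reason='manual lock')
#     user_id, lock_time, reason = Repository.getlock(repo)
#     Repository.unlock(repo)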
2246 def get_locking_state(self, action, user_id, only_when_enabled=True):
2243 def get_locking_state(self, action, user_id, only_when_enabled=True):
2247 """
2244 """
2245 Checks locking on this repository. If locking is enabled and a lock is
2246 present, returns a tuple of (make_lock, locked, locked_by).
2247 make_lock has 3 states: None (do nothing), True (make a lock) and
2248 False (release the lock). This value is later propagated to hooks, which
2249 do the actual locking. Think of it as a signal telling the hooks what to do.
2253
2250
2254 """
2251 """
2255 # TODO: johbo: This is part of the business logic and should be moved
2252 # TODO: johbo: This is part of the business logic and should be moved
2256 # into the RepositoryModel.
2253 # into the RepositoryModel.
2257
2254
2258 if action not in ('push', 'pull'):
2255 if action not in ('push', 'pull'):
2259 raise ValueError("Invalid action value: %s" % repr(action))
2256 raise ValueError("Invalid action value: %s" % repr(action))
2260
2257
2261 # defines if locked error should be thrown to user
2258 # defines if locked error should be thrown to user
2262 currently_locked = False
2259 currently_locked = False
2263 # defines if new lock should be made, tri-state
2260 # defines if new lock should be made, tri-state
2264 make_lock = None
2261 make_lock = None
2265 repo = self
2262 repo = self
2266 user = User.get(user_id)
2263 user = User.get(user_id)
2267
2264
2268 lock_info = repo.locked
2265 lock_info = repo.locked
2269
2266
2270 if repo and (repo.enable_locking or not only_when_enabled):
2267 if repo and (repo.enable_locking or not only_when_enabled):
2271 if action == 'push':
2268 if action == 'push':
2272 # check if it's already locked !, if it is compare users
2269 # check if it's already locked !, if it is compare users
2273 locked_by_user_id = lock_info[0]
2270 locked_by_user_id = lock_info[0]
2274 if user.user_id == locked_by_user_id:
2271 if user.user_id == locked_by_user_id:
2275 log.debug(
2272 log.debug(
2276 'Got `push` action from user %s, now unlocking', user)
2273 'Got `push` action from user %s, now unlocking', user)
2277 # unlock if we have push from user who locked
2274 # unlock if we have push from user who locked
2278 make_lock = False
2275 make_lock = False
2279 else:
2276 else:
2280 # we're not the same user who locked, ban with
2277 # we're not the same user who locked, ban with
2281 # code defined in settings (default is 423 HTTP Locked) !
2278 # code defined in settings (default is 423 HTTP Locked) !
2282 log.debug('Repo %s is currently locked by %s', repo, user)
2279 log.debug('Repo %s is currently locked by %s', repo, user)
2283 currently_locked = True
2280 currently_locked = True
2284 elif action == 'pull':
2281 elif action == 'pull':
2285 # [0] user [1] date
2282 # [0] user [1] date
2286 if lock_info[0] and lock_info[1]:
2283 if lock_info[0] and lock_info[1]:
2287 log.debug('Repo %s is currently locked by %s', repo, user)
2284 log.debug('Repo %s is currently locked by %s', repo, user)
2288 currently_locked = True
2285 currently_locked = True
2289 else:
2286 else:
2290 log.debug('Setting lock on repo %s by %s', repo, user)
2287 log.debug('Setting lock on repo %s by %s', repo, user)
2291 make_lock = True
2288 make_lock = True
2292
2289
2293 else:
2290 else:
2291 log.debug('Repository %s does not have locking enabled', repo)
2295
2292
2296 log.debug('FINAL locking values make_lock:%s,locked:%s,locked_by:%s',
2293 log.debug('FINAL locking values make_lock:%s,locked:%s,locked_by:%s',
2297 make_lock, currently_locked, lock_info)
2294 make_lock, currently_locked, lock_info)
2298
2295
2299 from rhodecode.lib.auth import HasRepoPermissionAny
2296 from rhodecode.lib.auth import HasRepoPermissionAny
2300 perm_check = HasRepoPermissionAny('repository.write', 'repository.admin')
2297 perm_check = HasRepoPermissionAny('repository.write', 'repository.admin')
2301 if make_lock and not perm_check(repo_name=repo.repo_name, user=user):
2298 if make_lock and not perm_check(repo_name=repo.repo_name, user=user):
2302 # if we don't have at least write permission we cannot make a lock
2299 # if we don't have at least write permission we cannot make a lock
2300 log.debug('lock state reset back to FALSE due to lack '
2301 'of at least write permission')
2305 make_lock = False
2302 make_lock = False
2306
2303
2307 return make_lock, currently_locked, lock_info
2304 return make_lock, currently_locked, lock_info
2308
2305
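# --- illustrative sketch, not part of the original module ---
# How the tri-state result of `get_locking_state` above is meant to be
# consumed (hypothetical caller, not taken from this module):
#
#     make_lock, currently_locked, lock_info = repo.get_locking_state('push', user_id=2)
#     if currently_locked:
#         pass                        # reject, e.g. with the configured 423 Locked response
#     elif make_lock:
#         Repository.lock(repo, 2)    # acquire the lock for the acting user
#     elif make_lock is False:
#         Repository.unlock(repo)     # push from the lock holder releases it
#     # make_lock is None -> leave the lock state untouched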
2309 @property
2306 @property
2310 def last_commit_cache_update_diff(self):
2307 def last_commit_cache_update_diff(self):
2311 return time.time() - (safe_int(self.changeset_cache.get('updated_on')) or 0)
2308 return time.time() - (safe_int(self.changeset_cache.get('updated_on')) or 0)
2312
2309
2313 @classmethod
2310 @classmethod
2314 def _load_commit_change(cls, last_commit_cache):
2311 def _load_commit_change(cls, last_commit_cache):
2315 from rhodecode.lib.vcs.utils.helpers import parse_datetime
2312 from rhodecode.lib.vcs.utils.helpers import parse_datetime
2316 empty_date = datetime.datetime.fromtimestamp(0)
2313 empty_date = datetime.datetime.fromtimestamp(0)
2317 date_latest = last_commit_cache.get('date', empty_date)
2314 date_latest = last_commit_cache.get('date', empty_date)
2318 try:
2315 try:
2319 return parse_datetime(date_latest)
2316 return parse_datetime(date_latest)
2320 except Exception:
2317 except Exception:
2321 return empty_date
2318 return empty_date
2322
2319
2323 @property
2320 @property
2324 def last_commit_change(self):
2321 def last_commit_change(self):
2325 return self._load_commit_change(self.changeset_cache)
2322 return self._load_commit_change(self.changeset_cache)
2326
2323
2327 @property
2324 @property
2328 def last_db_change(self):
2325 def last_db_change(self):
2329 return self.updated_on
2326 return self.updated_on
2330
2327
2331 @property
2328 @property
2332 def clone_uri_hidden(self):
2329 def clone_uri_hidden(self):
2333 clone_uri = self.clone_uri
2330 clone_uri = self.clone_uri
2334 if clone_uri:
2331 if clone_uri:
2335 import urlobject
2332 import urlobject
2336 url_obj = urlobject.URLObject(cleaned_uri(clone_uri))
2333 url_obj = urlobject.URLObject(cleaned_uri(clone_uri))
2337 if url_obj.password:
2334 if url_obj.password:
2338 clone_uri = url_obj.with_password('*****')
2335 clone_uri = url_obj.with_password('*****')
2339 return clone_uri
2336 return clone_uri
2340
2337
2341 @property
2338 @property
2342 def push_uri_hidden(self):
2339 def push_uri_hidden(self):
2343 push_uri = self.push_uri
2340 push_uri = self.push_uri
2344 if push_uri:
2341 if push_uri:
2345 import urlobject
2342 import urlobject
2346 url_obj = urlobject.URLObject(cleaned_uri(push_uri))
2343 url_obj = urlobject.URLObject(cleaned_uri(push_uri))
2347 if url_obj.password:
2344 if url_obj.password:
2348 push_uri = url_obj.with_password('*****')
2345 push_uri = url_obj.with_password('*****')
2349 return push_uri
2346 return push_uri
2350
2347
2351 def clone_url(self, **override):
2348 def clone_url(self, **override):
2352 from rhodecode.model.settings import SettingsModel
2349 from rhodecode.model.settings import SettingsModel
2353
2350
2354 uri_tmpl = None
2351 uri_tmpl = None
2355 if 'with_id' in override:
2352 if 'with_id' in override:
2356 uri_tmpl = self.DEFAULT_CLONE_URI_ID
2353 uri_tmpl = self.DEFAULT_CLONE_URI_ID
2357 del override['with_id']
2354 del override['with_id']
2358
2355
2359 if 'uri_tmpl' in override:
2356 if 'uri_tmpl' in override:
2360 uri_tmpl = override['uri_tmpl']
2357 uri_tmpl = override['uri_tmpl']
2361 del override['uri_tmpl']
2358 del override['uri_tmpl']
2362
2359
2363 ssh = False
2360 ssh = False
2364 if 'ssh' in override:
2361 if 'ssh' in override:
2365 ssh = True
2362 ssh = True
2366 del override['ssh']
2363 del override['ssh']
2367
2364
2368 # we didn't override our tmpl from **overrides
2365 # we didn't override our tmpl from **overrides
2369 request = get_current_request()
2366 request = get_current_request()
2370 if not uri_tmpl:
2367 if not uri_tmpl:
2371 if hasattr(request, 'call_context') and hasattr(request.call_context, 'rc_config'):
2368 if hasattr(request, 'call_context') and hasattr(request.call_context, 'rc_config'):
2372 rc_config = request.call_context.rc_config
2369 rc_config = request.call_context.rc_config
2373 else:
2370 else:
2374 rc_config = SettingsModel().get_all_settings(cache=True)
2371 rc_config = SettingsModel().get_all_settings(cache=True)
2375
2372
2376 if ssh:
2373 if ssh:
2377 uri_tmpl = rc_config.get(
2374 uri_tmpl = rc_config.get(
2378 'rhodecode_clone_uri_ssh_tmpl') or self.DEFAULT_CLONE_URI_SSH
2375 'rhodecode_clone_uri_ssh_tmpl') or self.DEFAULT_CLONE_URI_SSH
2379
2376
2380 else:
2377 else:
2381 uri_tmpl = rc_config.get(
2378 uri_tmpl = rc_config.get(
2382 'rhodecode_clone_uri_tmpl') or self.DEFAULT_CLONE_URI
2379 'rhodecode_clone_uri_tmpl') or self.DEFAULT_CLONE_URI
2383
2380
2384 return get_clone_url(request=request,
2381 return get_clone_url(request=request,
2385 uri_tmpl=uri_tmpl,
2382 uri_tmpl=uri_tmpl,
2386 repo_name=self.repo_name,
2383 repo_name=self.repo_name,
2387 repo_id=self.repo_id,
2384 repo_id=self.repo_id,
2388 repo_type=self.repo_type,
2385 repo_type=self.repo_type,
2389 **override)
2386 **override)
2390
2387
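# --- illustrative sketch, not part of the original module ---
# Hypothetical calls to `clone_url` above showing the override keys the
# method consumes itself (`with_id`, `uri_tmpl`, `ssh`); any remaining
# kwargs are passed straight through to get_clone_url():
#
#     repo.clone_url()                  # http(s) template from rc settings
#     repo.clone_url(ssh=True)          # rhodecode_clone_uri_ssh_tmpl / DEFAULT_CLONE_URI_SSH
#     repo.clone_url(with_id=True)      # DEFAULT_CLONE_URI_ID template (id-based URL)
#     repo.clone_url(uri_tmpl='...')    # an explicit template wins over settings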
2391 def set_state(self, state):
2388 def set_state(self, state):
2392 self.repo_state = state
2389 self.repo_state = state
2393 Session().add(self)
2390 Session().add(self)
2394 #==========================================================================
2391 #==========================================================================
2395 # SCM PROPERTIES
2392 # SCM PROPERTIES
2396 #==========================================================================
2393 #==========================================================================
2397
2394
2398 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None, maybe_unreachable=False, reference_obj=None):
2395 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None, maybe_unreachable=False, reference_obj=None):
2399 return get_commit_safe(
2396 return get_commit_safe(
2400 self.scm_instance(), commit_id, commit_idx, pre_load=pre_load,
2397 self.scm_instance(), commit_id, commit_idx, pre_load=pre_load,
2401 maybe_unreachable=maybe_unreachable, reference_obj=reference_obj)
2398 maybe_unreachable=maybe_unreachable, reference_obj=reference_obj)
2402
2399
2403 def get_changeset(self, rev=None, pre_load=None):
2400 def get_changeset(self, rev=None, pre_load=None):
2404 warnings.warn("Use get_commit", DeprecationWarning)
2401 warnings.warn("Use get_commit", DeprecationWarning)
2405 commit_id = None
2402 commit_id = None
2406 commit_idx = None
2403 commit_idx = None
2407 if isinstance(rev, str):
2404 if isinstance(rev, str):
2408 commit_id = rev
2405 commit_id = rev
2409 else:
2406 else:
2410 commit_idx = rev
2407 commit_idx = rev
2411 return self.get_commit(commit_id=commit_id, commit_idx=commit_idx,
2408 return self.get_commit(commit_id=commit_id, commit_idx=commit_idx,
2412 pre_load=pre_load)
2409 pre_load=pre_load)
2413
2410
2414 def get_landing_commit(self):
2411 def get_landing_commit(self):
2415 """
2412 """
2416 Returns landing commit, or if that doesn't exist returns the tip
2413 Returns landing commit, or if that doesn't exist returns the tip
2417 """
2414 """
2418 _rev_type, _rev = self.landing_rev
2415 _rev_type, _rev = self.landing_rev
2419 commit = self.get_commit(_rev)
2416 commit = self.get_commit(_rev)
2420 if isinstance(commit, EmptyCommit):
2417 if isinstance(commit, EmptyCommit):
2421 return self.get_commit()
2418 return self.get_commit()
2422 return commit
2419 return commit
2423
2420
2424 def flush_commit_cache(self):
2421 def flush_commit_cache(self):
2425 self.update_commit_cache(cs_cache={'raw_id':'0'})
2422 self.update_commit_cache(cs_cache={'raw_id':'0'})
2426 self.update_commit_cache()
2423 self.update_commit_cache()
2427
2424
2428 def update_commit_cache(self, cs_cache=None, config=None):
2425 def update_commit_cache(self, cs_cache=None, config=None):
2429 """
2426 """
2430 Update cache of last commit for repository
2427 Update cache of last commit for repository
2431 cache_keys should be::
2428 cache_keys should be::
2432
2429
2433 source_repo_id
2430 source_repo_id
2434 short_id
2431 short_id
2435 raw_id
2432 raw_id
2436 revision
2433 revision
2437 parents
2434 parents
2438 message
2435 message
2439 date
2436 date
2440 author
2437 author
2441 updated_on
2438 updated_on
2442
2439
2443 """
2440 """
2444 from rhodecode.lib.vcs.backends.base import BaseCommit
2441 from rhodecode.lib.vcs.backends.base import BaseCommit
2445 from rhodecode.lib.vcs.utils.helpers import parse_datetime
2442 from rhodecode.lib.vcs.utils.helpers import parse_datetime
2446 empty_date = datetime.datetime.fromtimestamp(0)
2443 empty_date = datetime.datetime.fromtimestamp(0)
2447 repo_commit_count = 0
2444 repo_commit_count = 0
2448
2445
2449 if cs_cache is None:
2446 if cs_cache is None:
2450 # use no-cache version here
2447 # use no-cache version here
2451 try:
2448 try:
2452 scm_repo = self.scm_instance(cache=False, config=config)
2449 scm_repo = self.scm_instance(cache=False, config=config)
2453 except VCSError:
2450 except VCSError:
2454 scm_repo = None
2451 scm_repo = None
2455 empty = scm_repo is None or scm_repo.is_empty()
2452 empty = scm_repo is None or scm_repo.is_empty()
2456
2453
2457 if not empty:
2454 if not empty:
2458 cs_cache = scm_repo.get_commit(
2455 cs_cache = scm_repo.get_commit(
2459 pre_load=["author", "date", "message", "parents", "branch"])
2456 pre_load=["author", "date", "message", "parents", "branch"])
2460 repo_commit_count = scm_repo.count()
2457 repo_commit_count = scm_repo.count()
2461 else:
2458 else:
2462 cs_cache = EmptyCommit()
2459 cs_cache = EmptyCommit()
2463
2460
2464 if isinstance(cs_cache, BaseCommit):
2461 if isinstance(cs_cache, BaseCommit):
2465 cs_cache = cs_cache.__json__()
2462 cs_cache = cs_cache.__json__()
2466
2463
2467 def is_outdated(new_cs_cache):
2464 def is_outdated(new_cs_cache):
2468 if (new_cs_cache['raw_id'] != self.changeset_cache['raw_id'] or
2465 if (new_cs_cache['raw_id'] != self.changeset_cache['raw_id'] or
2469 new_cs_cache['revision'] != self.changeset_cache['revision']):
2466 new_cs_cache['revision'] != self.changeset_cache['revision']):
2470 return True
2467 return True
2471 return False
2468 return False
2472
2469
2473 # check if we have maybe already latest cached revision
2470 # check if we have maybe already latest cached revision
2474 if is_outdated(cs_cache) or not self.changeset_cache:
2471 if is_outdated(cs_cache) or not self.changeset_cache:
2475 _current_datetime = datetime.datetime.utcnow()
2472 _current_datetime = datetime.datetime.utcnow()
2476 last_change = cs_cache.get('date') or _current_datetime
2473 last_change = cs_cache.get('date') or _current_datetime
2474 # we check if the last update is newer than the new value;
2475 # if yes, we use the current timestamp instead. Imagine you get an
2476 # old commit pushed 1y ago; we'd otherwise set the last update to 1y ago.
2480 last_change_timestamp = datetime_to_time(last_change)
2477 last_change_timestamp = datetime_to_time(last_change)
2481 current_timestamp = datetime_to_time(last_change)
2478 current_timestamp = datetime_to_time(last_change)
2482 if last_change_timestamp > current_timestamp and not empty:
2479 if last_change_timestamp > current_timestamp and not empty:
2483 cs_cache['date'] = _current_datetime
2480 cs_cache['date'] = _current_datetime
2484
2481
2485 # also store size of repo
2482 # also store size of repo
2486 cs_cache['repo_commit_count'] = repo_commit_count
2483 cs_cache['repo_commit_count'] = repo_commit_count
2487
2484
2488 _date_latest = parse_datetime(cs_cache.get('date') or empty_date)
2485 _date_latest = parse_datetime(cs_cache.get('date') or empty_date)
2489 cs_cache['updated_on'] = time.time()
2486 cs_cache['updated_on'] = time.time()
2490 self.changeset_cache = cs_cache
2487 self.changeset_cache = cs_cache
2491 self.updated_on = last_change
2488 self.updated_on = last_change
2492 Session().add(self)
2489 Session().add(self)
2493 Session().commit()
2490 Session().commit()
2494
2491
2495 else:
2492 else:
2496 if empty:
2493 if empty:
2497 cs_cache = EmptyCommit().__json__()
2494 cs_cache = EmptyCommit().__json__()
2498 else:
2495 else:
2499 cs_cache = self.changeset_cache
2496 cs_cache = self.changeset_cache
2500
2497
2501 _date_latest = parse_datetime(cs_cache.get('date') or empty_date)
2498 _date_latest = parse_datetime(cs_cache.get('date') or empty_date)
2502
2499
2503 cs_cache['updated_on'] = time.time()
2500 cs_cache['updated_on'] = time.time()
2504 self.changeset_cache = cs_cache
2501 self.changeset_cache = cs_cache
2505 self.updated_on = _date_latest
2502 self.updated_on = _date_latest
2506 Session().add(self)
2503 Session().add(self)
2507 Session().commit()
2504 Session().commit()
2508
2505
2509 log.debug('updated repo `%s` with new commit cache %s, and last update_date: %s',
2506 log.debug('updated repo `%s` with new commit cache %s, and last update_date: %s',
2510 self.repo_name, cs_cache, _date_latest)
2507 self.repo_name, cs_cache, _date_latest)
2511
2508
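# --- illustrative sketch, not part of the original module ---
# Approximate shape of the JSON blob kept in `changeset_cache` after
# `update_commit_cache` above has run, based on the cache_keys listed in its
# docstring plus the extras added here (all values invented):
#
#     {
#         "source_repo_id": 42, "short_id": "9fe0aa6c", "raw_id": "9fe0aa6c...",
#         "revision": 1021, "parents": ["..."], "message": "...",
#         "date": "2024-01-01T10:00:00", "author": "...",
#         "repo_commit_count": 1022, "updated_on": 1704103200.0
#     }
#
# The `last_commit_cache_update_diff` property defined earlier simply
# subtracts this `updated_on` value from time.time() to report staleness.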
2512 @property
2509 @property
2513 def tip(self):
2510 def tip(self):
2514 return self.get_commit('tip')
2511 return self.get_commit('tip')
2515
2512
2516 @property
2513 @property
2517 def author(self):
2514 def author(self):
2518 return self.tip.author
2515 return self.tip.author
2519
2516
2520 @property
2517 @property
2521 def last_change(self):
2518 def last_change(self):
2522 return self.scm_instance().last_change
2519 return self.scm_instance().last_change
2523
2520
2524 def get_comments(self, revisions=None):
2521 def get_comments(self, revisions=None):
2525 """
2522 """
2526 Returns comments for this repository grouped by revisions
2523 Returns comments for this repository grouped by revisions
2527
2524
2528 :param revisions: filter query by revisions only
2525 :param revisions: filter query by revisions only
2529 """
2526 """
2530 cmts = ChangesetComment.query()\
2527 cmts = ChangesetComment.query()\
2531 .filter(ChangesetComment.repo == self)
2528 .filter(ChangesetComment.repo == self)
2532 if revisions:
2529 if revisions:
2533 cmts = cmts.filter(ChangesetComment.revision.in_(revisions))
2530 cmts = cmts.filter(ChangesetComment.revision.in_(revisions))
2534 grouped = collections.defaultdict(list)
2531 grouped = collections.defaultdict(list)
2535 for cmt in cmts.all():
2532 for cmt in cmts.all():
2536 grouped[cmt.revision].append(cmt)
2533 grouped[cmt.revision].append(cmt)
2537 return grouped
2534 return grouped
2538
2535
2539 def statuses(self, revisions=None):
2536 def statuses(self, revisions=None):
2540 """
2537 """
2541 Returns statuses for this repository
2538 Returns statuses for this repository
2542
2539
2543 :param revisions: list of revisions to get statuses for
2540 :param revisions: list of revisions to get statuses for
2544 """
2541 """
2545 statuses = ChangesetStatus.query()\
2542 statuses = ChangesetStatus.query()\
2546 .filter(ChangesetStatus.repo == self)\
2543 .filter(ChangesetStatus.repo == self)\
2547 .filter(ChangesetStatus.version == 0)
2544 .filter(ChangesetStatus.version == 0)
2548
2545
2549 if revisions:
2546 if revisions:
2550 # Try doing the filtering in chunks to avoid hitting limits
2547 # Try doing the filtering in chunks to avoid hitting limits
2551 size = 500
2548 size = 500
2552 status_results = []
2549 status_results = []
2553 for chunk in range(0, len(revisions), size):
2550 for chunk in range(0, len(revisions), size):
2554 status_results += statuses.filter(
2551 status_results += statuses.filter(
2555 ChangesetStatus.revision.in_(
2552 ChangesetStatus.revision.in_(
2556 revisions[chunk: chunk+size])
2553 revisions[chunk: chunk+size])
2557 ).all()
2554 ).all()
2558 else:
2555 else:
2559 status_results = statuses.all()
2556 status_results = statuses.all()
2560
2557
2561 grouped = {}
2558 grouped = {}
2562
2559
2560 # maybe we have a newly opened pull request without a status?
2564 stat = ChangesetStatus.STATUS_UNDER_REVIEW
2561 stat = ChangesetStatus.STATUS_UNDER_REVIEW
2565 status_lbl = ChangesetStatus.get_status_lbl(stat)
2562 status_lbl = ChangesetStatus.get_status_lbl(stat)
2566 for pr in PullRequest.query().filter(PullRequest.source_repo == self).all():
2563 for pr in PullRequest.query().filter(PullRequest.source_repo == self).all():
2567 for rev in pr.revisions:
2564 for rev in pr.revisions:
2568 pr_id = pr.pull_request_id
2565 pr_id = pr.pull_request_id
2569 pr_repo = pr.target_repo.repo_name
2566 pr_repo = pr.target_repo.repo_name
2570 grouped[rev] = [stat, status_lbl, pr_id, pr_repo]
2567 grouped[rev] = [stat, status_lbl, pr_id, pr_repo]
2571
2568
2572 for stat in status_results:
2569 for stat in status_results:
2573 pr_id = pr_repo = None
2570 pr_id = pr_repo = None
2574 if stat.pull_request:
2571 if stat.pull_request:
2575 pr_id = stat.pull_request.pull_request_id
2572 pr_id = stat.pull_request.pull_request_id
2576 pr_repo = stat.pull_request.target_repo.repo_name
2573 pr_repo = stat.pull_request.target_repo.repo_name
2577 grouped[stat.revision] = [str(stat.status), stat.status_lbl,
2574 grouped[stat.revision] = [str(stat.status), stat.status_lbl,
2578 pr_id, pr_repo]
2575 pr_id, pr_repo]
2579 return grouped
2576 return grouped
2580
2577
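# --- illustrative sketch, not part of the original module ---
# `statuses()` above maps each revision to a 4-item list; a hypothetical
# consumer:
#
#     for rev, (status, status_lbl, pr_id, pr_repo) in repo.statuses(revs).items():
#         print(rev, status_lbl, pr_id, pr_repo)
#
# Revisions that only appear in an open pull request (no stored status yet)
# come back as STATUS_UNDER_REVIEW together with the pull request id and the
# target repository name.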
2581 # ==========================================================================
2578 # ==========================================================================
2582 # SCM CACHE INSTANCE
2579 # SCM CACHE INSTANCE
2583 # ==========================================================================
2580 # ==========================================================================
2584
2581
2585 def scm_instance(self, **kwargs):
2582 def scm_instance(self, **kwargs):
2586 import rhodecode
2583 import rhodecode
2587
2584
2585 # Passing a config will not hit the cache; currently this is only used
2586 # for repo2dbmapper
2590 config = kwargs.pop('config', None)
2587 config = kwargs.pop('config', None)
2591 cache = kwargs.pop('cache', None)
2588 cache = kwargs.pop('cache', None)
2592 vcs_full_cache = kwargs.pop('vcs_full_cache', None)
2589 vcs_full_cache = kwargs.pop('vcs_full_cache', None)
2593 if vcs_full_cache is not None:
2590 if vcs_full_cache is not None:
2594 # allows override global config
2591 # allows override global config
2595 full_cache = vcs_full_cache
2592 full_cache = vcs_full_cache
2596 else:
2593 else:
2597 full_cache = rhodecode.ConfigGet().get_bool('vcs_full_cache')
2594 full_cache = rhodecode.ConfigGet().get_bool('vcs_full_cache')
2598 # if cache is NOT defined use default global, else we have a full
2595 # if cache is NOT defined use default global, else we have a full
2599 # control over cache behaviour
2596 # control over cache behaviour
2600 if cache is None and full_cache and not config:
2597 if cache is None and full_cache and not config:
2601 log.debug('Initializing pure cached instance for %s', self.repo_path)
2598 log.debug('Initializing pure cached instance for %s', self.repo_path)
2602 return self._get_instance_cached()
2599 return self._get_instance_cached()
2603
2600
2604 # cache here is sent to the "vcs server"
2601 # cache here is sent to the "vcs server"
2605 return self._get_instance(cache=bool(cache), config=config)
2602 return self._get_instance(cache=bool(cache), config=config)
2606
2603
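# --- illustrative sketch, not part of the original module ---
# How the cache-related kwargs of `scm_instance` above interact
# (hypothetical calls; `vcs_full_cache` overrides the global setting that is
# otherwise read via rhodecode.ConfigGet()):
#
#     repo.scm_instance()                      # long-term cached instance when the global flag is on
#     repo.scm_instance(cache=False)           # explicit control over the vcs.remote cache
#     repo.scm_instance(vcs_full_cache=False)  # skip the long-term instance cache entirely
#     repo.scm_instance(config=custom_config)  # passing a config always bypasses the cached path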
2607 def _get_instance_cached(self):
2604 def _get_instance_cached(self):
2608 from rhodecode.lib import rc_cache
2605 from rhodecode.lib import rc_cache
2609
2606
2610 cache_namespace_uid = f'repo_instance.{self.repo_id}'
2607 cache_namespace_uid = f'repo_instance.{self.repo_id}'
2611 region = rc_cache.get_or_create_region('cache_repo_longterm', cache_namespace_uid)
2608 region = rc_cache.get_or_create_region('cache_repo_longterm', cache_namespace_uid)
2612
2609
2610 # we must use a thread-scoped cache here, because each gevent thread
2611 # needs its own, non-shared connection and cache.
2612 # we also alter `args` so the cache key is individual for every green thread.
2616 repo_namespace_key = CacheKey.REPO_INVALIDATION_NAMESPACE.format(repo_id=self.repo_id)
2613 repo_namespace_key = CacheKey.REPO_INVALIDATION_NAMESPACE.format(repo_id=self.repo_id)
2617 inv_context_manager = rc_cache.InvalidationContext(key=repo_namespace_key, thread_scoped=True)
2614 inv_context_manager = rc_cache.InvalidationContext(key=repo_namespace_key, thread_scoped=True)
2618
2615
2619 # our wrapped caching function that takes state_uid to save the previous state in
2616 # our wrapped caching function that takes state_uid to save the previous state in
2620 def cache_generator(_state_uid):
2617 def cache_generator(_state_uid):
2621
2618
2622 @region.conditional_cache_on_arguments(namespace=cache_namespace_uid)
2619 @region.conditional_cache_on_arguments(namespace=cache_namespace_uid)
2623 def get_instance_cached(_repo_id, _process_context_id):
2620 def get_instance_cached(_repo_id, _process_context_id):
2624 # we save in cached func the generation state so we can detect a change and invalidate caches
2621 # we save in cached func the generation state so we can detect a change and invalidate caches
2625 return _state_uid, self._get_instance(repo_state_uid=_state_uid)
2622 return _state_uid, self._get_instance(repo_state_uid=_state_uid)
2626
2623
2627 return get_instance_cached
2624 return get_instance_cached
2628
2625
2629 with inv_context_manager as invalidation_context:
2626 with inv_context_manager as invalidation_context:
2630 cache_state_uid = invalidation_context.state_uid
2627 cache_state_uid = invalidation_context.state_uid
2631 cache_func = cache_generator(cache_state_uid)
2628 cache_func = cache_generator(cache_state_uid)
2632
2629
2633 args = self.repo_id, inv_context_manager.proc_key
2630 args = self.repo_id, inv_context_manager.proc_key
2634
2631
2635 previous_state_uid, instance = cache_func(*args)
2632 previous_state_uid, instance = cache_func(*args)
2636
2633
2637 # now compare keys, the "cache" state vs expected state.
2634 # now compare keys, the "cache" state vs expected state.
2638 if previous_state_uid != cache_state_uid:
2635 if previous_state_uid != cache_state_uid:
2639 log.warning('Cached state uid %s is different than current state uid %s',
2636 log.warning('Cached state uid %s is different than current state uid %s',
2640 previous_state_uid, cache_state_uid)
2637 previous_state_uid, cache_state_uid)
2641 _, instance = cache_func.refresh(*args)
2638 _, instance = cache_func.refresh(*args)
2642
2639
2643 log.debug('Repo instance fetched in %.4fs', inv_context_manager.compute_time)
2640 log.debug('Repo instance fetched in %.4fs', inv_context_manager.compute_time)
2644 return instance
2641 return instance
2645
2642
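    # Sketch of the invalidation pattern used above, with hypothetical names and a
    # plain dict standing in for the dogpile cache region (illustrative only, not
    # the rc_cache API): the state uid is stored with the value, and a mismatch
    # forces a refresh of the cached entry.
    #
    #     _cache = {}
    #
    #     def cached_call(state_uid, key, compute):
    #         if key not in _cache:
    #             _cache[key] = (state_uid, compute())
    #         stored_uid, value = _cache[key]
    #         if stored_uid != state_uid:              # state changed -> refresh entry
    #             _cache[key] = (state_uid, compute())
    #             stored_uid, value = _cache[key]
    #         return value
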
    def _get_instance(self, cache=True, config=None, repo_state_uid=None):
        log.debug('Initializing %s instance `%s` with cache flag set to: %s',
                  self.repo_type, self.repo_path, cache)
        config = config or self._config
        custom_wire = {
            'cache': cache,  # controls the vcs.remote cache
            'repo_state_uid': repo_state_uid
        }

        repo = get_vcs_instance(
            repo_path=safe_str(self.repo_full_path),
            config=config,
            with_wire=custom_wire,
            create=False,
            _vcs_alias=self.repo_type)
        if repo is not None:
            repo.count()  # cache rebuild

        return repo

    def get_shadow_repository_path(self, workspace_id):
        from rhodecode.lib.vcs.backends.base import BaseRepository
        shadow_repo_path = BaseRepository._get_shadow_repository_path(
            self.repo_full_path, self.repo_id, workspace_id)
        return shadow_repo_path

    def __json__(self):
        return {'landing_rev': self.landing_rev}

    def get_dict(self):

        # Since we transformed `repo_name` to a hybrid property, we need to
        # keep compatibility with the code which uses `repo_name` field.

        result = super(Repository, self).get_dict()
        result['repo_name'] = result.pop('_repo_name', None)
        result.pop('_changeset_cache', '')
        return result


class RepoGroup(Base, BaseModel):
    __tablename__ = 'groups'
    __table_args__ = (
        UniqueConstraint('group_name', 'group_parent_id'),
        base_table_args,
    )

    CHOICES_SEPARATOR = '/'  # used to generate select2 choices for nested groups

    group_id = Column("group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    _group_name = Column("group_name", String(255), nullable=False, unique=True, default=None)
    group_name_hash = Column("repo_group_name_hash", String(1024), nullable=False, unique=False)
    group_parent_id = Column("group_parent_id", Integer(), ForeignKey('groups.group_id'), nullable=True, unique=None, default=None)
    group_description = Column("group_description", String(10000), nullable=True, unique=None, default=None)
    enable_locking = Column("enable_locking", Boolean(), nullable=False, unique=None, default=False)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None)
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    updated_on = Column('updated_on', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now)
    personal = Column('personal', Boolean(), nullable=True, unique=None, default=None)
    _changeset_cache = Column("changeset_cache", LargeBinary(), nullable=True)  # JSON data

    repo_group_to_perm = relationship('UserRepoGroupToPerm', cascade='all', order_by='UserRepoGroupToPerm.group_to_perm_id', back_populates='group')
    users_group_to_perm = relationship('UserGroupRepoGroupToPerm', cascade='all', back_populates='group')
    parent_group = relationship('RepoGroup', remote_side=group_id)
    user = relationship('User', back_populates='repository_groups')
    integrations = relationship('Integration', cascade="all, delete-orphan", back_populates='repo_group')

    # no cascade, set NULL
    scope_artifacts = relationship('FileStore', primaryjoin='FileStore.scope_repo_group_id==RepoGroup.group_id', viewonly=True)

    def __init__(self, group_name='', parent_group=None):
        self.group_name = group_name
        self.parent_group = parent_group

    def __repr__(self):
        return f"<{self.cls_name}('id:{self.group_id}:{self.group_name}')>"

    @hybrid_property
    def group_name(self):
        return self._group_name

    @group_name.setter
    def group_name(self, value):
        self._group_name = value
        self.group_name_hash = self.hash_repo_group_name(value)

    @classmethod
    def _load_changeset_cache(cls, repo_id, changeset_cache_raw):
        from rhodecode.lib.vcs.backends.base import EmptyCommit
        dummy = EmptyCommit().__json__()
        if not changeset_cache_raw:
            dummy['source_repo_id'] = repo_id
            return json.loads(json.dumps(dummy))

        try:
            return json.loads(changeset_cache_raw)
        except TypeError:
            return dummy
        except Exception:
            log.error(traceback.format_exc())
            return dummy

    @hybrid_property
    def changeset_cache(self):
        return self._load_changeset_cache('', self._changeset_cache)

    @changeset_cache.setter
    def changeset_cache(self, val):
        try:
            self._changeset_cache = json.dumps(val)
        except Exception:
            log.error(traceback.format_exc())

    @validates('group_parent_id')
    def validate_group_parent_id(self, key, val):
        """
        Check cycle references for a parent group to self
        """
        if self.group_id and val:
            assert val != self.group_id

        return val

    @hybrid_property
    def description_safe(self):
        from rhodecode.lib import helpers as h
        return h.escape(self.group_description)

    @classmethod
    def hash_repo_group_name(cls, repo_group_name):
        val = remove_formatting(repo_group_name)
        val = safe_str(val).lower()
        chars = []
        for c in val:
            if c not in string.ascii_letters:
                c = str(ord(c))
            chars.append(c)

        return ''.join(chars)

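    # Worked example for hash_repo_group_name (illustrative, assuming
    # remove_formatting leaves plain ASCII input unchanged): every character that
    # is not an ASCII letter is replaced by its ordinal value, so
    # 'Docs/Team-1' -> 'docs/team-1' -> 'docs' + '47' + 'team' + '45' + '49'
    # -> 'docs47team4549'.
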
    @classmethod
    def _generate_choice(cls, repo_group):
        from webhelpers2.html import literal as _literal

        def _name(k):
            return _literal(cls.CHOICES_SEPARATOR.join(k))

        return repo_group.group_id, _name(repo_group.full_path_splitted)

    @classmethod
    def groups_choices(cls, groups=None, show_empty_group=True):
        if not groups:
            groups = cls.query().all()

        repo_groups = []
        if show_empty_group:
            repo_groups = [(-1, '-- %s --' % _('No parent'))]

        repo_groups.extend([cls._generate_choice(x) for x in groups])

        repo_groups = sorted(
            repo_groups, key=lambda t: t[1].split(cls.CHOICES_SEPARATOR)[0])
        return repo_groups

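    # Illustrative usage sketch: building the parent-group select box choices,
    # e.g. for a form. Values are (group_id, 'top/child') tuples.
    #
    #     choices = RepoGroup.groups_choices()                        # includes the (-1, '-- No parent --') entry
    #     choices = RepoGroup.groups_choices(show_empty_group=False)  # only real groups
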
    @classmethod
    def url_sep(cls):
        return URL_SEP

    @classmethod
    def get_by_group_name(cls, group_name, cache=False, case_insensitive=False):
        if case_insensitive:
            gr = cls.query().filter(func.lower(cls.group_name)
                                    == func.lower(group_name))
        else:
            gr = cls.query().filter(cls.group_name == group_name)
        if cache:
            name_key = _hash_key(group_name)
            gr = gr.options(
                FromCache("sql_cache_short", f"get_group_{name_key}"))
        return gr.scalar()

    @classmethod
    def get_user_personal_repo_group(cls, user_id):
        user = User.get(user_id)
        if user.username == User.DEFAULT_USER:
            return None

        return cls.query()\
            .filter(cls.personal == true()) \
            .filter(cls.user == user) \
            .order_by(cls.group_id.asc()) \
            .first()

    @classmethod
    def get_all_repo_groups(cls, user_id=Optional(None), group_id=Optional(None),
                            case_insensitive=True):
        q = RepoGroup.query()

        if not isinstance(user_id, Optional):
            q = q.filter(RepoGroup.user_id == user_id)

        if not isinstance(group_id, Optional):
            q = q.filter(RepoGroup.group_parent_id == group_id)

        if case_insensitive:
            q = q.order_by(func.lower(RepoGroup.group_name))
        else:
            q = q.order_by(RepoGroup.group_name)
        return q.all()

    @property
    def parents(self, parents_recursion_limit=10):
        groups = []
        if self.parent_group is None:
            return groups
        cur_gr = self.parent_group
        groups.insert(0, cur_gr)
        cnt = 0
        while 1:
            cnt += 1
            gr = getattr(cur_gr, 'parent_group', None)
            cur_gr = cur_gr.parent_group
            if gr is None:
                break
            if cnt == parents_recursion_limit:
                # this will prevent accidental infinite loops
                log.error('more than %s parents found for group %s, stopping '
                          'recursive parent fetching', parents_recursion_limit, self)
                break

            groups.insert(0, gr)
        return groups

    @property
    def last_commit_cache_update_diff(self):
        return time.time() - (safe_int(self.changeset_cache.get('updated_on')) or 0)

    @classmethod
    def _load_commit_change(cls, last_commit_cache):
        from rhodecode.lib.vcs.utils.helpers import parse_datetime
        empty_date = datetime.datetime.fromtimestamp(0)
        date_latest = last_commit_cache.get('date', empty_date)
        try:
            return parse_datetime(date_latest)
        except Exception:
            return empty_date

    @property
    def last_commit_change(self):
        return self._load_commit_change(self.changeset_cache)

    @property
    def last_db_change(self):
        return self.updated_on

    @property
    def children(self):
        return RepoGroup.query().filter(RepoGroup.parent_group == self)

    @property
    def name(self):
        return self.group_name.split(RepoGroup.url_sep())[-1]

    @property
    def full_path(self):
        return self.group_name

    @property
    def full_path_splitted(self):
        return self.group_name.split(RepoGroup.url_sep())

    @property
    def repositories(self):
        return Repository.query()\
            .filter(Repository.group == self)\
            .order_by(Repository.repo_name)

    @property
    def repositories_recursive_count(self):
        cnt = self.repositories.count()

        def children_count(group):
            cnt = 0
            for child in group.children:
                cnt += child.repositories.count()
                cnt += children_count(child)
            return cnt

        return cnt + children_count(self)

    def _recursive_objects(self, include_repos=True, include_groups=True):
        all_ = []

        def _get_members(root_gr):
            if include_repos:
                for r in root_gr.repositories:
                    all_.append(r)
            childs = root_gr.children.all()
            if childs:
                for gr in childs:
                    if include_groups:
                        all_.append(gr)
                    _get_members(gr)

        root_group = []
        if include_groups:
            root_group = [self]

        _get_members(self)
        return root_group + all_

    def recursive_groups_and_repos(self):
        """
        Recursively return all groups, with the repositories in those groups
        """
        return self._recursive_objects()

    def recursive_groups(self):
        """
        Returns all child groups for this group, including children of children
        """
        return self._recursive_objects(include_repos=False)

    def recursive_repos(self):
        """
        Returns all child repositories for this group
        """
        return self._recursive_objects(include_groups=False)

    def get_new_name(self, group_name):
        """
        returns the new full group name based on the parent and the new name

        :param group_name:
        """
        path_prefix = (self.parent_group.full_path_splitted if
                       self.parent_group else [])
        return RepoGroup.url_sep().join(path_prefix + [group_name])

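    # Worked example for get_new_name (illustrative): for a group whose parent is
    # 'company/backend', get_new_name('tools') joins the parent path and the new
    # name with URL_SEP ('/'), producing 'company/backend/tools'; with no parent
    # it simply returns 'tools'.
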
    def update_commit_cache(self, config=None):
        """
        Update cache of last commit for newest repository inside this repository group.
        cache_keys should be::

            source_repo_id
            short_id
            raw_id
            revision
            parents
            message
            date
            author

        """
        from rhodecode.lib.vcs.utils.helpers import parse_datetime
        empty_date = datetime.datetime.fromtimestamp(0)

        def repo_groups_and_repos(root_gr):
            for _repo in root_gr.repositories:
                yield _repo
            for child_group in root_gr.children.all():
                yield child_group

        latest_repo_cs_cache = {}
        for obj in repo_groups_and_repos(self):
            repo_cs_cache = obj.changeset_cache
            date_latest = latest_repo_cs_cache.get('date', empty_date)
            date_current = repo_cs_cache.get('date', empty_date)
            current_timestamp = datetime_to_time(parse_datetime(date_latest))
            if current_timestamp < datetime_to_time(parse_datetime(date_current)):
                latest_repo_cs_cache = repo_cs_cache
                if hasattr(obj, 'repo_id'):
                    latest_repo_cs_cache['source_repo_id'] = obj.repo_id
                else:
                    latest_repo_cs_cache['source_repo_id'] = repo_cs_cache.get('source_repo_id')

        _date_latest = parse_datetime(latest_repo_cs_cache.get('date') or empty_date)

        latest_repo_cs_cache['updated_on'] = time.time()
        self.changeset_cache = latest_repo_cs_cache
        self.updated_on = _date_latest
        Session().add(self)
        Session().commit()

        log.debug('updated repo group `%s` with new commit cache %s, and last update_date: %s',
                  self.group_name, latest_repo_cs_cache, _date_latest)

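    # Illustrative usage sketch: after a push lands in any repository below a
    # group, refresh the group's "last change" summary (group lookup shown for
    # context; commit handling is done inside update_commit_cache itself):
    #
    #     group = RepoGroup.get_by_group_name('company/backend')
    #     group.update_commit_cache()    # walks child repos/groups, stores newest commit info
    #     group.last_commit_change       # now reflects the newest commit date found
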
    def permissions(self, with_admins=True, with_owner=True,
                    expand_from_user_groups=False):
        """
        Permissions for repository groups
        """
        _admin_perm = 'group.admin'

        owner_row = []
        if with_owner:
            usr = AttributeDict(self.user.get_dict())
            usr.owner_row = True
            usr.permission = _admin_perm
            owner_row.append(usr)

        super_admin_ids = []
        super_admin_rows = []
        if with_admins:
            for usr in User.get_all_super_admins():
                super_admin_ids.append(usr.user_id)
                # if this admin is also the owner, don't double the record
                if usr.user_id == owner_row[0].user_id:
                    owner_row[0].admin_row = True
                else:
                    usr = AttributeDict(usr.get_dict())
                    usr.admin_row = True
                    usr.permission = _admin_perm
                    super_admin_rows.append(usr)

        q = UserRepoGroupToPerm.query().filter(UserRepoGroupToPerm.group == self)
        q = q.options(joinedload(UserRepoGroupToPerm.group),
                      joinedload(UserRepoGroupToPerm.user),
                      joinedload(UserRepoGroupToPerm.permission),)

        # get owners, admins and permissions. We do a trick of re-writing
        # objects from sqlalchemy to named tuples because the sqlalchemy session
        # has a global reference, and changing one object propagates to all
        # others. This means that if an admin is also an owner, an admin_row
        # change would propagate to both objects.
        perm_rows = []
        for _usr in q.all():
            usr = AttributeDict(_usr.user.get_dict())
            # if this user is also owner/admin, mark as a duplicate record
            if usr.user_id == owner_row[0].user_id or usr.user_id in super_admin_ids:
                usr.duplicate_perm = True
            usr.permission = _usr.permission.permission_name
            perm_rows.append(usr)

        # filter the perm rows by 'default' first and then sort them by
        # admin, write, read, none permissions, sorted again alphabetically in
        # each group
        perm_rows = sorted(perm_rows, key=display_user_sort)

        user_groups_rows = []
        if expand_from_user_groups:
            for ug in self.permission_user_groups(with_members=True):
                for user_data in ug.members:
                    user_groups_rows.append(user_data)

        return super_admin_rows + owner_row + perm_rows + user_groups_rows

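    # Illustrative usage sketch: listing the effective permission rows for a group,
    # e.g. to feed a permission summary page. The rows are AttributeDict objects
    # built from user.get_dict() above, so attribute names such as `username` are
    # assumed from that dict.
    #
    #     rows = repo_group.permissions(with_admins=True, with_owner=True)
    #     for row in rows:
    #         print(row.username, row.permission)
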
    def permission_user_groups(self, with_members=False):
        q = UserGroupRepoGroupToPerm.query()\
            .filter(UserGroupRepoGroupToPerm.group == self)
        q = q.options(joinedload(UserGroupRepoGroupToPerm.group),
                      joinedload(UserGroupRepoGroupToPerm.users_group),
                      joinedload(UserGroupRepoGroupToPerm.permission),)

        perm_rows = []
        for _user_group in q.all():
            entry = AttributeDict(_user_group.users_group.get_dict())
            entry.permission = _user_group.permission.permission_name
            if with_members:
                entry.members = [x.user.get_dict()
                                 for x in _user_group.users_group.members]
            perm_rows.append(entry)

        perm_rows = sorted(perm_rows, key=display_user_group_sort)
        return perm_rows

    def get_api_data(self):
        """
        Common function for generating api data

        """
        group = self
        data = {
            'group_id': group.group_id,
            'group_name': group.group_name,
            'group_description': group.description_safe,
            'parent_group': group.parent_group.group_name if group.parent_group else None,
            'repositories': [x.repo_name for x in group.repositories],
            'owner': group.user.username,
        }
        return data

    def get_dict(self):
        # Since we transformed `group_name` to a hybrid property, we need to
        # keep compatibility with the code which uses `group_name` field.
        result = super(RepoGroup, self).get_dict()
        result['group_name'] = result.pop('_group_name', None)
        result.pop('_changeset_cache', '')
        return result


class Permission(Base, BaseModel):
    __tablename__ = 'permissions'
    __table_args__ = (
        Index('p_perm_name_idx', 'permission_name'),
        base_table_args,
    )

    PERMS = [
        ('hg.admin', _('RhodeCode Super Administrator')),

        ('repository.none', _('Repository no access')),
        ('repository.read', _('Repository read access')),
        ('repository.write', _('Repository write access')),
        ('repository.admin', _('Repository admin access')),

        ('group.none', _('Repository group no access')),
        ('group.read', _('Repository group read access')),
        ('group.write', _('Repository group write access')),
        ('group.admin', _('Repository group admin access')),

        ('usergroup.none', _('User group no access')),
        ('usergroup.read', _('User group read access')),
        ('usergroup.write', _('User group write access')),
        ('usergroup.admin', _('User group admin access')),

        ('branch.none', _('Branch no permissions')),
        ('branch.merge', _('Branch access by web merge')),
        ('branch.push', _('Branch access by push')),
        ('branch.push_force', _('Branch access by push with force')),

        ('hg.repogroup.create.false', _('Repository Group creation disabled')),
        ('hg.repogroup.create.true', _('Repository Group creation enabled')),

        ('hg.usergroup.create.false', _('User Group creation disabled')),
        ('hg.usergroup.create.true', _('User Group creation enabled')),

        ('hg.create.none', _('Repository creation disabled')),
        ('hg.create.repository', _('Repository creation enabled')),
        ('hg.create.write_on_repogroup.true', _('Repository creation enabled with write permission to a repository group')),
        ('hg.create.write_on_repogroup.false', _('Repository creation disabled with write permission to a repository group')),

        ('hg.fork.none', _('Repository forking disabled')),
        ('hg.fork.repository', _('Repository forking enabled')),

        ('hg.register.none', _('Registration disabled')),
        ('hg.register.manual_activate', _('User Registration with manual account activation')),
        ('hg.register.auto_activate', _('User Registration with automatic account activation')),

        ('hg.password_reset.enabled', _('Password reset enabled')),
        ('hg.password_reset.hidden', _('Password reset hidden')),
        ('hg.password_reset.disabled', _('Password reset disabled')),

        ('hg.extern_activate.manual', _('Manual activation of external account')),
        ('hg.extern_activate.auto', _('Automatic activation of external account')),

        ('hg.inherit_default_perms.false', _('Inherit object permissions from default user disabled')),
        ('hg.inherit_default_perms.true', _('Inherit object permissions from default user enabled')),
    ]

    # definition of system default permissions for the DEFAULT user, created on
    # system setup
    DEFAULT_USER_PERMISSIONS = [
        # object perms
        'repository.read',
        'group.read',
        'usergroup.read',
        # branch; for backward compat we need the same value as before, hence forced push
        'branch.push_force',
        # global
        'hg.create.repository',
        'hg.repogroup.create.false',
        'hg.usergroup.create.false',
        'hg.create.write_on_repogroup.true',
        'hg.fork.repository',
        'hg.register.manual_activate',
        'hg.password_reset.enabled',
        'hg.extern_activate.auto',
        'hg.inherit_default_perms.true',
    ]

    # Weight defines which permissions are more important.
    # The higher the number, the more important.
    PERM_WEIGHTS = {
        'repository.none': 0,
        'repository.read': 1,
        'repository.write': 3,
        'repository.admin': 4,

        'group.none': 0,
        'group.read': 1,
        'group.write': 3,
        'group.admin': 4,

        'usergroup.none': 0,
        'usergroup.read': 1,
        'usergroup.write': 3,
        'usergroup.admin': 4,

        'branch.none': 0,
        'branch.merge': 1,
        'branch.push': 3,
        'branch.push_force': 4,

        'hg.repogroup.create.false': 0,
        'hg.repogroup.create.true': 1,

        'hg.usergroup.create.false': 0,
        'hg.usergroup.create.true': 1,

        'hg.fork.none': 0,
        'hg.fork.repository': 1,
        'hg.create.none': 0,
        'hg.create.repository': 1
    }

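    # Worked example (illustrative): PERM_WEIGHTS lets callers pick the strongest
    # of several granted permissions, e.g.
    #
    #     granted = ['repository.read', 'repository.write']
    #     strongest = max(granted, key=PERM_WEIGHTS.get)   # -> 'repository.write' (3 > 1)
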
    permission_id = Column("permission_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    permission_name = Column("permission_name", String(255), nullable=True, unique=None, default=None)
    permission_longname = Column("permission_longname", String(255), nullable=True, unique=None, default=None)

    def __repr__(self):
        return "<%s('%s:%s')>" % (
            self.cls_name, self.permission_id, self.permission_name
        )

    @classmethod
    def get_by_key(cls, key):
        return cls.query().filter(cls.permission_name == key).scalar()

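    # Illustrative usage sketch: resolving a Permission row by its key name,
    # e.g. when granting repository access elsewhere in the model layer:
    #
    #     read_perm = Permission.get_by_key('repository.read')
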
    @classmethod
    def get_default_repo_perms(cls, user_id, repo_id=None):
        q = Session().query(UserRepoToPerm, Repository, Permission)\
            .join((Permission, UserRepoToPerm.permission_id == Permission.permission_id))\
            .join((Repository, UserRepoToPerm.repository_id == Repository.repo_id))\
            .filter(UserRepoToPerm.user_id == user_id)
        if repo_id:
            q = q.filter(UserRepoToPerm.repository_id == repo_id)
        return q.all()

    @classmethod
    def get_default_repo_branch_perms(cls, user_id, repo_id=None):
        q = Session().query(UserToRepoBranchPermission, UserRepoToPerm, Permission) \
            .join(
                Permission,
                UserToRepoBranchPermission.permission_id == Permission.permission_id) \
            .join(
                UserRepoToPerm,
                UserToRepoBranchPermission.rule_to_perm_id == UserRepoToPerm.repo_to_perm_id) \
            .filter(UserRepoToPerm.user_id == user_id)

        if repo_id:
            q = q.filter(UserToRepoBranchPermission.repository_id == repo_id)
        return q.order_by(UserToRepoBranchPermission.rule_order).all()

    @classmethod
    def get_default_repo_perms_from_user_group(cls, user_id, repo_id=None):
        q = Session().query(UserGroupRepoToPerm, Repository, Permission)\
            .join(
                Permission,
                UserGroupRepoToPerm.permission_id == Permission.permission_id)\
            .join(
                Repository,
                UserGroupRepoToPerm.repository_id == Repository.repo_id)\
            .join(
                UserGroup,
                UserGroupRepoToPerm.users_group_id ==
                UserGroup.users_group_id)\
            .join(
                UserGroupMember,
                UserGroupRepoToPerm.users_group_id ==
                UserGroupMember.users_group_id)\
            .filter(
                UserGroupMember.user_id == user_id,
                UserGroup.users_group_active == true())
        if repo_id:
            q = q.filter(UserGroupRepoToPerm.repository_id == repo_id)
        return q.all()

    @classmethod
    def get_default_repo_branch_perms_from_user_group(cls, user_id, repo_id=None):
        q = Session().query(UserGroupToRepoBranchPermission, UserGroupRepoToPerm, Permission) \
            .join(
                Permission,
                UserGroupToRepoBranchPermission.permission_id == Permission.permission_id) \
            .join(
                UserGroupRepoToPerm,
                UserGroupToRepoBranchPermission.rule_to_perm_id == UserGroupRepoToPerm.users_group_to_perm_id) \
            .join(
                UserGroup,
                UserGroupRepoToPerm.users_group_id == UserGroup.users_group_id) \
            .join(
                UserGroupMember,
                UserGroupRepoToPerm.users_group_id == UserGroupMember.users_group_id) \
            .filter(
                UserGroupMember.user_id == user_id,
                UserGroup.users_group_active == true())

        if repo_id:
            q = q.filter(UserGroupToRepoBranchPermission.repository_id == repo_id)
        return q.order_by(UserGroupToRepoBranchPermission.rule_order).all()

    @classmethod
    def get_default_group_perms(cls, user_id, repo_group_id=None):
        q = Session().query(UserRepoGroupToPerm, RepoGroup, Permission)\
            .join(
                Permission,
                UserRepoGroupToPerm.permission_id == Permission.permission_id)\
            .join(
                RepoGroup,
                UserRepoGroupToPerm.group_id == RepoGroup.group_id)\
            .filter(UserRepoGroupToPerm.user_id == user_id)
        if repo_group_id:
            q = q.filter(UserRepoGroupToPerm.group_id == repo_group_id)
        return q.all()

    @classmethod
    def get_default_group_perms_from_user_group(
            cls, user_id, repo_group_id=None):
        q = Session().query(UserGroupRepoGroupToPerm, RepoGroup, Permission)\
            .join(
                Permission,
                UserGroupRepoGroupToPerm.permission_id ==
                Permission.permission_id)\
            .join(
                RepoGroup,
                UserGroupRepoGroupToPerm.group_id == RepoGroup.group_id)\
            .join(
                UserGroup,
                UserGroupRepoGroupToPerm.users_group_id ==
                UserGroup.users_group_id)\
            .join(
                UserGroupMember,
                UserGroupRepoGroupToPerm.users_group_id ==
                UserGroupMember.users_group_id)\
            .filter(
                UserGroupMember.user_id == user_id,
                UserGroup.users_group_active == true())
        if repo_group_id:
            q = q.filter(UserGroupRepoGroupToPerm.group_id == repo_group_id)
        return q.all()

    @classmethod
    def get_default_user_group_perms(cls, user_id, user_group_id=None):
        q = Session().query(UserUserGroupToPerm, UserGroup, Permission)\
            .join((Permission, UserUserGroupToPerm.permission_id == Permission.permission_id))\
            .join((UserGroup, UserUserGroupToPerm.user_group_id == UserGroup.users_group_id))\
            .filter(UserUserGroupToPerm.user_id == user_id)
        if user_group_id:
            q = q.filter(UserUserGroupToPerm.user_group_id == user_group_id)
        return q.all()

    @classmethod
    def get_default_user_group_perms_from_user_group(
            cls, user_id, user_group_id=None):
        TargetUserGroup = aliased(UserGroup, name='target_user_group')
        q = Session().query(UserGroupUserGroupToPerm, UserGroup, Permission)\
            .join(
                Permission,
                UserGroupUserGroupToPerm.permission_id ==
                Permission.permission_id)\
            .join(
                TargetUserGroup,
                UserGroupUserGroupToPerm.target_user_group_id ==
                TargetUserGroup.users_group_id)\
            .join(
                UserGroup,
                UserGroupUserGroupToPerm.user_group_id ==
                UserGroup.users_group_id)\
            .join(
                UserGroupMember,
                UserGroupUserGroupToPerm.user_group_id ==
                UserGroupMember.users_group_id)\
            .filter(
                UserGroupMember.user_id == user_id,
                UserGroup.users_group_active == true())
        if user_group_id:
            q = q.filter(
                UserGroupUserGroupToPerm.user_group_id == user_group_id)

        return q.all()


class UserRepoToPerm(Base, BaseModel):
    __tablename__ = 'repo_to_perm'
    __table_args__ = (
        UniqueConstraint('user_id', 'repository_id', 'permission_id'),
        base_table_args
    )

    repo_to_perm_id = Column("repo_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)

    user = relationship('User', back_populates="repo_to_perm")
    repository = relationship('Repository', back_populates="repo_to_perm")
    permission = relationship('Permission')

    branch_perm_entry = relationship('UserToRepoBranchPermission', cascade="all, delete-orphan", lazy='joined', back_populates='user_repo_to_perm')

    @classmethod
    def create(cls, user, repository, permission):
        n = cls()
        n.user = user
        n.repository = repository
        n.permission = permission
        Session().add(n)
        return n

    def __repr__(self):
        return f'<{self.user} => {self.repository} >'


class UserUserGroupToPerm(Base, BaseModel):
    __tablename__ = 'user_user_group_to_perm'
    __table_args__ = (
        UniqueConstraint('user_id', 'user_group_id', 'permission_id'),
        base_table_args
    )

    user_user_group_to_perm_id = Column("user_user_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
    user_group_id = Column("user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)

    user = relationship('User', back_populates='user_group_to_perm')
    user_group = relationship('UserGroup', back_populates='user_user_group_to_perm')
    permission = relationship('Permission')

    @classmethod
    def create(cls, user, user_group, permission):
        n = cls()
        n.user = user
        n.user_group = user_group
        n.permission = permission
        Session().add(n)
        return n

    def __repr__(self):
        return f'<{self.user} => {self.user_group} >'


class UserToPerm(Base, BaseModel):
    __tablename__ = 'user_to_perm'
    __table_args__ = (
        UniqueConstraint('user_id', 'permission_id'),
        base_table_args
    )

    user_to_perm_id = Column("user_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)

    user = relationship('User', back_populates='user_perms')
    permission = relationship('Permission', lazy='joined')

    def __repr__(self):
        return f'<{self.user} => {self.permission} >'


class UserGroupRepoToPerm(Base, BaseModel):
    __tablename__ = 'users_group_repo_to_perm'
    __table_args__ = (
        UniqueConstraint('repository_id', 'users_group_id', 'permission_id'),
        base_table_args
    )

    users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)

    users_group = relationship('UserGroup', back_populates='users_group_repo_to_perm')
    permission = relationship('Permission')
    repository = relationship('Repository', back_populates='users_group_to_perm')
    user_group_branch_perms = relationship('UserGroupToRepoBranchPermission', cascade='all', back_populates='user_group_repo_to_perm')

    @classmethod
    def create(cls, users_group, repository, permission):
        n = cls()
        n.users_group = users_group
        n.repository = repository
        n.permission = permission
        Session().add(n)
        return n

    def __repr__(self):
        return f'<UserGroupRepoToPerm:{self.users_group} => {self.repository} >'


class UserGroupUserGroupToPerm(Base, BaseModel):
    __tablename__ = 'user_group_user_group_to_perm'
    __table_args__ = (
        UniqueConstraint('target_user_group_id', 'user_group_id', 'permission_id'),
        CheckConstraint('target_user_group_id != user_group_id'),
        base_table_args
    )

    user_group_user_group_to_perm_id = Column("user_group_user_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    target_user_group_id = Column("target_user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
    user_group_id = Column("user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)

    target_user_group = relationship('UserGroup', primaryjoin='UserGroupUserGroupToPerm.target_user_group_id==UserGroup.users_group_id', back_populates='user_group_user_group_to_perm')
    user_group = relationship('UserGroup', primaryjoin='UserGroupUserGroupToPerm.user_group_id==UserGroup.users_group_id')
    permission = relationship('Permission')

    @classmethod
    def create(cls, target_user_group, user_group, permission):
        n = cls()
        n.target_user_group = target_user_group
        n.user_group = user_group
        n.permission = permission
        Session().add(n)
        return n

    def __repr__(self):
        return f'<UserGroupUserGroup:{self.target_user_group} => {self.user_group} >'


class UserGroupToPerm(Base, BaseModel):
    __tablename__ = 'users_group_to_perm'
    __table_args__ = (
        UniqueConstraint('users_group_id', 'permission_id',),
        base_table_args
    )

    users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)

    users_group = relationship('UserGroup', back_populates='users_group_to_perm')
    permission = relationship('Permission')


class UserRepoGroupToPerm(Base, BaseModel):
    __tablename__ = 'user_repo_group_to_perm'
    __table_args__ = (
        UniqueConstraint('user_id', 'group_id', 'permission_id'),
        base_table_args
    )

    group_to_perm_id = Column("group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
    group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)

    user = relationship('User', back_populates='repo_group_to_perm')
    group = relationship('RepoGroup', back_populates='repo_group_to_perm')
    permission = relationship('Permission')

    @classmethod
    def create(cls, user, repository_group, permission):
        n = cls()
        n.user = user
        n.group = repository_group
        n.permission = permission
        Session().add(n)
        return n


class UserGroupRepoGroupToPerm(Base, BaseModel):
    __tablename__ = 'users_group_repo_group_to_perm'
    __table_args__ = (
        UniqueConstraint('users_group_id', 'group_id'),
        base_table_args
    )

    users_group_repo_group_to_perm_id = Column("users_group_repo_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
    group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)

    users_group = relationship('UserGroup', back_populates='users_group_repo_group_to_perm')
    permission = relationship('Permission')
    group = relationship('RepoGroup', back_populates='users_group_to_perm')

    @classmethod
    def create(cls, user_group, repository_group, permission):
        n = cls()
        n.users_group = user_group
        n.group = repository_group
        n.permission = permission
        Session().add(n)
        return n

    def __repr__(self):
        return '<UserGroupRepoGroupToPerm:%s => %s >' % (self.users_group, self.group)


class Statistics(Base, BaseModel):
    __tablename__ = 'statistics'
    __table_args__ = (
        base_table_args
    )

    stat_id = Column("stat_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=True, default=None)
    stat_on_revision = Column("stat_on_revision", Integer(), nullable=False)
    commit_activity = Column("commit_activity", LargeBinary(1000000), nullable=False)  # JSON data
    commit_activity_combined = Column("commit_activity_combined", LargeBinary(), nullable=False)  # JSON data
    languages = Column("languages", LargeBinary(1000000), nullable=False)  # JSON data

    repository = relationship('Repository', single_parent=True, viewonly=True)


class UserFollowing(Base, BaseModel):
    __tablename__ = 'user_followings'
    __table_args__ = (
        UniqueConstraint('user_id', 'follows_repository_id'),
        UniqueConstraint('user_id', 'follows_user_id'),
        base_table_args
    )

    user_following_id = Column("user_following_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
    follows_repo_id = Column("follows_repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=True, unique=None, default=None)
    follows_user_id = Column("follows_user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
    follows_from = Column('follows_from', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now)

    user = relationship('User', primaryjoin='User.user_id==UserFollowing.user_id', back_populates='followings')

    follows_user = relationship('User', primaryjoin='User.user_id==UserFollowing.follows_user_id')
    follows_repository = relationship('Repository', order_by='Repository.repo_name', back_populates='followers')

    @classmethod
    def get_repo_followers(cls, repo_id):
        return cls.query().filter(cls.follows_repo_id == repo_id)


class CacheKey(Base, BaseModel):
    __tablename__ = 'cache_invalidation'
    __table_args__ = (
        UniqueConstraint('cache_key'),
        Index('key_idx', 'cache_key'),
        Index('cache_args_idx', 'cache_args'),
        base_table_args,
    )

    CACHE_TYPE_FEED = 'FEED'

    # namespaces used to register process/thread aware caches
    REPO_INVALIDATION_NAMESPACE = 'repo_cache.v1:{repo_id}'

    cache_id = Column("cache_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    cache_key = Column("cache_key", String(255), nullable=True, unique=None, default=None)
    cache_args = Column("cache_args", String(255), nullable=True, unique=None, default=None)
    cache_state_uid = Column("cache_state_uid", String(255), nullable=True, unique=None, default=None)
    cache_active = Column("cache_active", Boolean(), nullable=True, unique=None, default=False)

    def __init__(self, cache_key, cache_args='', cache_state_uid=None, cache_active=False):
        self.cache_key = cache_key
        self.cache_args = cache_args
        self.cache_active = cache_active
        # first key should be same for all entries, since all workers should share it
        self.cache_state_uid = cache_state_uid or self.generate_new_state_uid()

    def __repr__(self):
        return "<%s('%s:%s[%s]')>" % (
            self.cls_name,
            self.cache_id, self.cache_key, self.cache_active)

    def _cache_key_partition(self):
        prefix, repo_name, suffix = self.cache_key.partition(self.cache_args)
        return prefix, repo_name, suffix

    def get_prefix(self):
        """
        Try to extract prefix from existing cache key. The key could consist
        of prefix, repo_name, suffix
        """
        # this returns prefix, repo_name, suffix
        return self._cache_key_partition()[0]

    def get_suffix(self):
        """
        get suffix that might have been used in _get_cache_key to
        generate self.cache_key. Only used for informational purposes
        in repo_edit.mako.
        """
        # prefix, repo_name, suffix
        return self._cache_key_partition()[2]

    @classmethod
    def generate_new_state_uid(cls, based_on=None):
        if based_on:
            return str(uuid.uuid5(uuid.NAMESPACE_URL, safe_str(based_on)))
        else:
            return str(uuid.uuid4())

    @classmethod
    def delete_all_cache(cls):
        """
        Delete all cache keys from database.
        Should only be run when all instances are down and all entries
        thus stale.
        """
        cls.query().delete()
        Session().commit()

    @classmethod
    def set_invalidate(cls, cache_uid, delete=False):
        """
        Mark all caches of a repo as invalid in the database.
        """
        try:
            qry = Session().query(cls).filter(cls.cache_key == cache_uid)
            if delete:
                qry.delete()
                log.debug('cache objects deleted for cache args %s',
                          safe_str(cache_uid))
            else:
                new_uid = cls.generate_new_state_uid()
                qry.update({"cache_state_uid": new_uid,
                            "cache_args": f"repo_state:{time.time()}"})
                log.debug('cache object %s set new UID %s',
                          safe_str(cache_uid), new_uid)

            Session().commit()
        except Exception:
            log.exception(
                'Cache key invalidation failed for cache args %s',
                safe_str(cache_uid))
            Session().rollback()

    @classmethod
    def get_active_cache(cls, cache_key):
        inv_obj = cls.query().filter(cls.cache_key == cache_key).scalar()
        if inv_obj:
            return inv_obj
        return None

    @classmethod
    def get_namespace_map(cls, namespace):
        return {
            x.cache_key: x
            for x in cls.query().filter(cls.cache_args == namespace)}
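
# Illustrative sketch (an assumption, not part of the original file): the usual
# invalidation flow built on the CacheKey model pairs the per-repo namespace
# constant with set_invalidate() and get_active_cache().
#
#   repo_ns = CacheKey.REPO_INVALIDATION_NAMESPACE.format(repo_id=repo.repo_id)
#   CacheKey.set_invalidate(repo_ns)            # rotate cache_state_uid for this repo
#   entry = CacheKey.get_active_cache(repo_ns)  # workers pick up the new state uid
#   # or drop the rows entirely instead of rotating:
#   CacheKey.set_invalidate(repo_ns, delete=True)

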
class ChangesetComment(Base, BaseModel):
    __tablename__ = 'changeset_comments'
    __table_args__ = (
        Index('cc_revision_idx', 'revision'),
        base_table_args,
    )

    COMMENT_OUTDATED = 'comment_outdated'
    COMMENT_TYPE_NOTE = 'note'
    COMMENT_TYPE_TODO = 'todo'
    COMMENT_TYPES = [COMMENT_TYPE_NOTE, COMMENT_TYPE_TODO]

    OP_IMMUTABLE = 'immutable'
    OP_CHANGEABLE = 'changeable'

    comment_id = Column('comment_id', Integer(), nullable=False, primary_key=True)
    repo_id = Column('repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
    revision = Column('revision', String(40), nullable=True)
    pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=True)
    pull_request_version_id = Column("pull_request_version_id", Integer(), ForeignKey('pull_request_versions.pull_request_version_id'), nullable=True)
    line_no = Column('line_no', Unicode(10), nullable=True)
    hl_lines = Column('hl_lines', Unicode(512), nullable=True)
    f_path = Column('f_path', Unicode(1000), nullable=True)
    user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=False)
    text = Column('text', UnicodeText().with_variant(UnicodeText(25000), 'mysql'), nullable=False)
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    modified_at = Column('modified_at', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    renderer = Column('renderer', Unicode(64), nullable=True)
    display_state = Column('display_state', Unicode(128), nullable=True)
    immutable_state = Column('immutable_state', Unicode(128), nullable=True, default=OP_CHANGEABLE)
    draft = Column('draft', Boolean(), nullable=True, default=False)

    comment_type = Column('comment_type', Unicode(128), nullable=True, default=COMMENT_TYPE_NOTE)
    resolved_comment_id = Column('resolved_comment_id', Integer(), ForeignKey('changeset_comments.comment_id'), nullable=True)

    resolved_comment = relationship('ChangesetComment', remote_side=comment_id, back_populates='resolved_by')
    resolved_by = relationship('ChangesetComment', back_populates='resolved_comment')

    author = relationship('User', lazy='select', back_populates='user_comments')
    repo = relationship('Repository', back_populates='comments')
    status_change = relationship('ChangesetStatus', cascade="all, delete-orphan", lazy='select', back_populates='comment')
    pull_request = relationship('PullRequest', lazy='select', back_populates='comments')
    pull_request_version = relationship('PullRequestVersion', lazy='select')
    history = relationship('ChangesetCommentHistory', cascade='all, delete-orphan', lazy='select', order_by='ChangesetCommentHistory.version', back_populates="comment")

    @classmethod
    def get_users(cls, revision=None, pull_request_id=None):
        """
        Returns users associated with this ChangesetComment, i.e. those
        who actually commented.

        :param cls:
        :param revision:
        """
        q = Session().query(User).join(ChangesetComment.author)
        if revision:
            q = q.filter(cls.revision == revision)
        elif pull_request_id:
            q = q.filter(cls.pull_request_id == pull_request_id)
        return q.all()

    @classmethod
    def get_index_from_version(cls, pr_version, versions=None, num_versions=None) -> int:
        if pr_version is None:
            return 0

        if versions is not None:
            num_versions = [x.pull_request_version_id for x in versions]

        num_versions = num_versions or []
        try:
            return num_versions.index(pr_version) + 1
        except (IndexError, ValueError):
            return 0

    @property
    def outdated(self):
        return self.display_state == self.COMMENT_OUTDATED

    @property
    def outdated_js(self):
        return str_json(self.display_state == self.COMMENT_OUTDATED)

    @property
    def immutable(self):
        return self.immutable_state == self.OP_IMMUTABLE

    def outdated_at_version(self, version: int) -> bool:
        """
        Checks if comment is outdated for given pull request version
        """

        def version_check():
            return self.pull_request_version_id and self.pull_request_version_id != version

        if self.is_inline:
            return self.outdated and version_check()
        else:
            # general comments don't have .outdated set, also latest don't have a version
            return version_check()

    def outdated_at_version_js(self, version):
        """
        Checks if comment is outdated for given pull request version
        """
        return str_json(self.outdated_at_version(version))

    def older_than_version(self, version: int) -> bool:
        """
        Checks if comment is made from a previous version than given.
        Assumes self.pull_request_version.pull_request_version_id is an integer if not None.
        """

        # If version is None, return False as the current version cannot be less than None
        if version is None:
            return False

        # Ensure that the version is an integer to prevent TypeError on comparison
        if not isinstance(version, int):
            raise ValueError("The provided version must be an integer.")

        # Initialize current version to 0 or pull_request_version_id if it's available
        cur_ver = 0
        if self.pull_request_version and self.pull_request_version.pull_request_version_id is not None:
            cur_ver = self.pull_request_version.pull_request_version_id

        # Return True if the current version is less than the given version
        return cur_ver < version

    def older_than_version_js(self, version):
        """
        Checks if comment is made from previous version than given
        """
        return str_json(self.older_than_version(version))

    @property
    def commit_id(self):
        """New style naming to stop using .revision"""
        return self.revision

    @property
    def resolved(self):
        return self.resolved_by[0] if self.resolved_by else None

    @property
    def is_todo(self):
        return self.comment_type == self.COMMENT_TYPE_TODO

    @property
    def is_inline(self):
        if self.line_no and self.f_path:
            return True
        return False

    @property
    def last_version(self):
        version = 0
        if self.history:
            version = self.history[-1].version
        return version

    def get_index_version(self, versions):
        return self.get_index_from_version(
            self.pull_request_version_id, versions)

    @property
    def review_status(self):
        if self.status_change:
            return self.status_change[0].status

    @property
    def review_status_lbl(self):
        if self.status_change:
            return self.status_change[0].status_lbl

    def __repr__(self):
        if self.comment_id:
            return f'<DB:Comment #{self.comment_id}>'
        else:
            return f'<DB:Comment at {id(self)!r}>'

    def get_api_data(self):
        comment = self

        data = {
            'comment_id': comment.comment_id,
            'comment_type': comment.comment_type,
            'comment_text': comment.text,
            'comment_status': comment.status_change,
            'comment_f_path': comment.f_path,
            'comment_lineno': comment.line_no,
            'comment_author': comment.author,
            'comment_created_on': comment.created_on,
            'comment_resolved_by': self.resolved,
            'comment_commit_id': comment.revision,
            'comment_pull_request_id': comment.pull_request_id,
            'comment_last_version': self.last_version
        }
        return data

    def __json__(self):
        data = dict()
        data.update(self.get_api_data())
        return data
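
# Worked example (illustrative, values made up): for a comment attached to pull
# request version id 12 in a PR whose version history is [10, 11, 12],
# get_index_from_version() returns the 1-based position, while an unknown id
# falls back to 0; older_than_version() simply compares the comment's version
# id with the one passed in.
#
#   ChangesetComment.get_index_from_version(12, num_versions=[10, 11, 12])  # -> 3
#   ChangesetComment.get_index_from_version(99, num_versions=[10, 11, 12])  # -> 0
#   # comment.older_than_version(13) -> True when the comment sits at version 12

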
class ChangesetCommentHistory(Base, BaseModel):
    __tablename__ = 'changeset_comments_history'
    __table_args__ = (
        Index('cch_comment_id_idx', 'comment_id'),
        base_table_args,
    )

    comment_history_id = Column('comment_history_id', Integer(), nullable=False, primary_key=True)
    comment_id = Column('comment_id', Integer(), ForeignKey('changeset_comments.comment_id'), nullable=False)
    version = Column("version", Integer(), nullable=False, default=0)
    created_by_user_id = Column('created_by_user_id', Integer(), ForeignKey('users.user_id'), nullable=False)
    text = Column('text', UnicodeText().with_variant(UnicodeText(25000), 'mysql'), nullable=False)
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    deleted = Column('deleted', Boolean(), default=False)

    author = relationship('User', lazy='joined')
    comment = relationship('ChangesetComment', cascade="all, delete", back_populates="history")

    @classmethod
    def get_version(cls, comment_id):
        q = Session().query(ChangesetCommentHistory).filter(
            ChangesetCommentHistory.comment_id == comment_id).order_by(ChangesetCommentHistory.version.desc())
        if q.count() == 0:
            return 1
        elif q.count() >= q[0].version:
            return q.count() + 1
        else:
            return q[0].version + 1
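
# Example of the version bookkeeping above (illustrative): with no history rows
# for a comment, get_version() returns 1; with rows v1..v3 it returns 4
# (row count + 1); and if versions were ever skipped so the highest stored
# version exceeds the row count, the next version is derived from that highest
# value instead:
#
#   ChangesetCommentHistory.get_version(comment_id)  # 0 rows       -> 1
#                                                    # rows v1..v3  -> 4
#                                                    # rows v2, v5  -> 6

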
class ChangesetStatus(Base, BaseModel):
    __tablename__ = 'changeset_statuses'
    __table_args__ = (
        Index('cs_revision_idx', 'revision'),
        Index('cs_version_idx', 'version'),
        UniqueConstraint('repo_id', 'revision', 'version'),
        base_table_args
    )

    STATUS_NOT_REVIEWED = DEFAULT = 'not_reviewed'
    STATUS_APPROVED = 'approved'
    STATUS_REJECTED = 'rejected'
    STATUS_UNDER_REVIEW = 'under_review'

    STATUSES = [
        (STATUS_NOT_REVIEWED, _("Not Reviewed")),  # (no icon) and default
        (STATUS_APPROVED, _("Approved")),
        (STATUS_REJECTED, _("Rejected")),
        (STATUS_UNDER_REVIEW, _("Under Review")),
    ]

    changeset_status_id = Column('changeset_status_id', Integer(), nullable=False, primary_key=True)
    repo_id = Column('repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None)
    revision = Column('revision', String(40), nullable=False)
    status = Column('status', String(128), nullable=False, default=DEFAULT)
    changeset_comment_id = Column('changeset_comment_id', Integer(), ForeignKey('changeset_comments.comment_id'))
    modified_at = Column('modified_at', DateTime(), nullable=False, default=datetime.datetime.now)
    version = Column('version', Integer(), nullable=False, default=0)
    pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=True)

    author = relationship('User', lazy='select')
    repo = relationship('Repository', lazy='select')
    comment = relationship('ChangesetComment', lazy='select', back_populates='status_change')
    pull_request = relationship('PullRequest', lazy='select', back_populates='statuses')

    def __repr__(self):
        return f"<{self.cls_name}('{self.status}[v{self.version}]:{self.author}')>"

    @classmethod
    def get_status_lbl(cls, value):
        return dict(cls.STATUSES).get(value)

    @property
    def status_lbl(self):
        return ChangesetStatus.get_status_lbl(self.status)

    def get_api_data(self):
        status = self
        data = {
            'status_id': status.changeset_status_id,
            'status': status.status,
        }
        return data

    def __json__(self):
        data = dict()
        data.update(self.get_api_data())
        return data


class _SetState(object):
    """
    Context manager allowing changing state for sensitive operations such as
    pull request update or merge
    """

    def __init__(self, pull_request, pr_state, back_state=None):
        self._pr = pull_request
        self._org_state = back_state or pull_request.pull_request_state
        self._pr_state = pr_state
        self._current_state = None

    def __enter__(self):
        log.debug('StateLock: entering set state context of pr %s, setting state to: `%s`',
                  self._pr, self._pr_state)
        self.set_pr_state(self._pr_state)
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        if exc_val is not None or exc_type is not None:
            log.error(traceback.format_tb(exc_tb))
            return None

        self.set_pr_state(self._org_state)
        log.debug('StateLock: exiting set state context of pr %s, setting state to: `%s`',
                  self._pr, self._org_state)

    @property
    def state(self):
        return self._current_state

    def set_pr_state(self, pr_state):
        try:
            self._pr.pull_request_state = pr_state
            Session().add(self._pr)
            Session().commit()
            self._current_state = pr_state
        except Exception:
            log.exception('Failed to set PullRequest %s state to %s', self._pr, pr_state)
            raise
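
# Usage sketch (an illustration inferred from the class above, not original
# code): the state guard is used as a context manager around sensitive pull
# request operations, temporarily switching the PR into a transient state and
# restoring the previous one on a clean exit; on an exception the transient
# state is intentionally left in place so it can be inspected.
#
#   with _SetState(pull_request, 'updating'):
#       ...  # perform the update while other callers see state 'updating'
#   # state is back to its original value here (unless an exception escaped)

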
class _PullRequestBase(BaseModel):
    """
    Common attributes of pull request and version entries.
    """

    # .status values
    STATUS_NEW = 'new'
    STATUS_OPEN = 'open'
    STATUS_CLOSED = 'closed'

    # available states
    STATE_CREATING = 'creating'
    STATE_UPDATING = 'updating'
    STATE_MERGING = 'merging'
    STATE_CREATED = 'created'

    title = Column('title', Unicode(255), nullable=True)
    description = Column(
        'description', UnicodeText().with_variant(UnicodeText(10240), 'mysql'),
        nullable=True)
    description_renderer = Column('description_renderer', Unicode(64), nullable=True)

    # new/open/closed status of pull request (not approve/reject/etc)
    status = Column('status', Unicode(255), nullable=False, default=STATUS_NEW)
    created_on = Column(
        'created_on', DateTime(timezone=False), nullable=False,
        default=datetime.datetime.now)
    updated_on = Column(
        'updated_on', DateTime(timezone=False), nullable=False,
        default=datetime.datetime.now)

    pull_request_state = Column("pull_request_state", String(255), nullable=True)

    @declared_attr
    def user_id(cls):
        return Column(
            "user_id", Integer(), ForeignKey('users.user_id'), nullable=False,
            unique=None)

    # 500 revisions max
    _revisions = Column(
        'revisions', UnicodeText().with_variant(UnicodeText(20500), 'mysql'))

    common_ancestor_id = Column('common_ancestor_id', Unicode(255), nullable=True)

    @declared_attr
    def source_repo_id(cls):
        # TODO: dan: rename column to source_repo_id
        return Column(
            'org_repo_id', Integer(), ForeignKey('repositories.repo_id'),
            nullable=False)

    @declared_attr
    def pr_source(cls):
        return relationship(
            'Repository',
            primaryjoin=f'{cls.__name__}.source_repo_id==Repository.repo_id',
            overlaps="pull_requests_source"
        )

    _source_ref = Column('org_ref', Unicode(255), nullable=False)

    @hybrid_property
    def source_ref(self):
        return self._source_ref

    @source_ref.setter
    def source_ref(self, val):
        parts = (val or '').split(':')
        if len(parts) != 3:
            raise ValueError(
                'Invalid reference format given: {}, expected X:Y:Z'.format(val))
4187 self._source_ref = safe_str(val)
4184 self._source_ref = safe_str(val)
4188
4185
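# --- Illustrative sketch (not part of the original source): the X:Y:Z reference
# --- format expected by the source_ref/target_ref setters above is
# --- <ref_type>:<ref_name>:<commit_id>; the values below are made up.
#
#     pull_request.source_ref = 'branch:feature-x:aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa'
#     pull_request.source_ref = 'branch:feature-x'   # raises ValueError, only two parts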
4189 _target_ref = Column('other_ref', Unicode(255), nullable=False)
4186 _target_ref = Column('other_ref', Unicode(255), nullable=False)
4190
4187
4191 @hybrid_property
4188 @hybrid_property
4192 def target_ref(self):
4189 def target_ref(self):
4193 return self._target_ref
4190 return self._target_ref
4194
4191
4195 @target_ref.setter
4192 @target_ref.setter
4196 def target_ref(self, val):
4193 def target_ref(self, val):
4197 parts = (val or '').split(':')
4194 parts = (val or '').split(':')
4198 if len(parts) != 3:
4195 if len(parts) != 3:
4199 raise ValueError(
4196 raise ValueError(
4200 'Invalid reference format given: {}, expected X:Y:Z'.format(val))
4197 'Invalid reference format given: {}, expected X:Y:Z'.format(val))
4201 self._target_ref = safe_str(val)
4198 self._target_ref = safe_str(val)
4202
4199
4203 @declared_attr
4200 @declared_attr
4204 def target_repo_id(cls):
4201 def target_repo_id(cls):
4205 # TODO: dan: rename column to target_repo_id
4202 # TODO: dan: rename column to target_repo_id
4206 return Column(
4203 return Column(
4207 'other_repo_id', Integer(), ForeignKey('repositories.repo_id'),
4204 'other_repo_id', Integer(), ForeignKey('repositories.repo_id'),
4208 nullable=False)
4205 nullable=False)
4209
4206
4210 @declared_attr
4207 @declared_attr
4211 def pr_target(cls):
4208 def pr_target(cls):
4212 return relationship(
4209 return relationship(
4213 'Repository',
4210 'Repository',
4214 primaryjoin=f'{cls.__name__}.target_repo_id==Repository.repo_id',
4211 primaryjoin=f'{cls.__name__}.target_repo_id==Repository.repo_id',
4215 overlaps="pull_requests_target"
4212 overlaps="pull_requests_target"
4216 )
4213 )
4217
4214
4218 _shadow_merge_ref = Column('shadow_merge_ref', Unicode(255), nullable=True)
4215 _shadow_merge_ref = Column('shadow_merge_ref', Unicode(255), nullable=True)
4219
4216
4220 # TODO: dan: rename column to last_merge_source_rev
4217 # TODO: dan: rename column to last_merge_source_rev
4221 _last_merge_source_rev = Column(
4218 _last_merge_source_rev = Column(
4222 'last_merge_org_rev', String(40), nullable=True)
4219 'last_merge_org_rev', String(40), nullable=True)
4223 # TODO: dan: rename column to last_merge_target_rev
4220 # TODO: dan: rename column to last_merge_target_rev
4224 _last_merge_target_rev = Column(
4221 _last_merge_target_rev = Column(
4225 'last_merge_other_rev', String(40), nullable=True)
4222 'last_merge_other_rev', String(40), nullable=True)
4226 _last_merge_status = Column('merge_status', Integer(), nullable=True)
4223 _last_merge_status = Column('merge_status', Integer(), nullable=True)
4227 last_merge_metadata = Column(
4224 last_merge_metadata = Column(
4228 'last_merge_metadata', MutationObj.as_mutable(
4225 'last_merge_metadata', MutationObj.as_mutable(
4229 JsonType(dialect_map=dict(mysql=UnicodeText(16384)))))
4226 JsonType(dialect_map=dict(mysql=UnicodeText(16384)))))
4230
4227
4231 merge_rev = Column('merge_rev', String(40), nullable=True)
4228 merge_rev = Column('merge_rev', String(40), nullable=True)
4232
4229
4233 reviewer_data = Column(
4230 reviewer_data = Column(
4234 'reviewer_data_json', MutationObj.as_mutable(
4231 'reviewer_data_json', MutationObj.as_mutable(
4235 JsonType(dialect_map=dict(mysql=UnicodeText(16384)))))
4232 JsonType(dialect_map=dict(mysql=UnicodeText(16384)))))
4236
4233
4237 @property
4234 @property
4238 def reviewer_data_json(self):
4235 def reviewer_data_json(self):
4239 return str_json(self.reviewer_data)
4236 return str_json(self.reviewer_data)
4240
4237
4241 @property
4238 @property
4242 def last_merge_metadata_parsed(self):
4239 def last_merge_metadata_parsed(self):
4243 metadata = {}
4240 metadata = {}
4244 if not self.last_merge_metadata:
4241 if not self.last_merge_metadata:
4245 return metadata
4242 return metadata
4246
4243
4247 if hasattr(self.last_merge_metadata, 'de_coerce'):
4244 if hasattr(self.last_merge_metadata, 'de_coerce'):
4248 for k, v in self.last_merge_metadata.de_coerce().items():
4245 for k, v in self.last_merge_metadata.de_coerce().items():
4249 if k in ['target_ref', 'source_ref']:
4246 if k in ['target_ref', 'source_ref']:
4250 metadata[k] = Reference(v['type'], v['name'], v['commit_id'])
4247 metadata[k] = Reference(v['type'], v['name'], v['commit_id'])
4251 else:
4248 else:
4252 if hasattr(v, 'de_coerce'):
4249 if hasattr(v, 'de_coerce'):
4253 metadata[k] = v.de_coerce()
4250 metadata[k] = v.de_coerce()
4254 else:
4251 else:
4255 metadata[k] = v
4252 metadata[k] = v
4256 return metadata
4253 return metadata
4257
4254
4258 @property
4255 @property
4259 def work_in_progress(self):
4256 def work_in_progress(self):
4260 """checks if pull request is work in progress by checking the title"""
4257 """checks if pull request is work in progress by checking the title"""
4261 title = self.title.upper()
4258 title = self.title.upper()
4262 if re.match(r'^(\[WIP\]\s*|WIP:\s*|WIP\s+)', title):
4259 if re.match(r'^(\[WIP\]\s*|WIP:\s*|WIP\s+)', title):
4263 return True
4260 return True
4264 return False
4261 return False
4265
4262
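# --- Illustrative sketch (not part of the original source): titles matched by
# --- the work_in_progress regex above (matching is effectively case-insensitive
# --- because the title is upper-cased first); the example titles are made up.
#
#     '[WIP] add new API'   -> True
#     'WIP: add new API'    -> True
#     'wip add new API'     -> True
#     'Add new API (WIP)'   -> False  (only a WIP prefix counts)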
4266 @property
4263 @property
4267 def title_safe(self):
4264 def title_safe(self):
4268 return self.title\
4265 return self.title\
4269 .replace('{', '{{')\
4266 .replace('{', '{{')\
4270 .replace('}', '}}')
4267 .replace('}', '}}')
4271
4268
4272 @hybrid_property
4269 @hybrid_property
4273 def description_safe(self):
4270 def description_safe(self):
4274 from rhodecode.lib import helpers as h
4271 from rhodecode.lib import helpers as h
4275 return h.escape(self.description)
4272 return h.escape(self.description)
4276
4273
4277 @hybrid_property
4274 @hybrid_property
4278 def revisions(self):
4275 def revisions(self):
4279 return self._revisions.split(':') if self._revisions else []
4276 return self._revisions.split(':') if self._revisions else []
4280
4277
4281 @revisions.setter
4278 @revisions.setter
4282 def revisions(self, val):
4279 def revisions(self, val):
4283 self._revisions = ':'.join(val)
4280 self._revisions = ':'.join(val)
4284
4281
4285 @hybrid_property
4282 @hybrid_property
4286 def last_merge_status(self):
4283 def last_merge_status(self):
4287 return safe_int(self._last_merge_status)
4284 return safe_int(self._last_merge_status)
4288
4285
4289 @last_merge_status.setter
4286 @last_merge_status.setter
4290 def last_merge_status(self, val):
4287 def last_merge_status(self, val):
4291 self._last_merge_status = val
4288 self._last_merge_status = val
4292
4289
4293 @declared_attr
4290 @declared_attr
4294 def author(cls):
4291 def author(cls):
4295 return relationship(
4292 return relationship(
4296 'User', lazy='joined',
4293 'User', lazy='joined',
4297 # TODO: enabling back_populates='user_pull_requests' causes a problem here for some reason
4294 # TODO: enabling back_populates='user_pull_requests' causes a problem here for some reason
4298 #back_populates='user_pull_requests'
4295 #back_populates='user_pull_requests'
4299 )
4296 )
4300
4297
4301 @declared_attr
4298 @declared_attr
4302 def source_repo(cls):
4299 def source_repo(cls):
4303 return relationship(
4300 return relationship(
4304 'Repository',
4301 'Repository',
4305 primaryjoin=f'{cls.__name__}.source_repo_id==Repository.repo_id',
4302 primaryjoin=f'{cls.__name__}.source_repo_id==Repository.repo_id',
4306 overlaps="pr_source"
4303 overlaps="pr_source"
4307 )
4304 )
4308
4305
4309 @property
4306 @property
4310 def source_ref_parts(self):
4307 def source_ref_parts(self):
4311 return self.unicode_to_reference(self.source_ref)
4308 return self.unicode_to_reference(self.source_ref)
4312
4309
4313 @declared_attr
4310 @declared_attr
4314 def target_repo(cls):
4311 def target_repo(cls):
4315 return relationship(
4312 return relationship(
4316 'Repository',
4313 'Repository',
4317 primaryjoin=f'{cls.__name__}.target_repo_id==Repository.repo_id',
4314 primaryjoin=f'{cls.__name__}.target_repo_id==Repository.repo_id',
4318 overlaps="pr_target"
4315 overlaps="pr_target"
4319 )
4316 )
4320
4317
4321 @property
4318 @property
4322 def target_ref_parts(self):
4319 def target_ref_parts(self):
4323 return self.unicode_to_reference(self.target_ref)
4320 return self.unicode_to_reference(self.target_ref)
4324
4321
4325 @property
4322 @property
4326 def shadow_merge_ref(self):
4323 def shadow_merge_ref(self):
4327 return self.unicode_to_reference(self._shadow_merge_ref)
4324 return self.unicode_to_reference(self._shadow_merge_ref)
4328
4325
4329 @shadow_merge_ref.setter
4326 @shadow_merge_ref.setter
4330 def shadow_merge_ref(self, ref):
4327 def shadow_merge_ref(self, ref):
4331 self._shadow_merge_ref = self.reference_to_unicode(ref)
4328 self._shadow_merge_ref = self.reference_to_unicode(ref)
4332
4329
4333 @staticmethod
4330 @staticmethod
4334 def unicode_to_reference(raw):
4331 def unicode_to_reference(raw):
4335 return unicode_to_reference(raw)
4332 return unicode_to_reference(raw)
4336
4333
4337 @staticmethod
4334 @staticmethod
4338 def reference_to_unicode(ref):
4335 def reference_to_unicode(ref):
4339 return reference_to_unicode(ref)
4336 return reference_to_unicode(ref)
4340
4337
4341 def get_api_data(self, with_merge_state=True):
4338 def get_api_data(self, with_merge_state=True):
4342 from rhodecode.model.pull_request import PullRequestModel
4339 from rhodecode.model.pull_request import PullRequestModel
4343
4340
4344 pull_request = self
4341 pull_request = self
4345 if with_merge_state:
4342 if with_merge_state:
4346 merge_response, merge_status, msg = \
4343 merge_response, merge_status, msg = \
4347 PullRequestModel().merge_status(pull_request)
4344 PullRequestModel().merge_status(pull_request)
4348 merge_state = {
4345 merge_state = {
4349 'status': merge_status,
4346 'status': merge_status,
4350 'message': safe_str(msg),
4347 'message': safe_str(msg),
4351 }
4348 }
4352 else:
4349 else:
4353 merge_state = {'status': 'not_available',
4350 merge_state = {'status': 'not_available',
4354 'message': 'not_available'}
4351 'message': 'not_available'}
4355
4352
4356 merge_data = {
4353 merge_data = {
4357 'clone_url': PullRequestModel().get_shadow_clone_url(pull_request),
4354 'clone_url': PullRequestModel().get_shadow_clone_url(pull_request),
4358 'reference': (
4355 'reference': (
4359 pull_request.shadow_merge_ref.asdict()
4356 pull_request.shadow_merge_ref.asdict()
4360 if pull_request.shadow_merge_ref else None),
4357 if pull_request.shadow_merge_ref else None),
4361 }
4358 }
4362
4359
4363 data = {
4360 data = {
4364 'pull_request_id': pull_request.pull_request_id,
4361 'pull_request_id': pull_request.pull_request_id,
4365 'url': PullRequestModel().get_url(pull_request),
4362 'url': PullRequestModel().get_url(pull_request),
4366 'title': pull_request.title,
4363 'title': pull_request.title,
4367 'description': pull_request.description,
4364 'description': pull_request.description,
4368 'status': pull_request.status,
4365 'status': pull_request.status,
4369 'state': pull_request.pull_request_state,
4366 'state': pull_request.pull_request_state,
4370 'created_on': pull_request.created_on,
4367 'created_on': pull_request.created_on,
4371 'updated_on': pull_request.updated_on,
4368 'updated_on': pull_request.updated_on,
4372 'commit_ids': pull_request.revisions,
4369 'commit_ids': pull_request.revisions,
4373 'review_status': pull_request.calculated_review_status(),
4370 'review_status': pull_request.calculated_review_status(),
4374 'mergeable': merge_state,
4371 'mergeable': merge_state,
4375 'source': {
4372 'source': {
4376 'clone_url': pull_request.source_repo.clone_url(),
4373 'clone_url': pull_request.source_repo.clone_url(),
4377 'repository': pull_request.source_repo.repo_name,
4374 'repository': pull_request.source_repo.repo_name,
4378 'reference': {
4375 'reference': {
4379 'name': pull_request.source_ref_parts.name,
4376 'name': pull_request.source_ref_parts.name,
4380 'type': pull_request.source_ref_parts.type,
4377 'type': pull_request.source_ref_parts.type,
4381 'commit_id': pull_request.source_ref_parts.commit_id,
4378 'commit_id': pull_request.source_ref_parts.commit_id,
4382 },
4379 },
4383 },
4380 },
4384 'target': {
4381 'target': {
4385 'clone_url': pull_request.target_repo.clone_url(),
4382 'clone_url': pull_request.target_repo.clone_url(),
4386 'repository': pull_request.target_repo.repo_name,
4383 'repository': pull_request.target_repo.repo_name,
4387 'reference': {
4384 'reference': {
4388 'name': pull_request.target_ref_parts.name,
4385 'name': pull_request.target_ref_parts.name,
4389 'type': pull_request.target_ref_parts.type,
4386 'type': pull_request.target_ref_parts.type,
4390 'commit_id': pull_request.target_ref_parts.commit_id,
4387 'commit_id': pull_request.target_ref_parts.commit_id,
4391 },
4388 },
4392 },
4389 },
4393 'merge': merge_data,
4390 'merge': merge_data,
4394 'author': pull_request.author.get_api_data(include_secrets=False,
4391 'author': pull_request.author.get_api_data(include_secrets=False,
4395 details='basic'),
4392 details='basic'),
4396 'reviewers': [
4393 'reviewers': [
4397 {
4394 {
4398 'user': reviewer.get_api_data(include_secrets=False,
4395 'user': reviewer.get_api_data(include_secrets=False,
4399 details='basic'),
4396 details='basic'),
4400 'reasons': reasons,
4397 'reasons': reasons,
4401 'review_status': st[0][1].status if st else 'not_reviewed',
4398 'review_status': st[0][1].status if st else 'not_reviewed',
4402 }
4399 }
4403 for obj, reviewer, reasons, mandatory, st in
4400 for obj, reviewer, reasons, mandatory, st in
4404 pull_request.reviewers_statuses()
4401 pull_request.reviewers_statuses()
4405 ]
4402 ]
4406 }
4403 }
4407
4404
4408 return data
4405 return data
4409
4406
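# --- Illustrative sketch (not part of the original source): shape of the dict
# --- returned by get_api_data() above, trimmed to a few keys; values are made up.
#
#     data = pull_request.get_api_data(with_merge_state=False)
#     # {
#     #     'pull_request_id': 1,
#     #     'status': 'new',
#     #     'state': 'created',
#     #     'mergeable': {'status': 'not_available', 'message': 'not_available'},
#     #     'source': {'reference': {'name': 'feature-x', 'type': 'branch', ...}},
#     #     ...
#     # }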
4410 def set_state(self, pull_request_state, final_state=None):
4407 def set_state(self, pull_request_state, final_state=None):
4411 """
4408 """
4412 # Goes from the initial state to `pull_request_state` and back to the initial state on exit.
4409 # Goes from the initial state to `pull_request_state` and back to the initial state on exit.
4413 # The state restored on exit can be changed by specifying final_state=
4410 # The state restored on exit can be changed by specifying final_state=
4414 with pull_request.set_state(PullRequest.STATE_UPDATING):
4411 with pull_request.set_state(PullRequest.STATE_UPDATING):
4415 pull_request.merge()
4412 pull_request.merge()
4416
4413
4417 :param pull_request_state: state to switch to while the block runs
4414 :param pull_request_state: state to switch to while the block runs
4418 :param final_state: optional state to restore on exit instead of the original one
4415 :param final_state: optional state to restore on exit instead of the original one
4419
4416
4420 """
4417 """
4421
4418
4422 return _SetState(self, pull_request_state, back_state=final_state)
4419 return _SetState(self, pull_request_state, back_state=final_state)
4423
4420
4424
4421
4425 class PullRequest(Base, _PullRequestBase):
4422 class PullRequest(Base, _PullRequestBase):
4426 __tablename__ = 'pull_requests'
4423 __tablename__ = 'pull_requests'
4427 __table_args__ = (
4424 __table_args__ = (
4428 base_table_args,
4425 base_table_args,
4429 )
4426 )
4430 LATEST_VER = 'latest'
4427 LATEST_VER = 'latest'
4431
4428
4432 pull_request_id = Column(
4429 pull_request_id = Column(
4433 'pull_request_id', Integer(), nullable=False, primary_key=True)
4430 'pull_request_id', Integer(), nullable=False, primary_key=True)
4434
4431
4435 def __repr__(self):
4432 def __repr__(self):
4436 if self.pull_request_id:
4433 if self.pull_request_id:
4437 return f'<DB:PullRequest #{self.pull_request_id}>'
4434 return f'<DB:PullRequest #{self.pull_request_id}>'
4438 else:
4435 else:
4439 return f'<DB:PullRequest at {id(self)!r}>'
4436 return f'<DB:PullRequest at {id(self)!r}>'
4440
4437
4441 reviewers = relationship('PullRequestReviewers', cascade="all, delete-orphan", back_populates='pull_request')
4438 reviewers = relationship('PullRequestReviewers', cascade="all, delete-orphan", back_populates='pull_request')
4442 statuses = relationship('ChangesetStatus', cascade="all, delete-orphan", back_populates='pull_request')
4439 statuses = relationship('ChangesetStatus', cascade="all, delete-orphan", back_populates='pull_request')
4443 comments = relationship('ChangesetComment', cascade="all, delete-orphan", back_populates='pull_request')
4440 comments = relationship('ChangesetComment', cascade="all, delete-orphan", back_populates='pull_request')
4444 versions = relationship('PullRequestVersion', cascade="all, delete-orphan", lazy='dynamic', back_populates='pull_request')
4441 versions = relationship('PullRequestVersion', cascade="all, delete-orphan", lazy='dynamic', back_populates='pull_request')
4445
4442
4446 @classmethod
4443 @classmethod
4447 def get_pr_display_object(cls, pull_request_obj, org_pull_request_obj,
4444 def get_pr_display_object(cls, pull_request_obj, org_pull_request_obj,
4448 internal_methods=None):
4445 internal_methods=None):
4449
4446
4450 class PullRequestDisplay(object):
4447 class PullRequestDisplay(object):
4451 """
4448 """
4452 Special object wrapper for showing PullRequest data via Versions
4449 Special object wrapper for showing PullRequest data via Versions
4453 It mimics the PR object as closely as possible. This is a read-only object
4450 It mimics the PR object as closely as possible. This is a read-only object
4454 used only for display.
4451 used only for display.
4455 """
4452 """
4456
4453
4457 def __init__(self, attrs, internal=None):
4454 def __init__(self, attrs, internal=None):
4458 self.attrs = attrs
4455 self.attrs = attrs
4459 # internal entries have priority over the ones given via attrs
4456 # internal entries have priority over the ones given via attrs
4460 self.internal = internal or ['versions']
4457 self.internal = internal or ['versions']
4461
4458
4462 def __getattr__(self, item):
4459 def __getattr__(self, item):
4463 if item in self.internal:
4460 if item in self.internal:
4464 return getattr(self, item)
4461 return getattr(self, item)
4465 try:
4462 try:
4466 return self.attrs[item]
4463 return self.attrs[item]
4467 except KeyError:
4464 except KeyError:
4468 raise AttributeError(
4465 raise AttributeError(
4469 '%s object has no attribute %s' % (self, item))
4466 '%s object has no attribute %s' % (self, item))
4470
4467
4471 def __repr__(self):
4468 def __repr__(self):
4472 pr_id = self.attrs.get('pull_request_id')
4469 pr_id = self.attrs.get('pull_request_id')
4473 return f'<DB:PullRequestDisplay #{pr_id}>'
4470 return f'<DB:PullRequestDisplay #{pr_id}>'
4474
4471
4475 def versions(self):
4472 def versions(self):
4476 return pull_request_obj.versions.order_by(
4473 return pull_request_obj.versions.order_by(
4477 PullRequestVersion.pull_request_version_id).all()
4474 PullRequestVersion.pull_request_version_id).all()
4478
4475
4479 def is_closed(self):
4476 def is_closed(self):
4480 return pull_request_obj.is_closed()
4477 return pull_request_obj.is_closed()
4481
4478
4482 def is_state_changing(self):
4479 def is_state_changing(self):
4483 return pull_request_obj.is_state_changing()
4480 return pull_request_obj.is_state_changing()
4484
4481
4485 @property
4482 @property
4486 def pull_request_version_id(self):
4483 def pull_request_version_id(self):
4487 return getattr(pull_request_obj, 'pull_request_version_id', None)
4484 return getattr(pull_request_obj, 'pull_request_version_id', None)
4488
4485
4489 @property
4486 @property
4490 def pull_request_last_version(self):
4487 def pull_request_last_version(self):
4491 return pull_request_obj.pull_request_last_version
4488 return pull_request_obj.pull_request_last_version
4492
4489
4493 attrs = StrictAttributeDict(pull_request_obj.get_api_data(with_merge_state=False))
4490 attrs = StrictAttributeDict(pull_request_obj.get_api_data(with_merge_state=False))
4494
4491
4495 attrs.author = StrictAttributeDict(
4492 attrs.author = StrictAttributeDict(
4496 pull_request_obj.author.get_api_data())
4493 pull_request_obj.author.get_api_data())
4497 if pull_request_obj.target_repo:
4494 if pull_request_obj.target_repo:
4498 attrs.target_repo = StrictAttributeDict(
4495 attrs.target_repo = StrictAttributeDict(
4499 pull_request_obj.target_repo.get_api_data())
4496 pull_request_obj.target_repo.get_api_data())
4500 attrs.target_repo.clone_url = pull_request_obj.target_repo.clone_url
4497 attrs.target_repo.clone_url = pull_request_obj.target_repo.clone_url
4501
4498
4502 if pull_request_obj.source_repo:
4499 if pull_request_obj.source_repo:
4503 attrs.source_repo = StrictAttributeDict(
4500 attrs.source_repo = StrictAttributeDict(
4504 pull_request_obj.source_repo.get_api_data())
4501 pull_request_obj.source_repo.get_api_data())
4505 attrs.source_repo.clone_url = pull_request_obj.source_repo.clone_url
4502 attrs.source_repo.clone_url = pull_request_obj.source_repo.clone_url
4506
4503
4507 attrs.source_ref_parts = pull_request_obj.source_ref_parts
4504 attrs.source_ref_parts = pull_request_obj.source_ref_parts
4508 attrs.target_ref_parts = pull_request_obj.target_ref_parts
4505 attrs.target_ref_parts = pull_request_obj.target_ref_parts
4509 attrs.revisions = pull_request_obj.revisions
4506 attrs.revisions = pull_request_obj.revisions
4510 attrs.common_ancestor_id = pull_request_obj.common_ancestor_id
4507 attrs.common_ancestor_id = pull_request_obj.common_ancestor_id
4511 attrs.shadow_merge_ref = org_pull_request_obj.shadow_merge_ref
4508 attrs.shadow_merge_ref = org_pull_request_obj.shadow_merge_ref
4512 attrs.reviewer_data = org_pull_request_obj.reviewer_data
4509 attrs.reviewer_data = org_pull_request_obj.reviewer_data
4513 attrs.reviewer_data_json = org_pull_request_obj.reviewer_data_json
4510 attrs.reviewer_data_json = org_pull_request_obj.reviewer_data_json
4514
4511
4515 return PullRequestDisplay(attrs, internal=internal_methods)
4512 return PullRequestDisplay(attrs, internal=internal_methods)
4516
4513
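# --- Illustrative sketch (not part of the original source): attribute lookup on
# --- the PullRequestDisplay wrapper returned above. Plain keys resolve from the
# --- captured attrs dict, while names listed in `internal` (default: ['versions'])
# --- and the defined methods fall through to the original pull request object.
#
#     display = PullRequest.get_pr_display_object(pr_version, pr)
#     display.title        # -> value copied from get_api_data() at creation time
#     display.versions()   # -> live query against the original PullRequest versions
#     display.no_such_key  # -> AttributeError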
4517 def is_closed(self):
4514 def is_closed(self):
4518 return self.status == self.STATUS_CLOSED
4515 return self.status == self.STATUS_CLOSED
4519
4516
4520 def is_state_changing(self):
4517 def is_state_changing(self):
4521 return self.pull_request_state != PullRequest.STATE_CREATED
4518 return self.pull_request_state != PullRequest.STATE_CREATED
4522
4519
4523 def __json__(self):
4520 def __json__(self):
4524 return {
4521 return {
4525 'revisions': self.revisions,
4522 'revisions': self.revisions,
4526 'versions': self.versions_count
4523 'versions': self.versions_count
4527 }
4524 }
4528
4525
4529 def calculated_review_status(self):
4526 def calculated_review_status(self):
4530 from rhodecode.model.changeset_status import ChangesetStatusModel
4527 from rhodecode.model.changeset_status import ChangesetStatusModel
4531 return ChangesetStatusModel().calculated_review_status(self)
4528 return ChangesetStatusModel().calculated_review_status(self)
4532
4529
4533 def reviewers_statuses(self, user=None):
4530 def reviewers_statuses(self, user=None):
4534 from rhodecode.model.changeset_status import ChangesetStatusModel
4531 from rhodecode.model.changeset_status import ChangesetStatusModel
4535 return ChangesetStatusModel().reviewers_statuses(self, user=user)
4532 return ChangesetStatusModel().reviewers_statuses(self, user=user)
4536
4533
4537 def get_pull_request_reviewers(self, role=None):
4534 def get_pull_request_reviewers(self, role=None):
4538 qry = PullRequestReviewers.query()\
4535 qry = PullRequestReviewers.query()\
4539 .filter(PullRequestReviewers.pull_request_id == self.pull_request_id)
4536 .filter(PullRequestReviewers.pull_request_id == self.pull_request_id)
4540 if role:
4537 if role:
4541 qry = qry.filter(PullRequestReviewers.role == role)
4538 qry = qry.filter(PullRequestReviewers.role == role)
4542
4539
4543 return qry.all()
4540 return qry.all()
4544
4541
4545 @property
4542 @property
4546 def reviewers_count(self):
4543 def reviewers_count(self):
4547 qry = PullRequestReviewers.query()\
4544 qry = PullRequestReviewers.query()\
4548 .filter(PullRequestReviewers.pull_request_id == self.pull_request_id)\
4545 .filter(PullRequestReviewers.pull_request_id == self.pull_request_id)\
4549 .filter(PullRequestReviewers.role == PullRequestReviewers.ROLE_REVIEWER)
4546 .filter(PullRequestReviewers.role == PullRequestReviewers.ROLE_REVIEWER)
4550 return qry.count()
4547 return qry.count()
4551
4548
4552 @property
4549 @property
4553 def observers_count(self):
4550 def observers_count(self):
4554 qry = PullRequestReviewers.query()\
4551 qry = PullRequestReviewers.query()\
4555 .filter(PullRequestReviewers.pull_request_id == self.pull_request_id)\
4552 .filter(PullRequestReviewers.pull_request_id == self.pull_request_id)\
4556 .filter(PullRequestReviewers.role == PullRequestReviewers.ROLE_OBSERVER)
4553 .filter(PullRequestReviewers.role == PullRequestReviewers.ROLE_OBSERVER)
4557 return qry.count()
4554 return qry.count()
4558
4555
4559 def observers(self):
4556 def observers(self):
4560 qry = PullRequestReviewers.query()\
4557 qry = PullRequestReviewers.query()\
4561 .filter(PullRequestReviewers.pull_request_id == self.pull_request_id)\
4558 .filter(PullRequestReviewers.pull_request_id == self.pull_request_id)\
4562 .filter(PullRequestReviewers.role == PullRequestReviewers.ROLE_OBSERVER)\
4559 .filter(PullRequestReviewers.role == PullRequestReviewers.ROLE_OBSERVER)\
4563 .all()
4560 .all()
4564
4561
4565 for entry in qry:
4562 for entry in qry:
4566 yield entry, entry.user
4563 yield entry, entry.user
4567
4564
4568 @property
4565 @property
4569 def workspace_id(self):
4566 def workspace_id(self):
4570 from rhodecode.model.pull_request import PullRequestModel
4567 from rhodecode.model.pull_request import PullRequestModel
4571 return PullRequestModel()._workspace_id(self)
4568 return PullRequestModel()._workspace_id(self)
4572
4569
4573 def get_shadow_repo(self):
4570 def get_shadow_repo(self):
4574 workspace_id = self.workspace_id
4571 workspace_id = self.workspace_id
4575 shadow_repository_path = self.target_repo.get_shadow_repository_path(workspace_id)
4572 shadow_repository_path = self.target_repo.get_shadow_repository_path(workspace_id)
4576 if os.path.isdir(shadow_repository_path):
4573 if os.path.isdir(shadow_repository_path):
4577 vcs_obj = self.target_repo.scm_instance()
4574 vcs_obj = self.target_repo.scm_instance()
4578 return vcs_obj.get_shadow_instance(shadow_repository_path)
4575 return vcs_obj.get_shadow_instance(shadow_repository_path)
4579
4576
4580 @property
4577 @property
4581 def versions_count(self):
4578 def versions_count(self):
4582 """
4579 """
4583 return the number of versions this PR has, e.g. a PR that has been
4580 return the number of versions this PR has, e.g. a PR that has been
4584 updated once will have 2 versions
4581 updated once will have 2 versions
4585 """
4582 """
4586 return self.versions.count() + 1
4583 return self.versions.count() + 1
4587
4584
4588 @property
4585 @property
4589 def pull_request_last_version(self):
4586 def pull_request_last_version(self):
4590 return self.versions_count
4587 return self.versions_count
4591
4588
4592
4589
4593 class PullRequestVersion(Base, _PullRequestBase):
4590 class PullRequestVersion(Base, _PullRequestBase):
4594 __tablename__ = 'pull_request_versions'
4591 __tablename__ = 'pull_request_versions'
4595 __table_args__ = (
4592 __table_args__ = (
4596 base_table_args,
4593 base_table_args,
4597 )
4594 )
4598
4595
4599 pull_request_version_id = Column('pull_request_version_id', Integer(), nullable=False, primary_key=True)
4596 pull_request_version_id = Column('pull_request_version_id', Integer(), nullable=False, primary_key=True)
4600 pull_request_id = Column('pull_request_id', Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=False)
4597 pull_request_id = Column('pull_request_id', Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=False)
4601 pull_request = relationship('PullRequest', back_populates='versions')
4598 pull_request = relationship('PullRequest', back_populates='versions')
4602
4599
4603 def __repr__(self):
4600 def __repr__(self):
4604 if self.pull_request_version_id:
4601 if self.pull_request_version_id:
4605 return f'<DB:PullRequestVersion #{self.pull_request_version_id}>'
4602 return f'<DB:PullRequestVersion #{self.pull_request_version_id}>'
4606 else:
4603 else:
4607 return f'<DB:PullRequestVersion at {id(self)!r}>'
4604 return f'<DB:PullRequestVersion at {id(self)!r}>'
4608
4605
4609 @property
4606 @property
4610 def reviewers(self):
4607 def reviewers(self):
4611 return self.pull_request.reviewers
4608 return self.pull_request.reviewers
4612
4609
4613 @property
4610 @property
4614 def versions(self):
4611 def versions(self):
4615 return self.pull_request.versions
4612 return self.pull_request.versions
4616
4613
4617 def is_closed(self):
4614 def is_closed(self):
4618 # calculate from original
4615 # calculate from original
4619 return self.pull_request.status == self.STATUS_CLOSED
4616 return self.pull_request.status == self.STATUS_CLOSED
4620
4617
4621 def is_state_changing(self):
4618 def is_state_changing(self):
4622 return self.pull_request.pull_request_state != PullRequest.STATE_CREATED
4619 return self.pull_request.pull_request_state != PullRequest.STATE_CREATED
4623
4620
4624 def calculated_review_status(self):
4621 def calculated_review_status(self):
4625 return self.pull_request.calculated_review_status()
4622 return self.pull_request.calculated_review_status()
4626
4623
4627 def reviewers_statuses(self):
4624 def reviewers_statuses(self):
4628 return self.pull_request.reviewers_statuses()
4625 return self.pull_request.reviewers_statuses()
4629
4626
4630 def observers(self):
4627 def observers(self):
4631 return self.pull_request.observers()
4628 return self.pull_request.observers()
4632
4629
4633
4630
4634 class PullRequestReviewers(Base, BaseModel):
4631 class PullRequestReviewers(Base, BaseModel):
4635 __tablename__ = 'pull_request_reviewers'
4632 __tablename__ = 'pull_request_reviewers'
4636 __table_args__ = (
4633 __table_args__ = (
4637 base_table_args,
4634 base_table_args,
4638 )
4635 )
4639 ROLE_REVIEWER = 'reviewer'
4636 ROLE_REVIEWER = 'reviewer'
4640 ROLE_OBSERVER = 'observer'
4637 ROLE_OBSERVER = 'observer'
4641 ROLES = [ROLE_REVIEWER, ROLE_OBSERVER]
4638 ROLES = [ROLE_REVIEWER, ROLE_OBSERVER]
4642
4639
4643 @hybrid_property
4640 @hybrid_property
4644 def reasons(self):
4641 def reasons(self):
4645 if not self._reasons:
4642 if not self._reasons:
4646 return []
4643 return []
4647 return self._reasons
4644 return self._reasons
4648
4645
4649 @reasons.setter
4646 @reasons.setter
4650 def reasons(self, val):
4647 def reasons(self, val):
4651 val = val or []
4648 val = val or []
4652 if any(not isinstance(x, str) for x in val):
4649 if any(not isinstance(x, str) for x in val):
4653 raise Exception('invalid reasons type, must be list of strings')
4650 raise Exception('invalid reasons type, must be list of strings')
4654 self._reasons = val
4651 self._reasons = val
4655
4652
4656 pull_requests_reviewers_id = Column(
4653 pull_requests_reviewers_id = Column(
4657 'pull_requests_reviewers_id', Integer(), nullable=False,
4654 'pull_requests_reviewers_id', Integer(), nullable=False,
4658 primary_key=True)
4655 primary_key=True)
4659 pull_request_id = Column(
4656 pull_request_id = Column(
4660 "pull_request_id", Integer(),
4657 "pull_request_id", Integer(),
4661 ForeignKey('pull_requests.pull_request_id'), nullable=False)
4658 ForeignKey('pull_requests.pull_request_id'), nullable=False)
4662 user_id = Column(
4659 user_id = Column(
4663 "user_id", Integer(), ForeignKey('users.user_id'), nullable=True)
4660 "user_id", Integer(), ForeignKey('users.user_id'), nullable=True)
4664 _reasons = Column(
4661 _reasons = Column(
4665 'reason', MutationList.as_mutable(
4662 'reason', MutationList.as_mutable(
4666 JsonType('list', dialect_map=dict(mysql=UnicodeText(16384)))))
4663 JsonType('list', dialect_map=dict(mysql=UnicodeText(16384)))))
4667
4664
4668 mandatory = Column("mandatory", Boolean(), nullable=False, default=False)
4665 mandatory = Column("mandatory", Boolean(), nullable=False, default=False)
4669 role = Column('role', Unicode(255), nullable=True, default=ROLE_REVIEWER)
4666 role = Column('role', Unicode(255), nullable=True, default=ROLE_REVIEWER)
4670
4667
4671 user = relationship('User')
4668 user = relationship('User')
4672 pull_request = relationship('PullRequest', back_populates='reviewers')
4669 pull_request = relationship('PullRequest', back_populates='reviewers')
4673
4670
4674 rule_data = Column(
4671 rule_data = Column(
4675 'rule_data_json',
4672 'rule_data_json',
4676 JsonType(dialect_map=dict(mysql=UnicodeText(16384))))
4673 JsonType(dialect_map=dict(mysql=UnicodeText(16384))))
4677
4674
4678 def rule_user_group_data(self):
4675 def rule_user_group_data(self):
4679 """
4676 """
4680 Returns the voting user group rule data for this reviewer
4677 Returns the voting user group rule data for this reviewer
4681 """
4678 """
4682
4679
4683 if self.rule_data and 'vote_rule' in self.rule_data:
4680 if self.rule_data and 'vote_rule' in self.rule_data:
4684 user_group_data = {}
4681 user_group_data = {}
4685 if 'rule_user_group_entry_id' in self.rule_data:
4682 if 'rule_user_group_entry_id' in self.rule_data:
4686 # means a group with voting rules !
4683 # means a group with voting rules !
4687 user_group_data['id'] = self.rule_data['rule_user_group_entry_id']
4684 user_group_data['id'] = self.rule_data['rule_user_group_entry_id']
4688 user_group_data['name'] = self.rule_data['rule_name']
4685 user_group_data['name'] = self.rule_data['rule_name']
4689 user_group_data['vote_rule'] = self.rule_data['vote_rule']
4686 user_group_data['vote_rule'] = self.rule_data['vote_rule']
4690
4687
4691 return user_group_data
4688 return user_group_data
4692
4689
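# --- Illustrative sketch (not part of the original source): shape of the dict
# --- returned by rule_user_group_data() above when the reviewer entry was added
# --- by a user-group voting rule; the values below are made up.
#
#     reviewer.rule_user_group_data()
#     # {'id': 7, 'name': 'db-team', 'vote_rule': 2}
#     # returns None when rule_data has no 'vote_rule' entry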
4693 @classmethod
4690 @classmethod
4694 def get_pull_request_reviewers(cls, pull_request_id, role=None):
4691 def get_pull_request_reviewers(cls, pull_request_id, role=None):
4695 qry = PullRequestReviewers.query()\
4692 qry = PullRequestReviewers.query()\
4696 .filter(PullRequestReviewers.pull_request_id == pull_request_id)
4693 .filter(PullRequestReviewers.pull_request_id == pull_request_id)
4697 if role:
4694 if role:
4698 qry = qry.filter(PullRequestReviewers.role == role)
4695 qry = qry.filter(PullRequestReviewers.role == role)
4699
4696
4700 return qry.all()
4697 return qry.all()
4701
4698
4702 def __repr__(self):
4699 def __repr__(self):
4703 return f"<{self.cls_name}('id:{self.pull_requests_reviewers_id}')>"
4700 return f"<{self.cls_name}('id:{self.pull_requests_reviewers_id}')>"
4704
4701
4705
4702
4706 class Notification(Base, BaseModel):
4703 class Notification(Base, BaseModel):
4707 __tablename__ = 'notifications'
4704 __tablename__ = 'notifications'
4708 __table_args__ = (
4705 __table_args__ = (
4709 Index('notification_type_idx', 'type'),
4706 Index('notification_type_idx', 'type'),
4710 base_table_args,
4707 base_table_args,
4711 )
4708 )
4712
4709
4713 TYPE_CHANGESET_COMMENT = 'cs_comment'
4710 TYPE_CHANGESET_COMMENT = 'cs_comment'
4714 TYPE_MESSAGE = 'message'
4711 TYPE_MESSAGE = 'message'
4715 TYPE_MENTION = 'mention'
4712 TYPE_MENTION = 'mention'
4716 TYPE_REGISTRATION = 'registration'
4713 TYPE_REGISTRATION = 'registration'
4717 TYPE_PULL_REQUEST = 'pull_request'
4714 TYPE_PULL_REQUEST = 'pull_request'
4718 TYPE_PULL_REQUEST_COMMENT = 'pull_request_comment'
4715 TYPE_PULL_REQUEST_COMMENT = 'pull_request_comment'
4719 TYPE_PULL_REQUEST_UPDATE = 'pull_request_update'
4716 TYPE_PULL_REQUEST_UPDATE = 'pull_request_update'
4720
4717
4721 notification_id = Column('notification_id', Integer(), nullable=False, primary_key=True)
4718 notification_id = Column('notification_id', Integer(), nullable=False, primary_key=True)
4722 subject = Column('subject', Unicode(512), nullable=True)
4719 subject = Column('subject', Unicode(512), nullable=True)
4723 body = Column('body', UnicodeText().with_variant(UnicodeText(50000), 'mysql'), nullable=True)
4720 body = Column('body', UnicodeText().with_variant(UnicodeText(50000), 'mysql'), nullable=True)
4724 created_by = Column("created_by", Integer(), ForeignKey('users.user_id'), nullable=True)
4721 created_by = Column("created_by", Integer(), ForeignKey('users.user_id'), nullable=True)
4725 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
4722 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
4726 type_ = Column('type', Unicode(255))
4723 type_ = Column('type', Unicode(255))
4727
4724
4728 created_by_user = relationship('User', back_populates='user_created_notifications')
4725 created_by_user = relationship('User', back_populates='user_created_notifications')
4729 notifications_to_users = relationship('UserNotification', lazy='joined', cascade="all, delete-orphan", back_populates='notification')
4726 notifications_to_users = relationship('UserNotification', lazy='joined', cascade="all, delete-orphan", back_populates='notification')
4730
4727
4731 @property
4728 @property
4732 def recipients(self):
4729 def recipients(self):
4733 return [x.user for x in UserNotification.query()\
4730 return [x.user for x in UserNotification.query()\
4734 .filter(UserNotification.notification == self)\
4731 .filter(UserNotification.notification == self)\
4735 .order_by(UserNotification.user_id.asc()).all()]
4732 .order_by(UserNotification.user_id.asc()).all()]
4736
4733
4737 @classmethod
4734 @classmethod
4738 def create(cls, created_by, subject, body, recipients, type_=None):
4735 def create(cls, created_by, subject, body, recipients, type_=None):
4739 if type_ is None:
4736 if type_ is None:
4740 type_ = Notification.TYPE_MESSAGE
4737 type_ = Notification.TYPE_MESSAGE
4741
4738
4742 notification = cls()
4739 notification = cls()
4743 notification.created_by_user = created_by
4740 notification.created_by_user = created_by
4744 notification.subject = subject
4741 notification.subject = subject
4745 notification.body = body
4742 notification.body = body
4746 notification.type_ = type_
4743 notification.type_ = type_
4747 notification.created_on = datetime.datetime.now()
4744 notification.created_on = datetime.datetime.now()
4748
4745
4749 # For each recipient, link the created notification to their account
4746 # For each recipient, link the created notification to their account
4750 for u in recipients:
4747 for u in recipients:
4751 assoc = UserNotification()
4748 assoc = UserNotification()
4752 assoc.user_id = u.user_id
4749 assoc.user_id = u.user_id
4753 assoc.notification = notification
4750 assoc.notification = notification
4754
4751
4755 # if created_by is among the recipients, mark their notification
4752 # if created_by is among the recipients, mark their notification
4756 # as read
4753 # as read
4757 if u.user_id == created_by.user_id:
4754 if u.user_id == created_by.user_id:
4758 assoc.read = True
4755 assoc.read = True
4759 Session().add(assoc)
4756 Session().add(assoc)
4760
4757
4761 Session().add(notification)
4758 Session().add(notification)
4762
4759
4763 return notification
4760 return notification
4764
4761
4765
4762
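# --- Illustrative sketch (not part of the original source): creating a message
# --- notification with Notification.create() above. The caller is expected to
# --- commit the session afterwards; the user objects below are assumed to exist.
#
#     notification = Notification.create(
#         created_by=admin_user, subject='maintenance window',
#         body='short downtime tonight', recipients=[user_a, admin_user],
#         type_=Notification.TYPE_MESSAGE)
#     Session().commit()
#     # admin_user's own copy is already marked as read by create()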
4766 class UserNotification(Base, BaseModel):
4763 class UserNotification(Base, BaseModel):
4767 __tablename__ = 'user_to_notification'
4764 __tablename__ = 'user_to_notification'
4768 __table_args__ = (
4765 __table_args__ = (
4769 UniqueConstraint('user_id', 'notification_id'),
4766 UniqueConstraint('user_id', 'notification_id'),
4770 base_table_args
4767 base_table_args
4771 )
4768 )
4772
4769
4773 user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), primary_key=True)
4770 user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), primary_key=True)
4774 notification_id = Column("notification_id", Integer(), ForeignKey('notifications.notification_id'), primary_key=True)
4771 notification_id = Column("notification_id", Integer(), ForeignKey('notifications.notification_id'), primary_key=True)
4775 read = Column('read', Boolean, default=False)
4772 read = Column('read', Boolean, default=False)
4776 sent_on = Column('sent_on', DateTime(timezone=False), nullable=True, unique=None)
4773 sent_on = Column('sent_on', DateTime(timezone=False), nullable=True, unique=None)
4777
4774
4778 user = relationship('User', lazy="joined", back_populates='notifications')
4775 user = relationship('User', lazy="joined", back_populates='notifications')
4779 notification = relationship('Notification', lazy="joined", order_by=lambda: Notification.created_on.desc(), back_populates='notifications_to_users')
4776 notification = relationship('Notification', lazy="joined", order_by=lambda: Notification.created_on.desc(), back_populates='notifications_to_users')
4780
4777
4781 def mark_as_read(self):
4778 def mark_as_read(self):
4782 self.read = True
4779 self.read = True
4783 Session().add(self)
4780 Session().add(self)
4784
4781
4785
4782
4786 class UserNotice(Base, BaseModel):
4783 class UserNotice(Base, BaseModel):
4787 __tablename__ = 'user_notices'
4784 __tablename__ = 'user_notices'
4788 __table_args__ = (
4785 __table_args__ = (
4789 base_table_args
4786 base_table_args
4790 )
4787 )
4791
4788
4792 NOTIFICATION_TYPE_MESSAGE = 'message'
4789 NOTIFICATION_TYPE_MESSAGE = 'message'
4793 NOTIFICATION_TYPE_NOTICE = 'notice'
4790 NOTIFICATION_TYPE_NOTICE = 'notice'
4794
4791
4795 NOTIFICATION_LEVEL_INFO = 'info'
4792 NOTIFICATION_LEVEL_INFO = 'info'
4796 NOTIFICATION_LEVEL_WARNING = 'warning'
4793 NOTIFICATION_LEVEL_WARNING = 'warning'
4797 NOTIFICATION_LEVEL_ERROR = 'error'
4794 NOTIFICATION_LEVEL_ERROR = 'error'
4798
4795
4799 user_notice_id = Column('gist_id', Integer(), primary_key=True)
4796 user_notice_id = Column('gist_id', Integer(), primary_key=True)
4800
4797
4801 notice_subject = Column('notice_subject', Unicode(512), nullable=True)
4798 notice_subject = Column('notice_subject', Unicode(512), nullable=True)
4802 notice_body = Column('notice_body', UnicodeText().with_variant(UnicodeText(50000), 'mysql'), nullable=True)
4799 notice_body = Column('notice_body', UnicodeText().with_variant(UnicodeText(50000), 'mysql'), nullable=True)
4803
4800
4804 notice_read = Column('notice_read', Boolean, default=False)
4801 notice_read = Column('notice_read', Boolean, default=False)
4805
4802
4806 notification_level = Column('notification_level', String(1024), default=NOTIFICATION_LEVEL_INFO)
4803 notification_level = Column('notification_level', String(1024), default=NOTIFICATION_LEVEL_INFO)
4807 notification_type = Column('notification_type', String(1024), default=NOTIFICATION_TYPE_NOTICE)
4804 notification_type = Column('notification_type', String(1024), default=NOTIFICATION_TYPE_NOTICE)
4808
4805
4809 notice_created_by = Column('notice_created_by', Integer(), ForeignKey('users.user_id'), nullable=True)
4806 notice_created_by = Column('notice_created_by', Integer(), ForeignKey('users.user_id'), nullable=True)
4810 notice_created_on = Column('notice_created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
4807 notice_created_on = Column('notice_created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
4811
4808
4812 user_id = Column('user_id', Integer(), ForeignKey('users.user_id'))
4809 user_id = Column('user_id', Integer(), ForeignKey('users.user_id'))
4813 user = relationship('User', lazy="joined", primaryjoin='User.user_id==UserNotice.user_id')
4810 user = relationship('User', lazy="joined", primaryjoin='User.user_id==UserNotice.user_id')
4814
4811
4815 @classmethod
4812 @classmethod
4816 def create_for_user(cls, user, subject, body, notice_level=NOTIFICATION_LEVEL_INFO, allow_duplicate=False):
4813 def create_for_user(cls, user, subject, body, notice_level=NOTIFICATION_LEVEL_INFO, allow_duplicate=False):
4817
4814
4818 if notice_level not in [cls.NOTIFICATION_LEVEL_ERROR,
4815 if notice_level not in [cls.NOTIFICATION_LEVEL_ERROR,
4819 cls.NOTIFICATION_LEVEL_WARNING,
4816 cls.NOTIFICATION_LEVEL_WARNING,
4820 cls.NOTIFICATION_LEVEL_INFO]:
4817 cls.NOTIFICATION_LEVEL_INFO]:
4821 return
4818 return
4822
4819
4823 from rhodecode.model.user import UserModel
4820 from rhodecode.model.user import UserModel
4824 user = UserModel().get_user(user)
4821 user = UserModel().get_user(user)
4825
4822
4826 new_notice = UserNotice()
4823 new_notice = UserNotice()
4827 if not allow_duplicate:
4824 if not allow_duplicate:
4828 existing_msg = UserNotice().query() \
4825 existing_msg = UserNotice().query() \
4829 .filter(UserNotice.user == user) \
4826 .filter(UserNotice.user == user) \
4830 .filter(UserNotice.notice_body == body) \
4827 .filter(UserNotice.notice_body == body) \
4831 .filter(UserNotice.notice_read == false()) \
4828 .filter(UserNotice.notice_read == false()) \
4832 .scalar()
4829 .scalar()
4833 if existing_msg:
4830 if existing_msg:
4834 log.warning('Ignoring duplicate notice for user %s', user)
4831 log.warning('Ignoring duplicate notice for user %s', user)
4835 return
4832 return
4836
4833
4837 new_notice.user = user
4834 new_notice.user = user
4838 new_notice.notice_subject = subject
4835 new_notice.notice_subject = subject
4839 new_notice.notice_body = body
4836 new_notice.notice_body = body
4840 new_notice.notification_level = notice_level
4837 new_notice.notification_level = notice_level
4841 Session().add(new_notice)
4838 Session().add(new_notice)
4842 Session().commit()
4839 Session().commit()
4843
4840
4844
4841
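# --- Illustrative sketch (not part of the original source): posting a notice to
# --- a single user with UserNotice.create_for_user() above. The call commits the
# --- session itself and silently skips unread duplicates of the same body.
#
#     UserNotice.create_for_user(
#         user, subject='storage almost full',
#         body='please clean up old repositories',
#         notice_level=UserNotice.NOTIFICATION_LEVEL_WARNING)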
4845 class Gist(Base, BaseModel):
4842 class Gist(Base, BaseModel):
4846 __tablename__ = 'gists'
4843 __tablename__ = 'gists'
4847 __table_args__ = (
4844 __table_args__ = (
4848 Index('g_gist_access_id_idx', 'gist_access_id'),
4845 Index('g_gist_access_id_idx', 'gist_access_id'),
4849 Index('g_created_on_idx', 'created_on'),
4846 Index('g_created_on_idx', 'created_on'),
4850 base_table_args
4847 base_table_args
4851 )
4848 )
4852
4849
4853 GIST_PUBLIC = 'public'
4850 GIST_PUBLIC = 'public'
4854 GIST_PRIVATE = 'private'
4851 GIST_PRIVATE = 'private'
4855 DEFAULT_FILENAME = 'gistfile1.txt'
4852 DEFAULT_FILENAME = 'gistfile1.txt'
4856
4853
4857 ACL_LEVEL_PUBLIC = 'acl_public'
4854 ACL_LEVEL_PUBLIC = 'acl_public'
4858 ACL_LEVEL_PRIVATE = 'acl_private'
4855 ACL_LEVEL_PRIVATE = 'acl_private'
4859
4856
4860 gist_id = Column('gist_id', Integer(), primary_key=True)
4857 gist_id = Column('gist_id', Integer(), primary_key=True)
4861 gist_access_id = Column('gist_access_id', Unicode(250))
4858 gist_access_id = Column('gist_access_id', Unicode(250))
4862 gist_description = Column('gist_description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))
4859 gist_description = Column('gist_description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))
4863 gist_owner = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=True)
4860 gist_owner = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=True)
4864 gist_expires = Column('gist_expires', Float(53), nullable=False)
4861 gist_expires = Column('gist_expires', Float(53), nullable=False)
4865 gist_type = Column('gist_type', Unicode(128), nullable=False)
4862 gist_type = Column('gist_type', Unicode(128), nullable=False)
4866 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
4863 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
4867 modified_at = Column('modified_at', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
4864 modified_at = Column('modified_at', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
4868 acl_level = Column('acl_level', Unicode(128), nullable=True)
4865 acl_level = Column('acl_level', Unicode(128), nullable=True)
4869
4866
4870 owner = relationship('User', back_populates='user_gists')
4867 owner = relationship('User', back_populates='user_gists')
4871
4868
4872 def __repr__(self):
4869 def __repr__(self):
4873 return f'<Gist:[{self.gist_type}]{self.gist_access_id}>'
4870 return f'<Gist:[{self.gist_type}]{self.gist_access_id}>'
4874
4871
4875 @hybrid_property
4872 @hybrid_property
4876 def description_safe(self):
4873 def description_safe(self):
4877 from rhodecode.lib import helpers as h
4874 from rhodecode.lib import helpers as h
4878 return h.escape(self.gist_description)
4875 return h.escape(self.gist_description)
4879
4876
4880 @classmethod
4877 @classmethod
4881 def get_or_404(cls, id_):
4878 def get_or_404(cls, id_):
4882 from pyramid.httpexceptions import HTTPNotFound
4879 from pyramid.httpexceptions import HTTPNotFound
4883
4880
4884 res = cls.query().filter(cls.gist_access_id == id_).scalar()
4881 res = cls.query().filter(cls.gist_access_id == id_).scalar()
4885 if not res:
4882 if not res:
4886 log.debug('WARN: No DB entry with id %s', id_)
4883 log.debug('WARN: No DB entry with id %s', id_)
4887 raise HTTPNotFound()
4884 raise HTTPNotFound()
4888 return res
4885 return res
4889
4886
4890 @classmethod
4887 @classmethod
4891 def get_by_access_id(cls, gist_access_id):
4888 def get_by_access_id(cls, gist_access_id):
4892 return cls.query().filter(cls.gist_access_id == gist_access_id).scalar()
4889 return cls.query().filter(cls.gist_access_id == gist_access_id).scalar()
4893
4890
4894 def gist_url(self):
4891 def gist_url(self):
4895 from rhodecode.model.gist import GistModel
4892 from rhodecode.model.gist import GistModel
4896 return GistModel().get_url(self)
4893 return GistModel().get_url(self)
4897
4894
4898 @classmethod
4895 @classmethod
4899 def base_path(cls):
4896 def base_path(cls):
4900 """
4897 """
4901 Returns the base path where all gists are stored
4898 Returns the base path where all gists are stored
4902
4899
4903 :param cls:
4900 :param cls:
4904 """
4901 """
4905 from rhodecode.model.gist import GIST_STORE_LOC
4902 from rhodecode.model.gist import GIST_STORE_LOC
4906 q = Session().query(RhodeCodeUi)\
4903 q = Session().query(RhodeCodeUi)\
4907 .filter(RhodeCodeUi.ui_key == URL_SEP)
4904 .filter(RhodeCodeUi.ui_key == URL_SEP)
4908 q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
4905 q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
4909 return os.path.join(q.one().ui_value, GIST_STORE_LOC)
4906 return os.path.join(q.one().ui_value, GIST_STORE_LOC)
4910
4907
4911 def get_api_data(self):
4908 def get_api_data(self):
4912 """
4909 """
4913 Common function for generating gist-related data for the API
4910 Common function for generating gist-related data for the API
4914 """
4911 """
4915 gist = self
4912 gist = self
4916 data = {
4913 data = {
4917 'gist_id': gist.gist_id,
4914 'gist_id': gist.gist_id,
4918 'type': gist.gist_type,
4915 'type': gist.gist_type,
4919 'access_id': gist.gist_access_id,
4916 'access_id': gist.gist_access_id,
4920 'description': gist.gist_description,
4917 'description': gist.gist_description,
4921 'url': gist.gist_url(),
4918 'url': gist.gist_url(),
4922 'expires': gist.gist_expires,
4919 'expires': gist.gist_expires,
4923 'created_on': gist.created_on,
4920 'created_on': gist.created_on,
4924 'modified_at': gist.modified_at,
4921 'modified_at': gist.modified_at,
4925 'content': None,
4922 'content': None,
4926 'acl_level': gist.acl_level,
4923 'acl_level': gist.acl_level,
4927 }
4924 }
4928 return data
4925 return data
4929
4926
4930 def __json__(self):
4927 def __json__(self):
4931 data = dict(
4928 data = dict(
4932 )
4929 )
4933 data.update(self.get_api_data())
4930 data.update(self.get_api_data())
4934 return data
4931 return data
4935 # SCM functions
4932 # SCM functions
4936
4933
4937 def scm_instance(self, **kwargs):
4934 def scm_instance(self, **kwargs):
4938 """
4935 """
4939 Get an instance of VCS Repository
4936 Get an instance of VCS Repository
4940
4937
4941 :param kwargs:
4938 :param kwargs:
4942 """
4939 """
4943 from rhodecode.model.gist import GistModel
4940 from rhodecode.model.gist import GistModel
4944 full_repo_path = os.path.join(self.base_path(), self.gist_access_id)
4941 full_repo_path = os.path.join(self.base_path(), self.gist_access_id)
4945 return get_vcs_instance(
4942 return get_vcs_instance(
4946 repo_path=safe_str(full_repo_path), create=False,
4943 repo_path=safe_str(full_repo_path), create=False,
4947 _vcs_alias=GistModel.vcs_backend)
4944 _vcs_alias=GistModel.vcs_backend)
4948
4945
4949
4946
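# --- Illustrative sketch (not part of the original source): fetching a gist and
# --- reading its file content through the VCS instance wired up above; the access
# --- id and the exact VCS calls are assumptions, DEFAULT_FILENAME comes from Gist.
#
#     gist = Gist.get_by_access_id('abc123')
#     repo = gist.scm_instance()
#     commit = repo.get_commit()                      # tip of the gist repo
#     node = commit.get_node(Gist.DEFAULT_FILENAME)   # e.g. 'gistfile1.txt'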
4950 class ExternalIdentity(Base, BaseModel):
4947 class ExternalIdentity(Base, BaseModel):
4951 __tablename__ = 'external_identities'
4948 __tablename__ = 'external_identities'
4952 __table_args__ = (
4949 __table_args__ = (
4953 Index('local_user_id_idx', 'local_user_id'),
4950 Index('local_user_id_idx', 'local_user_id'),
4954 Index('external_id_idx', 'external_id'),
4951 Index('external_id_idx', 'external_id'),
4955 base_table_args
4952 base_table_args
4956 )
4953 )
4957
4954
4958 external_id = Column('external_id', Unicode(255), default='', primary_key=True)
4955 external_id = Column('external_id', Unicode(255), default='', primary_key=True)
4959 external_username = Column('external_username', Unicode(1024), default='')
4956 external_username = Column('external_username', Unicode(1024), default='')
4960 local_user_id = Column('local_user_id', Integer(), ForeignKey('users.user_id'), primary_key=True)
4957 local_user_id = Column('local_user_id', Integer(), ForeignKey('users.user_id'), primary_key=True)
4961 provider_name = Column('provider_name', Unicode(255), default='', primary_key=True)
4958 provider_name = Column('provider_name', Unicode(255), default='', primary_key=True)
4962 access_token = Column('access_token', String(1024), default='')
4959 access_token = Column('access_token', String(1024), default='')
4963 alt_token = Column('alt_token', String(1024), default='')
4960 alt_token = Column('alt_token', String(1024), default='')
4964 token_secret = Column('token_secret', String(1024), default='')
4961 token_secret = Column('token_secret', String(1024), default='')
4965
4962
4966 @classmethod
4963 @classmethod
4967 def by_external_id_and_provider(cls, external_id, provider_name, local_user_id=None):
4964 def by_external_id_and_provider(cls, external_id, provider_name, local_user_id=None):
4968 """
4965 """
4969 Returns ExternalIdentity instance based on search params
4966 Returns ExternalIdentity instance based on search params
4970
4967
4971 :param external_id:
4968 :param external_id:
4972 :param provider_name:
4969 :param provider_name:
4973 :return: ExternalIdentity
4970 :return: ExternalIdentity
4974 """
4971 """
4975 query = cls.query()
4972 query = cls.query()
4976 query = query.filter(cls.external_id == external_id)
4973 query = query.filter(cls.external_id == external_id)
4977 query = query.filter(cls.provider_name == provider_name)
4974 query = query.filter(cls.provider_name == provider_name)
4978 if local_user_id:
4975 if local_user_id:
4979 query = query.filter(cls.local_user_id == local_user_id)
4976 query = query.filter(cls.local_user_id == local_user_id)
4980 return query.first()
4977 return query.first()
4981
4978
4982 @classmethod
4979 @classmethod
4983 def user_by_external_id_and_provider(cls, external_id, provider_name):
4980 def user_by_external_id_and_provider(cls, external_id, provider_name):
4984 """
4981 """
4985 Returns User instance based on search params
4982 Returns User instance based on search params
4986
4983
4987 :param external_id:
4984 :param external_id:
4988 :param provider_name:
4985 :param provider_name:
4989 :return: User
4986 :return: User
4990 """
4987 """
4991 query = User.query()
4988 query = User.query()
4992 query = query.filter(cls.external_id == external_id)
4989 query = query.filter(cls.external_id == external_id)
4993 query = query.filter(cls.provider_name == provider_name)
4990 query = query.filter(cls.provider_name == provider_name)
4994 query = query.filter(User.user_id == cls.local_user_id)
4991 query = query.filter(User.user_id == cls.local_user_id)
4995 return query.first()
4992 return query.first()
4996
4993
4997 @classmethod
4994 @classmethod
4998 def by_local_user_id(cls, local_user_id):
4995 def by_local_user_id(cls, local_user_id):
4999 """
4996 """
5000 Returns all tokens for user
4997 Returns all tokens for user
5001
4998
5002 :param local_user_id:
4999 :param local_user_id:
5003 :return: ExternalIdentity
5000 :return: ExternalIdentity
5004 """
5001 """
5005 query = cls.query()
5002 query = cls.query()
5006 query = query.filter(cls.local_user_id == local_user_id)
5003 query = query.filter(cls.local_user_id == local_user_id)
5007 return query
5004 return query
5008
5005
5009 @classmethod
5006 @classmethod
5010 def load_provider_plugin(cls, plugin_id):
5007 def load_provider_plugin(cls, plugin_id):
5011 from rhodecode.authentication.base import loadplugin
5008 from rhodecode.authentication.base import loadplugin
5012 _plugin_id = 'egg:rhodecode-enterprise-ee#{}'.format(plugin_id)
5009 _plugin_id = 'egg:rhodecode-enterprise-ee#{}'.format(plugin_id)
5013 auth_plugin = loadplugin(_plugin_id)
5010 auth_plugin = loadplugin(_plugin_id)
5014 return auth_plugin
5011 return auth_plugin
5015
5012
5016
5013
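# Illustrative sketch (not part of the original module): resolving accounts that come
# from an external provider. The provider name 'github' is only an example value.
def _example_external_identity_lookup(external_id):
    # stored token record for this provider/external id (optionally narrowed per user)
    identity = ExternalIdentity.by_external_id_and_provider(external_id, 'github')
    # the local User joined through local_user_id
    local_user = ExternalIdentity.user_by_external_id_and_provider(external_id, 'github')
    return identity, local_user
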
5017 class Integration(Base, BaseModel):
5014 class Integration(Base, BaseModel):
5018 __tablename__ = 'integrations'
5015 __tablename__ = 'integrations'
5019 __table_args__ = (
5016 __table_args__ = (
5020 base_table_args
5017 base_table_args
5021 )
5018 )
5022
5019
5023 integration_id = Column('integration_id', Integer(), primary_key=True)
5020 integration_id = Column('integration_id', Integer(), primary_key=True)
5024 integration_type = Column('integration_type', String(255))
5021 integration_type = Column('integration_type', String(255))
5025 enabled = Column('enabled', Boolean(), nullable=False)
5022 enabled = Column('enabled', Boolean(), nullable=False)
5026 name = Column('name', String(255), nullable=False)
5023 name = Column('name', String(255), nullable=False)
5027 child_repos_only = Column('child_repos_only', Boolean(), nullable=False, default=False)
5024 child_repos_only = Column('child_repos_only', Boolean(), nullable=False, default=False)
5028
5025
5029 settings = Column(
5026 settings = Column(
5030 'settings_json', MutationObj.as_mutable(
5027 'settings_json', MutationObj.as_mutable(
5031 JsonType(dialect_map=dict(mysql=UnicodeText(16384)))))
5028 JsonType(dialect_map=dict(mysql=UnicodeText(16384)))))
5032 repo_id = Column('repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=True, unique=None, default=None)
5029 repo_id = Column('repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=True, unique=None, default=None)
5033 repo = relationship('Repository', lazy='joined', back_populates='integrations')
5030 repo = relationship('Repository', lazy='joined', back_populates='integrations')
5034
5031
5035 repo_group_id = Column('repo_group_id', Integer(), ForeignKey('groups.group_id'), nullable=True, unique=None, default=None)
5032 repo_group_id = Column('repo_group_id', Integer(), ForeignKey('groups.group_id'), nullable=True, unique=None, default=None)
5036 repo_group = relationship('RepoGroup', lazy='joined', back_populates='integrations')
5033 repo_group = relationship('RepoGroup', lazy='joined', back_populates='integrations')
5037
5034
5038 @property
5035 @property
5039 def scope(self):
5036 def scope(self):
5040 if self.repo:
5037 if self.repo:
5041 return repr(self.repo)
5038 return repr(self.repo)
5042 if self.repo_group:
5039 if self.repo_group:
5043 if self.child_repos_only:
5040 if self.child_repos_only:
5044 return repr(self.repo_group) + ' (child repos only)'
5041 return repr(self.repo_group) + ' (child repos only)'
5045 else:
5042 else:
5046 return repr(self.repo_group) + ' (recursive)'
5043 return repr(self.repo_group) + ' (recursive)'
5047 if self.child_repos_only:
5044 if self.child_repos_only:
5048 return 'root_repos'
5045 return 'root_repos'
5049 return 'global'
5046 return 'global'
5050
5047
5051 def __repr__(self):
5048 def __repr__(self):
5052 return '<Integration(%r, %r)>' % (self.integration_type, self.scope)
5049 return '<Integration(%r, %r)>' % (self.integration_type, self.scope)
5053
5050
5054
5051
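# Illustrative sketch (not part of the original module): how the scope property reads
# for an integration bound to neither a repository nor a repository group. The
# integration_type value below is a hypothetical placeholder.
def _example_global_integration_scope():
    integration = Integration()
    integration.integration_type = 'webhook'   # hypothetical type name
    integration.name = 'notify build server'
    integration.enabled = True
    integration.child_repos_only = False
    return integration.scope                   # no repo/repo group bound -> 'global'
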
5055 class RepoReviewRuleUser(Base, BaseModel):
5052 class RepoReviewRuleUser(Base, BaseModel):
5056 __tablename__ = 'repo_review_rules_users'
5053 __tablename__ = 'repo_review_rules_users'
5057 __table_args__ = (
5054 __table_args__ = (
5058 base_table_args
5055 base_table_args
5059 )
5056 )
5060 ROLE_REVIEWER = 'reviewer'
5057 ROLE_REVIEWER = 'reviewer'
5061 ROLE_OBSERVER = 'observer'
5058 ROLE_OBSERVER = 'observer'
5062 ROLES = [ROLE_REVIEWER, ROLE_OBSERVER]
5059 ROLES = [ROLE_REVIEWER, ROLE_OBSERVER]
5063
5060
5064 repo_review_rule_user_id = Column('repo_review_rule_user_id', Integer(), primary_key=True)
5061 repo_review_rule_user_id = Column('repo_review_rule_user_id', Integer(), primary_key=True)
5065 repo_review_rule_id = Column("repo_review_rule_id", Integer(), ForeignKey('repo_review_rules.repo_review_rule_id'))
5062 repo_review_rule_id = Column("repo_review_rule_id", Integer(), ForeignKey('repo_review_rules.repo_review_rule_id'))
5066 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False)
5063 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False)
5067 mandatory = Column("mandatory", Boolean(), nullable=False, default=False)
5064 mandatory = Column("mandatory", Boolean(), nullable=False, default=False)
5068 role = Column('role', Unicode(255), nullable=True, default=ROLE_REVIEWER)
5065 role = Column('role', Unicode(255), nullable=True, default=ROLE_REVIEWER)
5069 user = relationship('User', back_populates='user_review_rules')
5066 user = relationship('User', back_populates='user_review_rules')
5070
5067
5071 def rule_data(self):
5068 def rule_data(self):
5072 return {
5069 return {
5073 'mandatory': self.mandatory,
5070 'mandatory': self.mandatory,
5074 'role': self.role,
5071 'role': self.role,
5075 }
5072 }
5076
5073
5077
5074
5078 class RepoReviewRuleUserGroup(Base, BaseModel):
5075 class RepoReviewRuleUserGroup(Base, BaseModel):
5079 __tablename__ = 'repo_review_rules_users_groups'
5076 __tablename__ = 'repo_review_rules_users_groups'
5080 __table_args__ = (
5077 __table_args__ = (
5081 base_table_args
5078 base_table_args
5082 )
5079 )
5083
5080
5084 VOTE_RULE_ALL = -1
5081 VOTE_RULE_ALL = -1
5085 ROLE_REVIEWER = 'reviewer'
5082 ROLE_REVIEWER = 'reviewer'
5086 ROLE_OBSERVER = 'observer'
5083 ROLE_OBSERVER = 'observer'
5087 ROLES = [ROLE_REVIEWER, ROLE_OBSERVER]
5084 ROLES = [ROLE_REVIEWER, ROLE_OBSERVER]
5088
5085
5089 repo_review_rule_users_group_id = Column('repo_review_rule_users_group_id', Integer(), primary_key=True)
5086 repo_review_rule_users_group_id = Column('repo_review_rule_users_group_id', Integer(), primary_key=True)
5090 repo_review_rule_id = Column("repo_review_rule_id", Integer(), ForeignKey('repo_review_rules.repo_review_rule_id'))
5087 repo_review_rule_id = Column("repo_review_rule_id", Integer(), ForeignKey('repo_review_rules.repo_review_rule_id'))
5091 users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False)
5088 users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False)
5092 mandatory = Column("mandatory", Boolean(), nullable=False, default=False)
5089 mandatory = Column("mandatory", Boolean(), nullable=False, default=False)
5093 role = Column('role', Unicode(255), nullable=True, default=ROLE_REVIEWER)
5090 role = Column('role', Unicode(255), nullable=True, default=ROLE_REVIEWER)
5094 vote_rule = Column("vote_rule", Integer(), nullable=True, default=VOTE_RULE_ALL)
5091 vote_rule = Column("vote_rule", Integer(), nullable=True, default=VOTE_RULE_ALL)
5095 users_group = relationship('UserGroup')
5092 users_group = relationship('UserGroup')
5096
5093
5097 def rule_data(self):
5094 def rule_data(self):
5098 return {
5095 return {
5099 'mandatory': self.mandatory,
5096 'mandatory': self.mandatory,
5100 'role': self.role,
5097 'role': self.role,
5101 'vote_rule': self.vote_rule
5098 'vote_rule': self.vote_rule
5102 }
5099 }
5103
5100
5104 @property
5101 @property
5105 def vote_rule_label(self):
5102 def vote_rule_label(self):
5106 if not self.vote_rule or self.vote_rule == self.VOTE_RULE_ALL:
5103 if not self.vote_rule or self.vote_rule == self.VOTE_RULE_ALL:
5107 return 'all must vote'
5104 return 'all must vote'
5108 else:
5105 else:
5109 return 'min. vote {}'.format(self.vote_rule)
5106 return 'min. vote {}'.format(self.vote_rule)
5110
5107
5111
5108
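# Illustrative sketch (not part of the original module): rule_data() exposes the raw
# vote_rule while vote_rule_label renders it for display. `group_rule` stands for a
# loaded RepoReviewRuleUserGroup row.
def _example_vote_rule_display(group_rule):
    data = group_rule.rule_data()    # {'mandatory': ..., 'role': ..., 'vote_rule': ...}
    # VOTE_RULE_ALL (-1) or an unset value reads 'all must vote', otherwise 'min. vote N'
    return data['vote_rule'], group_rule.vote_rule_label
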
5112 class RepoReviewRule(Base, BaseModel):
5109 class RepoReviewRule(Base, BaseModel):
5113 __tablename__ = 'repo_review_rules'
5110 __tablename__ = 'repo_review_rules'
5114 __table_args__ = (
5111 __table_args__ = (
5115 base_table_args
5112 base_table_args
5116 )
5113 )
5117
5114
5118 repo_review_rule_id = Column(
5115 repo_review_rule_id = Column(
5119 'repo_review_rule_id', Integer(), primary_key=True)
5116 'repo_review_rule_id', Integer(), primary_key=True)
5120 repo_id = Column(
5117 repo_id = Column(
5121 "repo_id", Integer(), ForeignKey('repositories.repo_id'))
5118 "repo_id", Integer(), ForeignKey('repositories.repo_id'))
5122 repo = relationship('Repository', back_populates='review_rules')
5119 repo = relationship('Repository', back_populates='review_rules')
5123
5120
5124 review_rule_name = Column('review_rule_name', String(255))
5121 review_rule_name = Column('review_rule_name', String(255))
5125 _branch_pattern = Column("branch_pattern", UnicodeText().with_variant(UnicodeText(255), 'mysql'), default='*') # glob
5122 _branch_pattern = Column("branch_pattern", UnicodeText().with_variant(UnicodeText(255), 'mysql'), default='*') # glob
5126 _target_branch_pattern = Column("target_branch_pattern", UnicodeText().with_variant(UnicodeText(255), 'mysql'), default='*') # glob
5123 _target_branch_pattern = Column("target_branch_pattern", UnicodeText().with_variant(UnicodeText(255), 'mysql'), default='*') # glob
5127 _file_pattern = Column("file_pattern", UnicodeText().with_variant(UnicodeText(255), 'mysql'), default='*') # glob
5124 _file_pattern = Column("file_pattern", UnicodeText().with_variant(UnicodeText(255), 'mysql'), default='*') # glob
5128
5125
5129 use_authors_for_review = Column("use_authors_for_review", Boolean(), nullable=False, default=False)
5126 use_authors_for_review = Column("use_authors_for_review", Boolean(), nullable=False, default=False)
5130
5127
5131 # Legacy fields, just for backward compat
5128 # Legacy fields, just for backward compat
5132 _forbid_author_to_review = Column("forbid_author_to_review", Boolean(), nullable=False, default=False)
5129 _forbid_author_to_review = Column("forbid_author_to_review", Boolean(), nullable=False, default=False)
5133 _forbid_commit_author_to_review = Column("forbid_commit_author_to_review", Boolean(), nullable=False, default=False)
5130 _forbid_commit_author_to_review = Column("forbid_commit_author_to_review", Boolean(), nullable=False, default=False)
5134
5131
5135 pr_author = Column("pr_author", UnicodeText().with_variant(UnicodeText(255), 'mysql'), nullable=True)
5132 pr_author = Column("pr_author", UnicodeText().with_variant(UnicodeText(255), 'mysql'), nullable=True)
5136 commit_author = Column("commit_author", UnicodeText().with_variant(UnicodeText(255), 'mysql'), nullable=True)
5133 commit_author = Column("commit_author", UnicodeText().with_variant(UnicodeText(255), 'mysql'), nullable=True)
5137
5134
5138 forbid_adding_reviewers = Column("forbid_adding_reviewers", Boolean(), nullable=False, default=False)
5135 forbid_adding_reviewers = Column("forbid_adding_reviewers", Boolean(), nullable=False, default=False)
5139
5136
5140 rule_users = relationship('RepoReviewRuleUser')
5137 rule_users = relationship('RepoReviewRuleUser')
5141 rule_user_groups = relationship('RepoReviewRuleUserGroup')
5138 rule_user_groups = relationship('RepoReviewRuleUserGroup')
5142
5139
5143 def _validate_pattern(self, value):
5140 def _validate_pattern(self, value):
5144 re.compile('^' + glob2re(value) + '$')
5141 re.compile('^' + glob2re(value) + '$')
5145
5142
5146 @hybrid_property
5143 @hybrid_property
5147 def source_branch_pattern(self):
5144 def source_branch_pattern(self):
5148 return self._branch_pattern or '*'
5145 return self._branch_pattern or '*'
5149
5146
5150 @source_branch_pattern.setter
5147 @source_branch_pattern.setter
5151 def source_branch_pattern(self, value):
5148 def source_branch_pattern(self, value):
5152 self._validate_pattern(value)
5149 self._validate_pattern(value)
5153 self._branch_pattern = value or '*'
5150 self._branch_pattern = value or '*'
5154
5151
5155 @hybrid_property
5152 @hybrid_property
5156 def target_branch_pattern(self):
5153 def target_branch_pattern(self):
5157 return self._target_branch_pattern or '*'
5154 return self._target_branch_pattern or '*'
5158
5155
5159 @target_branch_pattern.setter
5156 @target_branch_pattern.setter
5160 def target_branch_pattern(self, value):
5157 def target_branch_pattern(self, value):
5161 self._validate_pattern(value)
5158 self._validate_pattern(value)
5162 self._target_branch_pattern = value or '*'
5159 self._target_branch_pattern = value or '*'
5163
5160
5164 @hybrid_property
5161 @hybrid_property
5165 def file_pattern(self):
5162 def file_pattern(self):
5166 return self._file_pattern or '*'
5163 return self._file_pattern or '*'
5167
5164
5168 @file_pattern.setter
5165 @file_pattern.setter
5169 def file_pattern(self, value):
5166 def file_pattern(self, value):
5170 self._validate_pattern(value)
5167 self._validate_pattern(value)
5171 self._file_pattern = value or '*'
5168 self._file_pattern = value or '*'
5172
5169
5173 @hybrid_property
5170 @hybrid_property
5174 def forbid_pr_author_to_review(self):
5171 def forbid_pr_author_to_review(self):
5175 return self.pr_author == 'forbid_pr_author'
5172 return self.pr_author == 'forbid_pr_author'
5176
5173
5177 @hybrid_property
5174 @hybrid_property
5178 def include_pr_author_to_review(self):
5175 def include_pr_author_to_review(self):
5179 return self.pr_author == 'include_pr_author'
5176 return self.pr_author == 'include_pr_author'
5180
5177
5181 @hybrid_property
5178 @hybrid_property
5182 def forbid_commit_author_to_review(self):
5179 def forbid_commit_author_to_review(self):
5183 return self.commit_author == 'forbid_commit_author'
5180 return self.commit_author == 'forbid_commit_author'
5184
5181
5185 @hybrid_property
5182 @hybrid_property
5186 def include_commit_author_to_review(self):
5183 def include_commit_author_to_review(self):
5187 return self.commit_author == 'include_commit_author'
5184 return self.commit_author == 'include_commit_author'
5188
5185
5189 def matches(self, source_branch, target_branch, files_changed):
5186 def matches(self, source_branch, target_branch, files_changed):
5190 """
5187 """
5191 Check if this review rule matches a branch/files in a pull request
5188 Check if this review rule matches a branch/files in a pull request
5192
5189
5193 :param source_branch: source branch name for the commit
5190 :param source_branch: source branch name for the commit
5194 :param target_branch: target branch name for the commit
5191 :param target_branch: target branch name for the commit
5195 :param files_changed: list of file paths changed in the pull request
5192 :param files_changed: list of file paths changed in the pull request
5196 """
5193 """
5197
5194
5198 source_branch = source_branch or ''
5195 source_branch = source_branch or ''
5199 target_branch = target_branch or ''
5196 target_branch = target_branch or ''
5200 files_changed = files_changed or []
5197 files_changed = files_changed or []
5201
5198
5202 branch_matches = True
5199 branch_matches = True
5203 if source_branch or target_branch:
5200 if source_branch or target_branch:
5204 if self.source_branch_pattern == '*':
5201 if self.source_branch_pattern == '*':
5205 source_branch_match = True
5202 source_branch_match = True
5206 else:
5203 else:
5207 if self.source_branch_pattern.startswith('re:'):
5204 if self.source_branch_pattern.startswith('re:'):
5208 source_pattern = self.source_branch_pattern[3:]
5205 source_pattern = self.source_branch_pattern[3:]
5209 else:
5206 else:
5210 source_pattern = '^' + glob2re(self.source_branch_pattern) + '$'
5207 source_pattern = '^' + glob2re(self.source_branch_pattern) + '$'
5211 source_branch_regex = re.compile(source_pattern)
5208 source_branch_regex = re.compile(source_pattern)
5212 source_branch_match = bool(source_branch_regex.search(source_branch))
5209 source_branch_match = bool(source_branch_regex.search(source_branch))
5213 if self.target_branch_pattern == '*':
5210 if self.target_branch_pattern == '*':
5214 target_branch_match = True
5211 target_branch_match = True
5215 else:
5212 else:
5216 if self.target_branch_pattern.startswith('re:'):
5213 if self.target_branch_pattern.startswith('re:'):
5217 target_pattern = self.target_branch_pattern[3:]
5214 target_pattern = self.target_branch_pattern[3:]
5218 else:
5215 else:
5219 target_pattern = '^' + glob2re(self.target_branch_pattern) + '$'
5216 target_pattern = '^' + glob2re(self.target_branch_pattern) + '$'
5220 target_branch_regex = re.compile(target_pattern)
5217 target_branch_regex = re.compile(target_pattern)
5221 target_branch_match = bool(target_branch_regex.search(target_branch))
5218 target_branch_match = bool(target_branch_regex.search(target_branch))
5222
5219
5223 branch_matches = source_branch_match and target_branch_match
5220 branch_matches = source_branch_match and target_branch_match
5224
5221
5225 files_matches = True
5222 files_matches = True
5226 if self.file_pattern != '*':
5223 if self.file_pattern != '*':
5227 files_matches = False
5224 files_matches = False
5228 if self.file_pattern.startswith('re:'):
5225 if self.file_pattern.startswith('re:'):
5229 file_pattern = self.file_pattern[3:]
5226 file_pattern = self.file_pattern[3:]
5230 else:
5227 else:
5231 file_pattern = glob2re(self.file_pattern)
5228 file_pattern = glob2re(self.file_pattern)
5232 file_regex = re.compile(file_pattern)
5229 file_regex = re.compile(file_pattern)
5233 for file_data in files_changed:
5230 for file_data in files_changed:
5234 filename = file_data.get('filename')
5231 filename = file_data.get('filename')
5235
5232
5236 if file_regex.search(filename):
5233 if file_regex.search(filename):
5237 files_matches = True
5234 files_matches = True
5238 break
5235 break
5239
5236
5240 return branch_matches and files_matches
5237 return branch_matches and files_matches
5241
5238
5242 @property
5239 @property
5243 def review_users(self):
5240 def review_users(self):
5244 """ Returns the users which this rule applies to """
5241 """ Returns the users which this rule applies to """
5245
5242
5246 users = collections.OrderedDict()
5243 users = collections.OrderedDict()
5247
5244
5248 for rule_user in self.rule_users:
5245 for rule_user in self.rule_users:
5249 if rule_user.user.active:
5246 if rule_user.user.active:
5250 if rule_user.user not in users:
5247 if rule_user.user not in users:
5251 users[rule_user.user.username] = {
5248 users[rule_user.user.username] = {
5252 'user': rule_user.user,
5249 'user': rule_user.user,
5253 'source': 'user',
5250 'source': 'user',
5254 'source_data': {},
5251 'source_data': {},
5255 'data': rule_user.rule_data()
5252 'data': rule_user.rule_data()
5256 }
5253 }
5257
5254
5258 for rule_user_group in self.rule_user_groups:
5255 for rule_user_group in self.rule_user_groups:
5259 source_data = {
5256 source_data = {
5260 'user_group_id': rule_user_group.users_group.users_group_id,
5257 'user_group_id': rule_user_group.users_group.users_group_id,
5261 'name': rule_user_group.users_group.users_group_name,
5258 'name': rule_user_group.users_group.users_group_name,
5262 'members': len(rule_user_group.users_group.members)
5259 'members': len(rule_user_group.users_group.members)
5263 }
5260 }
5264 for member in rule_user_group.users_group.members:
5261 for member in rule_user_group.users_group.members:
5265 if member.user.active:
5262 if member.user.active:
5266 key = member.user.username
5263 key = member.user.username
5267 if key in users:
5264 if key in users:
5268 # skip this member as we already have them;
5265 # skip this member as we already have them;
5269 # this prevents overriding the "first" matched
5266 # this prevents overriding the "first" matched
5270 # user when duplicates appear in multiple groups
5267 # user when duplicates appear in multiple groups
5271 continue
5268 continue
5272
5269
5273 users[key] = {
5270 users[key] = {
5274 'user': member.user,
5271 'user': member.user,
5275 'source': 'user_group',
5272 'source': 'user_group',
5276 'source_data': source_data,
5273 'source_data': source_data,
5277 'data': rule_user_group.rule_data()
5274 'data': rule_user_group.rule_data()
5278 }
5275 }
5279
5276
5280 return users
5277 return users
5281
5278
5282 def user_group_vote_rule(self, user_id):
5279 def user_group_vote_rule(self, user_id):
5283
5280
5284 rules = []
5281 rules = []
5285 if not self.rule_user_groups:
5282 if not self.rule_user_groups:
5286 return rules
5283 return rules
5287
5284
5288 for user_group in self.rule_user_groups:
5285 for user_group in self.rule_user_groups:
5289 user_group_members = [x.user_id for x in user_group.users_group.members]
5286 user_group_members = [x.user_id for x in user_group.users_group.members]
5290 if user_id in user_group_members:
5287 if user_id in user_group_members:
5291 rules.append(user_group)
5288 rules.append(user_group)
5292 return rules
5289 return rules
5293
5290
5294 def __repr__(self):
5291 def __repr__(self):
5295 return f'<RepoReviewerRule(id={self.repo_review_rule_id}, repo={self.repo!r})>'
5292 return f'<RepoReviewerRule(id={self.repo_review_rule_id}, repo={self.repo!r})>'
5296
5293
5297
5294
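# Illustrative sketch (not part of the original module): exercising matches(). Glob
# patterns are translated with glob2re and anchored, while patterns starting with
# 're:' are used as raw regular expressions. files_changed entries are dicts carrying
# at least a 'filename' key, as matches() expects. `rule` stands for a RepoReviewRule.
def _example_review_rule_matches(rule):
    rule.source_branch_pattern = 'feature/*'     # glob, validated by the setter
    rule.target_branch_pattern = '*'             # wildcard short-circuits to a match
    rule.file_pattern = r're:.*\.py$'            # raw regex form
    return rule.matches(
        source_branch='feature/login',
        target_branch='default',
        files_changed=[{'filename': 'rhodecode/model/db.py'}])
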
5298 class ScheduleEntry(Base, BaseModel):
5295 class ScheduleEntry(Base, BaseModel):
5299 __tablename__ = 'schedule_entries'
5296 __tablename__ = 'schedule_entries'
5300 __table_args__ = (
5297 __table_args__ = (
5301 UniqueConstraint('schedule_name', name='s_schedule_name_idx'),
5298 UniqueConstraint('schedule_name', name='s_schedule_name_idx'),
5302 UniqueConstraint('task_uid', name='s_task_uid_idx'),
5299 UniqueConstraint('task_uid', name='s_task_uid_idx'),
5303 base_table_args,
5300 base_table_args,
5304 )
5301 )
5305 SCHEDULE_TYPE_INTEGER = "integer"
5302 SCHEDULE_TYPE_INTEGER = "integer"
5306 SCHEDULE_TYPE_CRONTAB = "crontab"
5303 SCHEDULE_TYPE_CRONTAB = "crontab"
5307
5304
5308 schedule_types = [SCHEDULE_TYPE_CRONTAB, SCHEDULE_TYPE_INTEGER]
5305 schedule_types = [SCHEDULE_TYPE_CRONTAB, SCHEDULE_TYPE_INTEGER]
5309 schedule_entry_id = Column('schedule_entry_id', Integer(), primary_key=True)
5306 schedule_entry_id = Column('schedule_entry_id', Integer(), primary_key=True)
5310
5307
5311 schedule_name = Column("schedule_name", String(255), nullable=False, unique=None, default=None)
5308 schedule_name = Column("schedule_name", String(255), nullable=False, unique=None, default=None)
5312 schedule_description = Column("schedule_description", String(10000), nullable=True, unique=None, default=None)
5309 schedule_description = Column("schedule_description", String(10000), nullable=True, unique=None, default=None)
5313 schedule_enabled = Column("schedule_enabled", Boolean(), nullable=False, unique=None, default=True)
5310 schedule_enabled = Column("schedule_enabled", Boolean(), nullable=False, unique=None, default=True)
5314
5311
5315 _schedule_type = Column("schedule_type", String(255), nullable=False, unique=None, default=None)
5312 _schedule_type = Column("schedule_type", String(255), nullable=False, unique=None, default=None)
5316 schedule_definition = Column('schedule_definition_json', MutationObj.as_mutable(JsonType(default=lambda: "", dialect_map=dict(mysql=LONGTEXT()))))
5313 schedule_definition = Column('schedule_definition_json', MutationObj.as_mutable(JsonType(default=lambda: "", dialect_map=dict(mysql=LONGTEXT()))))
5317
5314
5318 schedule_last_run = Column('schedule_last_run', DateTime(timezone=False), nullable=True, unique=None, default=None)
5315 schedule_last_run = Column('schedule_last_run', DateTime(timezone=False), nullable=True, unique=None, default=None)
5319 schedule_total_run_count = Column('schedule_total_run_count', Integer(), nullable=True, unique=None, default=0)
5316 schedule_total_run_count = Column('schedule_total_run_count', Integer(), nullable=True, unique=None, default=0)
5320
5317
5321 # task
5318 # task
5322 task_uid = Column("task_uid", String(255), nullable=False, unique=None, default=None)
5319 task_uid = Column("task_uid", String(255), nullable=False, unique=None, default=None)
5323 task_dot_notation = Column("task_dot_notation", String(4096), nullable=False, unique=None, default=None)
5320 task_dot_notation = Column("task_dot_notation", String(4096), nullable=False, unique=None, default=None)
5324 task_args = Column('task_args_json', MutationObj.as_mutable(JsonType(default=list, dialect_map=dict(mysql=LONGTEXT()))))
5321 task_args = Column('task_args_json', MutationObj.as_mutable(JsonType(default=list, dialect_map=dict(mysql=LONGTEXT()))))
5325 task_kwargs = Column('task_kwargs_json', MutationObj.as_mutable(JsonType(default=dict, dialect_map=dict(mysql=LONGTEXT()))))
5322 task_kwargs = Column('task_kwargs_json', MutationObj.as_mutable(JsonType(default=dict, dialect_map=dict(mysql=LONGTEXT()))))
5326
5323
5327 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
5324 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
5328 updated_on = Column('updated_on', DateTime(timezone=False), nullable=True, unique=None, default=None)
5325 updated_on = Column('updated_on', DateTime(timezone=False), nullable=True, unique=None, default=None)
5329
5326
5330 @hybrid_property
5327 @hybrid_property
5331 def schedule_type(self):
5328 def schedule_type(self):
5332 return self._schedule_type
5329 return self._schedule_type
5333
5330
5334 @schedule_type.setter
5331 @schedule_type.setter
5335 def schedule_type(self, val):
5332 def schedule_type(self, val):
5336 if val not in self.schedule_types:
5333 if val not in self.schedule_types:
5337 raise ValueError('Value must be one of `{}` and got `{}`'.format(
5334 raise ValueError('Value must be one of `{}` and got `{}`'.format(
5338 self.schedule_types, val))
5335 self.schedule_types, val))
5339
5336
5340 self._schedule_type = val
5337 self._schedule_type = val
5341
5338
5342 @classmethod
5339 @classmethod
5343 def get_uid(cls, obj):
5340 def get_uid(cls, obj):
5344 args = obj.task_args
5341 args = obj.task_args
5345 kwargs = obj.task_kwargs
5342 kwargs = obj.task_kwargs
5346 if isinstance(args, JsonRaw):
5343 if isinstance(args, JsonRaw):
5347 try:
5344 try:
5348 args = json.loads(args)
5345 args = json.loads(args)
5349 except ValueError:
5346 except ValueError:
5350 args = tuple()
5347 args = tuple()
5351
5348
5352 if isinstance(kwargs, JsonRaw):
5349 if isinstance(kwargs, JsonRaw):
5353 try:
5350 try:
5354 kwargs = json.loads(kwargs)
5351 kwargs = json.loads(kwargs)
5355 except ValueError:
5352 except ValueError:
5356 kwargs = dict()
5353 kwargs = dict()
5357
5354
5358 dot_notation = obj.task_dot_notation
5355 dot_notation = obj.task_dot_notation
5359 val = '.'.join(map(safe_str, [
5356 val = '.'.join(map(safe_str, [
5360 sorted(dot_notation), args, sorted(kwargs.items())]))
5357 sorted(dot_notation), args, sorted(kwargs.items())]))
5361 return sha1(safe_bytes(val))
5358 return sha1(safe_bytes(val))
5362
5359
5363 @classmethod
5360 @classmethod
5364 def get_by_schedule_name(cls, schedule_name):
5361 def get_by_schedule_name(cls, schedule_name):
5365 return cls.query().filter(cls.schedule_name == schedule_name).scalar()
5362 return cls.query().filter(cls.schedule_name == schedule_name).scalar()
5366
5363
5367 @classmethod
5364 @classmethod
5368 def get_by_schedule_id(cls, schedule_id):
5365 def get_by_schedule_id(cls, schedule_id):
5369 return cls.query().filter(cls.schedule_entry_id == schedule_id).scalar()
5366 return cls.query().filter(cls.schedule_entry_id == schedule_id).scalar()
5370
5367
5371 @property
5368 @property
5372 def task(self):
5369 def task(self):
5373 return self.task_dot_notation
5370 return self.task_dot_notation
5374
5371
5375 @property
5372 @property
5376 def schedule(self):
5373 def schedule(self):
5377 from rhodecode.lib.celerylib.utils import raw_2_schedule
5374 from rhodecode.lib.celerylib.utils import raw_2_schedule
5378 schedule = raw_2_schedule(self.schedule_definition, self.schedule_type)
5375 schedule = raw_2_schedule(self.schedule_definition, self.schedule_type)
5379 return schedule
5376 return schedule
5380
5377
5381 @property
5378 @property
5382 def args(self):
5379 def args(self):
5383 try:
5380 try:
5384 return list(self.task_args or [])
5381 return list(self.task_args or [])
5385 except ValueError:
5382 except ValueError:
5386 return list()
5383 return list()
5387
5384
5388 @property
5385 @property
5389 def kwargs(self):
5386 def kwargs(self):
5390 try:
5387 try:
5391 return dict(self.task_kwargs or {})
5388 return dict(self.task_kwargs or {})
5392 except ValueError:
5389 except ValueError:
5393 return dict()
5390 return dict()
5394
5391
5395 def _as_raw(self, val, indent=False):
5392 def _as_raw(self, val, indent=False):
5396 if hasattr(val, 'de_coerce'):
5393 if hasattr(val, 'de_coerce'):
5397 val = val.de_coerce()
5394 val = val.de_coerce()
5398 if val:
5395 if val:
5399 if indent:
5396 if indent:
5400 val = ext_json.formatted_str_json(val)
5397 val = ext_json.formatted_str_json(val)
5401 else:
5398 else:
5402 val = ext_json.str_json(val)
5399 val = ext_json.str_json(val)
5403
5400
5404 return val
5401 return val
5405
5402
5406 @property
5403 @property
5407 def schedule_definition_raw(self):
5404 def schedule_definition_raw(self):
5408 return self._as_raw(self.schedule_definition)
5405 return self._as_raw(self.schedule_definition)
5409
5406
5410 def args_raw(self, indent=False):
5407 def args_raw(self, indent=False):
5411 return self._as_raw(self.task_args, indent)
5408 return self._as_raw(self.task_args, indent)
5412
5409
5413 def kwargs_raw(self, indent=False):
5410 def kwargs_raw(self, indent=False):
5414 return self._as_raw(self.task_kwargs, indent)
5411 return self._as_raw(self.task_kwargs, indent)
5415
5412
5416 def __repr__(self):
5413 def __repr__(self):
5417 return f'<DB:ScheduleEntry({self.schedule_entry_id}:{self.schedule_name})>'
5414 return f'<DB:ScheduleEntry({self.schedule_entry_id}:{self.schedule_name})>'
5418
5415
5419
5416
5420 @event.listens_for(ScheduleEntry, 'before_update')
5417 @event.listens_for(ScheduleEntry, 'before_update')
5421 def update_task_uid(mapper, connection, target):
5418 def update_task_uid(mapper, connection, target):
5422 target.task_uid = ScheduleEntry.get_uid(target)
5419 target.task_uid = ScheduleEntry.get_uid(target)
5423
5420
5424
5421
5425 @event.listens_for(ScheduleEntry, 'before_insert')
5422 @event.listens_for(ScheduleEntry, 'before_insert')
5426 def set_task_uid(mapper, connection, target):
5423 def set_task_uid(mapper, connection, target):
5427 target.task_uid = ScheduleEntry.get_uid(target)
5424 target.task_uid = ScheduleEntry.get_uid(target)
5428
5425
5429
5426
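# Illustrative sketch (not part of the original module): the before_insert/before_update
# listeners above keep task_uid in sync, so it never needs to be set by hand. The uid is
# a sha1 derived from the task dot-notation together with its args/kwargs, so identical
# schedules collide on the s_task_uid_idx unique constraint. The task path below is a
# hypothetical placeholder.
def _example_schedule_uid(entry):
    entry.task_dot_notation = 'rhodecode.lib.celerylib.tasks.send_email'
    entry.task_args = []
    entry.task_kwargs = {}
    return ScheduleEntry.get_uid(entry)   # same value the listeners would assign
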
5430 class _BaseBranchPerms(BaseModel):
5427 class _BaseBranchPerms(BaseModel):
5431 @classmethod
5428 @classmethod
5432 def compute_hash(cls, value):
5429 def compute_hash(cls, value):
5433 return sha1_safe(value)
5430 return sha1_safe(value)
5434
5431
5435 @hybrid_property
5432 @hybrid_property
5436 def branch_pattern(self):
5433 def branch_pattern(self):
5437 return self._branch_pattern or '*'
5434 return self._branch_pattern or '*'
5438
5435
5439 @hybrid_property
5436 @hybrid_property
5440 def branch_hash(self):
5437 def branch_hash(self):
5441 return self._branch_hash
5438 return self._branch_hash
5442
5439
5443 def _validate_glob(self, value):
5440 def _validate_glob(self, value):
5444 re.compile('^' + glob2re(value) + '$')
5441 re.compile('^' + glob2re(value) + '$')
5445
5442
5446 @branch_pattern.setter
5443 @branch_pattern.setter
5447 def branch_pattern(self, value):
5444 def branch_pattern(self, value):
5448 self._validate_glob(value)
5445 self._validate_glob(value)
5449 self._branch_pattern = value or '*'
5446 self._branch_pattern = value or '*'
5450 # set the Hash when setting the branch pattern
5447 # set the Hash when setting the branch pattern
5451 self._branch_hash = self.compute_hash(self._branch_pattern)
5448 self._branch_hash = self.compute_hash(self._branch_pattern)
5452
5449
5453 def matches(self, branch):
5450 def matches(self, branch):
5454 """
5451 """
5455 Check if the given branch matches this entry
5452 Check if the given branch matches this entry
5456
5453
5457 :param branch: branch name for the commit
5454 :param branch: branch name for the commit
5458 """
5455 """
5459
5456
5460 branch = branch or ''
5457 branch = branch or ''
5461
5458
5462 branch_matches = True
5459 branch_matches = True
5463 if branch:
5460 if branch:
5464 branch_regex = re.compile('^' + glob2re(self.branch_pattern) + '$')
5461 branch_regex = re.compile('^' + glob2re(self.branch_pattern) + '$')
5465 branch_matches = bool(branch_regex.search(branch))
5462 branch_matches = bool(branch_regex.search(branch))
5466
5463
5467 return branch_matches
5464 return branch_matches
5468
5465
5469
5466
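# Illustrative sketch (not part of the original module): the branch_pattern setter
# validates the glob, falls back to '*', and stores a hash (sha1_safe) of the pattern
# so rules can be looked up via branch_hash. `perm` stands for a
# UserToRepoBranchPermission or UserGroupToRepoBranchPermission instance.
def _example_branch_permission_pattern(perm):
    perm.branch_pattern = 'release/*'     # validated via glob2re
    assert perm.branch_hash == perm.compute_hash('release/*')
    return perm.matches('release/1.2')    # glob match against a branch name
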
5470 class UserToRepoBranchPermission(Base, _BaseBranchPerms):
5467 class UserToRepoBranchPermission(Base, _BaseBranchPerms):
5471 __tablename__ = 'user_to_repo_branch_permissions'
5468 __tablename__ = 'user_to_repo_branch_permissions'
5472 __table_args__ = (
5469 __table_args__ = (
5473 base_table_args
5470 base_table_args
5474 )
5471 )
5475
5472
5476 branch_rule_id = Column('branch_rule_id', Integer(), primary_key=True)
5473 branch_rule_id = Column('branch_rule_id', Integer(), primary_key=True)
5477
5474
5478 repository_id = Column('repository_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
5475 repository_id = Column('repository_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
5479 repo = relationship('Repository', back_populates='user_branch_perms')
5476 repo = relationship('Repository', back_populates='user_branch_perms')
5480
5477
5481 permission_id = Column('permission_id', Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
5478 permission_id = Column('permission_id', Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
5482 permission = relationship('Permission')
5479 permission = relationship('Permission')
5483
5480
5484 rule_to_perm_id = Column('rule_to_perm_id', Integer(), ForeignKey('repo_to_perm.repo_to_perm_id'), nullable=False, unique=None, default=None)
5481 rule_to_perm_id = Column('rule_to_perm_id', Integer(), ForeignKey('repo_to_perm.repo_to_perm_id'), nullable=False, unique=None, default=None)
5485 user_repo_to_perm = relationship('UserRepoToPerm', back_populates='branch_perm_entry')
5482 user_repo_to_perm = relationship('UserRepoToPerm', back_populates='branch_perm_entry')
5486
5483
5487 rule_order = Column('rule_order', Integer(), nullable=False)
5484 rule_order = Column('rule_order', Integer(), nullable=False)
5488 _branch_pattern = Column('branch_pattern', UnicodeText().with_variant(UnicodeText(2048), 'mysql'), default='*') # glob
5485 _branch_pattern = Column('branch_pattern', UnicodeText().with_variant(UnicodeText(2048), 'mysql'), default='*') # glob
5489 _branch_hash = Column('branch_hash', UnicodeText().with_variant(UnicodeText(2048), 'mysql'))
5486 _branch_hash = Column('branch_hash', UnicodeText().with_variant(UnicodeText(2048), 'mysql'))
5490
5487
5491 def __repr__(self):
5488 def __repr__(self):
5492 return f'<UserBranchPermission({self.user_repo_to_perm} => {self.branch_pattern!r})>'
5489 return f'<UserBranchPermission({self.user_repo_to_perm} => {self.branch_pattern!r})>'
5493
5490
5494
5491
5495 class UserGroupToRepoBranchPermission(Base, _BaseBranchPerms):
5492 class UserGroupToRepoBranchPermission(Base, _BaseBranchPerms):
5496 __tablename__ = 'user_group_to_repo_branch_permissions'
5493 __tablename__ = 'user_group_to_repo_branch_permissions'
5497 __table_args__ = (
5494 __table_args__ = (
5498 base_table_args
5495 base_table_args
5499 )
5496 )
5500
5497
5501 branch_rule_id = Column('branch_rule_id', Integer(), primary_key=True)
5498 branch_rule_id = Column('branch_rule_id', Integer(), primary_key=True)
5502
5499
5503 repository_id = Column('repository_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
5500 repository_id = Column('repository_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
5504 repo = relationship('Repository', back_populates='user_group_branch_perms')
5501 repo = relationship('Repository', back_populates='user_group_branch_perms')
5505
5502
5506 permission_id = Column('permission_id', Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
5503 permission_id = Column('permission_id', Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
5507 permission = relationship('Permission')
5504 permission = relationship('Permission')
5508
5505
5509 rule_to_perm_id = Column('rule_to_perm_id', Integer(), ForeignKey('users_group_repo_to_perm.users_group_to_perm_id'), nullable=False, unique=None, default=None)
5506 rule_to_perm_id = Column('rule_to_perm_id', Integer(), ForeignKey('users_group_repo_to_perm.users_group_to_perm_id'), nullable=False, unique=None, default=None)
5510 user_group_repo_to_perm = relationship('UserGroupRepoToPerm', back_populates='user_group_branch_perms')
5507 user_group_repo_to_perm = relationship('UserGroupRepoToPerm', back_populates='user_group_branch_perms')
5511
5508
5512 rule_order = Column('rule_order', Integer(), nullable=False)
5509 rule_order = Column('rule_order', Integer(), nullable=False)
5513 _branch_pattern = Column('branch_pattern', UnicodeText().with_variant(UnicodeText(2048), 'mysql'), default='*') # glob
5510 _branch_pattern = Column('branch_pattern', UnicodeText().with_variant(UnicodeText(2048), 'mysql'), default='*') # glob
5514 _branch_hash = Column('branch_hash', UnicodeText().with_variant(UnicodeText(2048), 'mysql'))
5511 _branch_hash = Column('branch_hash', UnicodeText().with_variant(UnicodeText(2048), 'mysql'))
5515
5512
5516 def __repr__(self):
5513 def __repr__(self):
5517 return f'<UserBranchPermission({self.user_group_repo_to_perm} => {self.branch_pattern!r})>'
5514 return f'<UserBranchPermission({self.user_group_repo_to_perm} => {self.branch_pattern!r})>'
5518
5515
5519
5516
5520 class UserBookmark(Base, BaseModel):
5517 class UserBookmark(Base, BaseModel):
5521 __tablename__ = 'user_bookmarks'
5518 __tablename__ = 'user_bookmarks'
5522 __table_args__ = (
5519 __table_args__ = (
5523 UniqueConstraint('user_id', 'bookmark_repo_id'),
5520 UniqueConstraint('user_id', 'bookmark_repo_id'),
5524 UniqueConstraint('user_id', 'bookmark_repo_group_id'),
5521 UniqueConstraint('user_id', 'bookmark_repo_group_id'),
5525 UniqueConstraint('user_id', 'bookmark_position'),
5522 UniqueConstraint('user_id', 'bookmark_position'),
5526 base_table_args
5523 base_table_args
5527 )
5524 )
5528
5525
5529 user_bookmark_id = Column("user_bookmark_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
5526 user_bookmark_id = Column("user_bookmark_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
5530 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
5527 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
5531 position = Column("bookmark_position", Integer(), nullable=False)
5528 position = Column("bookmark_position", Integer(), nullable=False)
5532 title = Column("bookmark_title", String(255), nullable=True, unique=None, default=None)
5529 title = Column("bookmark_title", String(255), nullable=True, unique=None, default=None)
5533 redirect_url = Column("bookmark_redirect_url", String(10240), nullable=True, unique=None, default=None)
5530 redirect_url = Column("bookmark_redirect_url", String(10240), nullable=True, unique=None, default=None)
5534 created_on = Column("created_on", DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
5531 created_on = Column("created_on", DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
5535
5532
5536 bookmark_repo_id = Column("bookmark_repo_id", Integer(), ForeignKey("repositories.repo_id"), nullable=True, unique=None, default=None)
5533 bookmark_repo_id = Column("bookmark_repo_id", Integer(), ForeignKey("repositories.repo_id"), nullable=True, unique=None, default=None)
5537 bookmark_repo_group_id = Column("bookmark_repo_group_id", Integer(), ForeignKey("groups.group_id"), nullable=True, unique=None, default=None)
5534 bookmark_repo_group_id = Column("bookmark_repo_group_id", Integer(), ForeignKey("groups.group_id"), nullable=True, unique=None, default=None)
5538
5535
5539 user = relationship("User")
5536 user = relationship("User")
5540
5537
5541 repository = relationship("Repository")
5538 repository = relationship("Repository")
5542 repository_group = relationship("RepoGroup")
5539 repository_group = relationship("RepoGroup")
5543
5540
5544 @classmethod
5541 @classmethod
5545 def get_by_position_for_user(cls, position, user_id):
5542 def get_by_position_for_user(cls, position, user_id):
5546 return cls.query() \
5543 return cls.query() \
5547 .filter(UserBookmark.user_id == user_id) \
5544 .filter(UserBookmark.user_id == user_id) \
5548 .filter(UserBookmark.position == position).scalar()
5545 .filter(UserBookmark.position == position).scalar()
5549
5546
5550 @classmethod
5547 @classmethod
5551 def get_bookmarks_for_user(cls, user_id, cache=True):
5548 def get_bookmarks_for_user(cls, user_id, cache=True):
5552 bookmarks = cls.query() \
5549 bookmarks = cls.query() \
5553 .filter(UserBookmark.user_id == user_id) \
5550 .filter(UserBookmark.user_id == user_id) \
5554 .options(joinedload(UserBookmark.repository)) \
5551 .options(joinedload(UserBookmark.repository)) \
5555 .options(joinedload(UserBookmark.repository_group)) \
5552 .options(joinedload(UserBookmark.repository_group)) \
5556 .order_by(UserBookmark.position.asc())
5553 .order_by(UserBookmark.position.asc())
5557
5554
5558 if cache:
5555 if cache:
5559 bookmarks = bookmarks.options(
5556 bookmarks = bookmarks.options(
5560 FromCache("sql_cache_short", "get_user_{}_bookmarks".format(user_id))
5557 FromCache("sql_cache_short", "get_user_{}_bookmarks".format(user_id))
5561 )
5558 )
5562
5559
5563 return bookmarks.all()
5560 return bookmarks.all()
5564
5561
5565 def __repr__(self):
5562 def __repr__(self):
5566 return f'<UserBookmark({self.position} @ {self.redirect_url!r})>'
5563 return f'<UserBookmark({self.position} @ {self.redirect_url!r})>'
5567
5564
5568
5565
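# Illustrative sketch (not part of the original module): bookmarks come back ordered by
# position and, with cache=True, the query result is memoised in the "sql_cache_short"
# region under a per-user key.
def _example_user_bookmarks(user_id):
    bookmarks = UserBookmark.get_bookmarks_for_user(user_id, cache=True)
    # each row eagerly loads its repository / repository group target
    return [(b.position, b.title, b.redirect_url) for b in bookmarks]
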
5569 class FileStore(Base, BaseModel):
5566 class FileStore(Base, BaseModel):
5570 __tablename__ = 'file_store'
5567 __tablename__ = 'file_store'
5571 __table_args__ = (
5568 __table_args__ = (
5572 base_table_args
5569 base_table_args
5573 )
5570 )
5574
5571
5575 file_store_id = Column('file_store_id', Integer(), primary_key=True)
5572 file_store_id = Column('file_store_id', Integer(), primary_key=True)
5576 file_uid = Column('file_uid', String(1024), nullable=False)
5573 file_uid = Column('file_uid', String(1024), nullable=False)
5577 file_display_name = Column('file_display_name', UnicodeText().with_variant(UnicodeText(2048), 'mysql'), nullable=True)
5574 file_display_name = Column('file_display_name', UnicodeText().with_variant(UnicodeText(2048), 'mysql'), nullable=True)
5578 file_description = Column('file_description', UnicodeText().with_variant(UnicodeText(10240), 'mysql'), nullable=True)
5575 file_description = Column('file_description', UnicodeText().with_variant(UnicodeText(10240), 'mysql'), nullable=True)
5579 file_org_name = Column('file_org_name', UnicodeText().with_variant(UnicodeText(10240), 'mysql'), nullable=False)
5576 file_org_name = Column('file_org_name', UnicodeText().with_variant(UnicodeText(10240), 'mysql'), nullable=False)
5580
5577
5581 # sha256 hash
5578 # sha256 hash
5582 file_hash = Column('file_hash', String(512), nullable=False)
5579 file_hash = Column('file_hash', String(512), nullable=False)
5583 file_size = Column('file_size', BigInteger(), nullable=False)
5580 file_size = Column('file_size', BigInteger(), nullable=False)
5584
5581
5585 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
5582 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
5586 accessed_on = Column('accessed_on', DateTime(timezone=False), nullable=True)
5583 accessed_on = Column('accessed_on', DateTime(timezone=False), nullable=True)
5587 accessed_count = Column('accessed_count', Integer(), default=0)
5584 accessed_count = Column('accessed_count', Integer(), default=0)
5588
5585
5589 enabled = Column('enabled', Boolean(), nullable=False, default=True)
5586 enabled = Column('enabled', Boolean(), nullable=False, default=True)
5590
5587
5591 # if repo/repo_group reference is set, check for permissions
5588 # if repo/repo_group reference is set, check for permissions
5592 check_acl = Column('check_acl', Boolean(), nullable=False, default=True)
5589 check_acl = Column('check_acl', Boolean(), nullable=False, default=True)
5593
5590
5594 # hidden defines an attachment that should be hidden from the artifact listing
5591 # hidden defines an attachment that should be hidden from the artifact listing
5595 hidden = Column('hidden', Boolean(), nullable=False, default=False)
5592 hidden = Column('hidden', Boolean(), nullable=False, default=False)
5596
5593
5597 user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=False)
5594 user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=False)
5598 upload_user = relationship('User', lazy='joined', primaryjoin='User.user_id==FileStore.user_id', back_populates='artifacts')
5595 upload_user = relationship('User', lazy='joined', primaryjoin='User.user_id==FileStore.user_id', back_populates='artifacts')
5599
5596
5600 file_metadata = relationship('FileStoreMetadata', lazy='joined')
5597 file_metadata = relationship('FileStoreMetadata', lazy='joined')
5601
5598
5602 # scope limited to user, which the requester has access to
5599 # scope limited to user, which the requester has access to
5603 scope_user_id = Column(
5600 scope_user_id = Column(
5604 'scope_user_id', Integer(), ForeignKey('users.user_id'),
5601 'scope_user_id', Integer(), ForeignKey('users.user_id'),
5605 nullable=True, unique=None, default=None)
5602 nullable=True, unique=None, default=None)
5606 user = relationship('User', lazy='joined', primaryjoin='User.user_id==FileStore.scope_user_id', back_populates='scope_artifacts')
5603 user = relationship('User', lazy='joined', primaryjoin='User.user_id==FileStore.scope_user_id', back_populates='scope_artifacts')
5607
5604
5608 # scope limited to user group, which the requester has access to
5605 # scope limited to user group, which the requester has access to
5609 scope_user_group_id = Column(
5606 scope_user_group_id = Column(
5610 'scope_user_group_id', Integer(), ForeignKey('users_groups.users_group_id'),
5607 'scope_user_group_id', Integer(), ForeignKey('users_groups.users_group_id'),
5611 nullable=True, unique=None, default=None)
5608 nullable=True, unique=None, default=None)
5612 user_group = relationship('UserGroup', lazy='joined')
5609 user_group = relationship('UserGroup', lazy='joined')
5613
5610
5614 # scope limited to repo, which the requester has access to
5611 # scope limited to repo, which the requester has access to
5615 scope_repo_id = Column(
5612 scope_repo_id = Column(
5616 'scope_repo_id', Integer(), ForeignKey('repositories.repo_id'),
5613 'scope_repo_id', Integer(), ForeignKey('repositories.repo_id'),
5617 nullable=True, unique=None, default=None)
5614 nullable=True, unique=None, default=None)
5618 repo = relationship('Repository', lazy='joined')
5615 repo = relationship('Repository', lazy='joined')
5619
5616
5620 # scope limited to repo group, which the requester has access to
5617 # scope limited to repo group, which the requester has access to
5621 scope_repo_group_id = Column(
5618 scope_repo_group_id = Column(
5622 'scope_repo_group_id', Integer(), ForeignKey('groups.group_id'),
5619 'scope_repo_group_id', Integer(), ForeignKey('groups.group_id'),
5623 nullable=True, unique=None, default=None)
5620 nullable=True, unique=None, default=None)
5624 repo_group = relationship('RepoGroup', lazy='joined')
5621 repo_group = relationship('RepoGroup', lazy='joined')
5625
5622
5626 @classmethod
5623 @classmethod
5627 def get_scope(cls, scope_type, scope_id):
5624 def get_scope(cls, scope_type, scope_id):
5628 if scope_type == 'repo':
5625 if scope_type == 'repo':
5629 return f'repo:{scope_id}'
5626 return f'repo:{scope_id}'
5630 elif scope_type == 'repo-group':
5627 elif scope_type == 'repo-group':
5631 return f'repo-group:{scope_id}'
5628 return f'repo-group:{scope_id}'
5632 elif scope_type == 'user':
5629 elif scope_type == 'user':
5633 return f'user:{scope_id}'
5630 return f'user:{scope_id}'
5634 elif scope_type == 'user-group':
5631 elif scope_type == 'user-group':
5635 return f'user-group:{scope_id}'
5632 return f'user-group:{scope_id}'
5636 else:
5633 else:
5637 return scope_type
5634 return scope_type
5638
5635
5639 @classmethod
5636 @classmethod
5640 def get_by_store_uid(cls, file_store_uid, safe=False):
5637 def get_by_store_uid(cls, file_store_uid, safe=False):
5641 if safe:
5638 if safe:
5642 return FileStore.query().filter(FileStore.file_uid == file_store_uid).first()
5639 return FileStore.query().filter(FileStore.file_uid == file_store_uid).first()
5643 else:
5640 else:
5644 return FileStore.query().filter(FileStore.file_uid == file_store_uid).scalar()
5641 return FileStore.query().filter(FileStore.file_uid == file_store_uid).scalar()
5645
5642
5646 @classmethod
5643 @classmethod
5647 def create(cls, file_uid, filename, file_hash, file_size, file_display_name='',
5644 def create(cls, file_uid, filename, file_hash, file_size, file_display_name='',
5648 file_description='', enabled=True, hidden=False, check_acl=True,
5645 file_description='', enabled=True, hidden=False, check_acl=True,
5649 user_id=None, scope_user_id=None, scope_repo_id=None, scope_repo_group_id=None):
5646 user_id=None, scope_user_id=None, scope_repo_id=None, scope_repo_group_id=None):
5650
5647
5651 store_entry = FileStore()
5648 store_entry = FileStore()
5652 store_entry.file_uid = file_uid
5649 store_entry.file_uid = file_uid
5653 store_entry.file_display_name = file_display_name
5650 store_entry.file_display_name = file_display_name
5654 store_entry.file_org_name = filename
5651 store_entry.file_org_name = filename
5655 store_entry.file_size = file_size
5652 store_entry.file_size = file_size
5656 store_entry.file_hash = file_hash
5653 store_entry.file_hash = file_hash
5657 store_entry.file_description = file_description
5654 store_entry.file_description = file_description
5658
5655
5659 store_entry.check_acl = check_acl
5656 store_entry.check_acl = check_acl
5660 store_entry.enabled = enabled
5657 store_entry.enabled = enabled
5661 store_entry.hidden = hidden
5658 store_entry.hidden = hidden
5662
5659
5663 store_entry.user_id = user_id
5660 store_entry.user_id = user_id
5664 store_entry.scope_user_id = scope_user_id
5661 store_entry.scope_user_id = scope_user_id
5665 store_entry.scope_repo_id = scope_repo_id
5662 store_entry.scope_repo_id = scope_repo_id
5666 store_entry.scope_repo_group_id = scope_repo_group_id
5663 store_entry.scope_repo_group_id = scope_repo_group_id
5667
5664
5668 return store_entry
5665 return store_entry
5669
5666
5670 @classmethod
5667 @classmethod
5671 def store_metadata(cls, file_store_id, args, commit=True):
5668 def store_metadata(cls, file_store_id, args, commit=True):
5672 file_store = FileStore.get(file_store_id)
5669 file_store = FileStore.get(file_store_id)
5673 if file_store is None:
5670 if file_store is None:
5674 return
5671 return
5675
5672
5676 for section, key, value, value_type in args:
5673 for section, key, value, value_type in args:
5677 has_key = FileStoreMetadata().query() \
5674 has_key = FileStoreMetadata().query() \
5678 .filter(FileStoreMetadata.file_store_id == file_store.file_store_id) \
5675 .filter(FileStoreMetadata.file_store_id == file_store.file_store_id) \
5679 .filter(FileStoreMetadata.file_store_meta_section == section) \
5676 .filter(FileStoreMetadata.file_store_meta_section == section) \
5680 .filter(FileStoreMetadata.file_store_meta_key == key) \
5677 .filter(FileStoreMetadata.file_store_meta_key == key) \
5681 .scalar()
5678 .scalar()
5682 if has_key:
5679 if has_key:
5683 msg = 'key `{}` already defined under section `{}` for this file.'\
5680 msg = 'key `{}` already defined under section `{}` for this file.'\
5684 .format(key, section)
5681 .format(key, section)
5685 raise ArtifactMetadataDuplicate(msg, err_section=section, err_key=key)
5682 raise ArtifactMetadataDuplicate(msg, err_section=section, err_key=key)
5686
5683
5687 # NOTE(marcink): raises ArtifactMetadataBadValueType
5684 # NOTE(marcink): raises ArtifactMetadataBadValueType
5688 FileStoreMetadata.valid_value_type(value_type)
5685 FileStoreMetadata.valid_value_type(value_type)
5689
5686
5690 meta_entry = FileStoreMetadata()
5687 meta_entry = FileStoreMetadata()
5691 meta_entry.file_store = file_store
5688 meta_entry.file_store = file_store
5692 meta_entry.file_store_meta_section = section
5689 meta_entry.file_store_meta_section = section
5693 meta_entry.file_store_meta_key = key
5690 meta_entry.file_store_meta_key = key
5694 meta_entry.file_store_meta_value_type = value_type
5691 meta_entry.file_store_meta_value_type = value_type
5695 meta_entry.file_store_meta_value = value
5692 meta_entry.file_store_meta_value = value
5696
5693
5697 Session().add(meta_entry)
5694 Session().add(meta_entry)
5698
5695
5699 try:
5696 try:
5700 if commit:
5697 if commit:
5701 Session().commit()
5698 Session().commit()
5702 except IntegrityError:
5699 except IntegrityError:
5703 Session().rollback()
5700 Session().rollback()
5704 raise ArtifactMetadataDuplicate('Duplicate section/key found for this file.')
5701 raise ArtifactMetadataDuplicate('Duplicate section/key found for this file.')
5705
5702
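# Illustrative usage sketch for store_metadata() above. It expects an
# iterable of (section, key, value, value_type) tuples and raises
# ArtifactMetadataDuplicate when a section/key pair already exists for the
# artifact; the id and tuples below are invented for illustration only.
#
#   >>> FileStore.store_metadata(
#   ...     file_store_id=42,
#   ...     args=[('tags', 'environment', 'production', 'str'),
#   ...           ('build', 'number', '1337', 'int')],
#   ...     commit=True)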
5706 @classmethod
5703 @classmethod
5707 def bump_access_counter(cls, file_uid, commit=True):
5704 def bump_access_counter(cls, file_uid, commit=True):
5708 FileStore().query()\
5705 FileStore().query()\
5709 .filter(FileStore.file_uid == file_uid)\
5706 .filter(FileStore.file_uid == file_uid)\
5710 .update({FileStore.accessed_count: (FileStore.accessed_count + 1),
5707 .update({FileStore.accessed_count: (FileStore.accessed_count + 1),
5711 FileStore.accessed_on: datetime.datetime.now()})
5708 FileStore.accessed_on: datetime.datetime.now()})
5712 if commit:
5709 if commit:
5713 Session().commit()
5710 Session().commit()
5714
5711
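# Illustrative usage sketch for bump_access_counter() above: it increments
# accessed_count and refreshes accessed_on for a single artifact. The uid
# below is a made-up example value.
#
#   >>> FileStore.bump_access_counter(file_uid='example-artifact-uid', commit=True)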
5715 def __json__(self):
5712 def __json__(self):
5716 data = {
5713 data = {
5717 'filename': self.file_display_name,
5714 'filename': self.file_display_name,
5718 'filename_org': self.file_org_name,
5715 'filename_org': self.file_org_name,
5719 'file_uid': self.file_uid,
5716 'file_uid': self.file_uid,
5720 'description': self.file_description,
5717 'description': self.file_description,
5721 'hidden': self.hidden,
5718 'hidden': self.hidden,
5722 'size': self.file_size,
5719 'size': self.file_size,
5723 'created_on': self.created_on,
5720 'created_on': self.created_on,
5724 'uploaded_by': self.upload_user.get_api_data(details='basic'),
5721 'uploaded_by': self.upload_user.get_api_data(details='basic'),
5725 'downloaded_times': self.accessed_count,
5722 'downloaded_times': self.accessed_count,
5726 'sha256': self.file_hash,
5723 'sha256': self.file_hash,
5727 'metadata': self.file_metadata,
5724 'metadata': self.file_metadata,
5728 }
5725 }
5729
5726
5730 return data
5727 return data
5731
5728
5732 def __repr__(self):
5729 def __repr__(self):
5733 return f'<FileStore({self.file_store_id})>'
5730 return f'<FileStore({self.file_store_id})>'
5734
5731
5735
5732
5736 class FileStoreMetadata(Base, BaseModel):
5733 class FileStoreMetadata(Base, BaseModel):
5737 __tablename__ = 'file_store_metadata'
5734 __tablename__ = 'file_store_metadata'
5738 __table_args__ = (
5735 __table_args__ = (
5739 UniqueConstraint('file_store_id', 'file_store_meta_section_hash', 'file_store_meta_key_hash'),
5736 UniqueConstraint('file_store_id', 'file_store_meta_section_hash', 'file_store_meta_key_hash'),
5740 Index('file_store_meta_section_idx', 'file_store_meta_section', mysql_length=255),
5737 Index('file_store_meta_section_idx', 'file_store_meta_section', mysql_length=255),
5741 Index('file_store_meta_key_idx', 'file_store_meta_key', mysql_length=255),
5738 Index('file_store_meta_key_idx', 'file_store_meta_key', mysql_length=255),
5742 base_table_args
5739 base_table_args
5743 )
5740 )
5744 SETTINGS_TYPES = {
5741 SETTINGS_TYPES = {
5745 'str': safe_str,
5742 'str': safe_str,
5746 'int': safe_int,
5743 'int': safe_int,
5747 'unicode': safe_str,
5744 'unicode': safe_str,
5748 'bool': str2bool,
5745 'bool': str2bool,
5749 'list': functools.partial(aslist, sep=',')
5746 'list': functools.partial(aslist, sep=',')
5750 }
5747 }
5751
5748
5752 file_store_meta_id = Column(
5749 file_store_meta_id = Column(
5753 "file_store_meta_id", Integer(), nullable=False, unique=True, default=None,
5750 "file_store_meta_id", Integer(), nullable=False, unique=True, default=None,
5754 primary_key=True)
5751 primary_key=True)
5755 _file_store_meta_section = Column(
5752 _file_store_meta_section = Column(
5756 "file_store_meta_section", UnicodeText().with_variant(UnicodeText(1024), 'mysql'),
5753 "file_store_meta_section", UnicodeText().with_variant(UnicodeText(1024), 'mysql'),
5757 nullable=True, unique=None, default=None)
5754 nullable=True, unique=None, default=None)
5758 _file_store_meta_section_hash = Column(
5755 _file_store_meta_section_hash = Column(
5759 "file_store_meta_section_hash", String(255),
5756 "file_store_meta_section_hash", String(255),
5760 nullable=True, unique=None, default=None)
5757 nullable=True, unique=None, default=None)
5761 _file_store_meta_key = Column(
5758 _file_store_meta_key = Column(
5762 "file_store_meta_key", UnicodeText().with_variant(UnicodeText(1024), 'mysql'),
5759 "file_store_meta_key", UnicodeText().with_variant(UnicodeText(1024), 'mysql'),
5763 nullable=True, unique=None, default=None)
5760 nullable=True, unique=None, default=None)
5764 _file_store_meta_key_hash = Column(
5761 _file_store_meta_key_hash = Column(
5765 "file_store_meta_key_hash", String(255), nullable=True, unique=None, default=None)
5762 "file_store_meta_key_hash", String(255), nullable=True, unique=None, default=None)
5766 _file_store_meta_value = Column(
5763 _file_store_meta_value = Column(
5767 "file_store_meta_value", UnicodeText().with_variant(UnicodeText(20480), 'mysql'),
5764 "file_store_meta_value", UnicodeText().with_variant(UnicodeText(20480), 'mysql'),
5768 nullable=True, unique=None, default=None)
5765 nullable=True, unique=None, default=None)
5769 _file_store_meta_value_type = Column(
5766 _file_store_meta_value_type = Column(
5770 "file_store_meta_value_type", String(255), nullable=True, unique=None,
5767 "file_store_meta_value_type", String(255), nullable=True, unique=None,
5771 default='unicode')
5768 default='unicode')
5772
5769
5773 file_store_id = Column(
5770 file_store_id = Column(
5774 'file_store_id', Integer(), ForeignKey('file_store.file_store_id'),
5771 'file_store_id', Integer(), ForeignKey('file_store.file_store_id'),
5775 nullable=True, unique=None, default=None)
5772 nullable=True, unique=None, default=None)
5776
5773
5777 file_store = relationship('FileStore', lazy='joined', viewonly=True)
5774 file_store = relationship('FileStore', lazy='joined', viewonly=True)
5778
5775
5779 @classmethod
5776 @classmethod
5780 def valid_value_type(cls, value):
5777 def valid_value_type(cls, value):
5781 if value.split('.')[0] not in cls.SETTINGS_TYPES:
5778 if value.split('.')[0] not in cls.SETTINGS_TYPES:
5782 raise ArtifactMetadataBadValueType(
5779 raise ArtifactMetadataBadValueType(
5783 'value_type must be one of %s got %s' % (cls.SETTINGS_TYPES.keys(), value))
5780 'value_type must be one of %s got %s' % (cls.SETTINGS_TYPES.keys(), value))
5784
5781
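# Illustrative sketch for valid_value_type() above: any SETTINGS_TYPES key is
# accepted, optionally followed by a modifier suffix such as '.encrypted';
# anything else raises ArtifactMetadataBadValueType.
#
#   >>> FileStoreMetadata.valid_value_type('int')                # accepted
#   >>> FileStoreMetadata.valid_value_type('unicode.encrypted')  # accepted, base type 'unicode'
#   >>> FileStoreMetadata.valid_value_type('float')              # raises ArtifactMetadataBadValueType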
5785 @hybrid_property
5782 @hybrid_property
5786 def file_store_meta_section(self):
5783 def file_store_meta_section(self):
5787 return self._file_store_meta_section
5784 return self._file_store_meta_section
5788
5785
5789 @file_store_meta_section.setter
5786 @file_store_meta_section.setter
5790 def file_store_meta_section(self, value):
5787 def file_store_meta_section(self, value):
5791 self._file_store_meta_section = value
5788 self._file_store_meta_section = value
5792 self._file_store_meta_section_hash = _hash_key(value)
5789 self._file_store_meta_section_hash = _hash_key(value)
5793
5790
5794 @hybrid_property
5791 @hybrid_property
5795 def file_store_meta_key(self):
5792 def file_store_meta_key(self):
5796 return self._file_store_meta_key
5793 return self._file_store_meta_key
5797
5794
5798 @file_store_meta_key.setter
5795 @file_store_meta_key.setter
5799 def file_store_meta_key(self, value):
5796 def file_store_meta_key(self, value):
5800 self._file_store_meta_key = value
5797 self._file_store_meta_key = value
5801 self._file_store_meta_key_hash = _hash_key(value)
5798 self._file_store_meta_key_hash = _hash_key(value)
5802
5799
5803 @hybrid_property
5800 @hybrid_property
5804 def file_store_meta_value(self):
5801 def file_store_meta_value(self):
5805 val = self._file_store_meta_value
5802 val = self._file_store_meta_value
5806
5803
5807 if self._file_store_meta_value_type:
5804 if self._file_store_meta_value_type:
5808 # e.g. for 'unicode.encrypted' the base type is 'unicode'
5805 # e.g. for 'unicode.encrypted' the base type is 'unicode'
5809 _type = self._file_store_meta_value_type.split('.')[0]
5806 _type = self._file_store_meta_value_type.split('.')[0]
5810 # decode the encrypted value if it's encrypted field type
5807 # decode the encrypted value if it's encrypted field type
5811 if '.encrypted' in self._file_store_meta_value_type:
5808 if '.encrypted' in self._file_store_meta_value_type:
5812 cipher = EncryptedTextValue()
5809 cipher = EncryptedTextValue()
5813 val = safe_str(cipher.process_result_value(val, None))
5810 val = safe_str(cipher.process_result_value(val, None))
5814 # do final type conversion
5811 # do final type conversion
5815 converter = self.SETTINGS_TYPES.get(_type) or self.SETTINGS_TYPES['unicode']
5812 converter = self.SETTINGS_TYPES.get(_type) or self.SETTINGS_TYPES['unicode']
5816 val = converter(val)
5813 val = converter(val)
5817
5814
5818 return val
5815 return val
5819
5816
5820 @file_store_meta_value.setter
5817 @file_store_meta_value.setter
5821 def file_store_meta_value(self, val):
5818 def file_store_meta_value(self, val):
5822 val = safe_str(val)
5819 val = safe_str(val)
5823 # encrypt the value when an encrypted field type is used
5820 # encrypt the value when an encrypted field type is used
5824 if '.encrypted' in self.file_store_meta_value_type:
5821 if '.encrypted' in self.file_store_meta_value_type:
5825 cipher = EncryptedTextValue()
5822 cipher = EncryptedTextValue()
5826 val = safe_str(cipher.process_bind_param(val, None))
5823 val = safe_str(cipher.process_bind_param(val, None))
5827 self._file_store_meta_value = val
5824 self._file_store_meta_value = val
5828
5825
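# Illustrative round trip through the hybrid properties above, assuming the
# application's encryption secret is configured; the section, key and value
# are invented for illustration. A '*.encrypted' value type makes the setter
# store the text encrypted, while the getter decrypts it and applies the
# base-type converter from SETTINGS_TYPES. Note that the value type must be
# assigned before the value, because the value setter consults it.
#
#   >>> meta = FileStoreMetadata()
#   >>> meta.file_store_meta_section = 'ci'
#   >>> meta.file_store_meta_key = 'token'
#   >>> meta.file_store_meta_value_type = 'str.encrypted'  # checked by valid_value_type()
#   >>> meta.file_store_meta_value = 'super-secret'        # stored encrypted
#   >>> meta.file_store_meta_value                         # decrypted, then converted by SETTINGS_TYPES['str']
#   'super-secret'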
5829 @hybrid_property
5826 @hybrid_property
5830 def file_store_meta_value_type(self):
5827 def file_store_meta_value_type(self):
5831 return self._file_store_meta_value_type
5828 return self._file_store_meta_value_type
5832
5829
5833 @file_store_meta_value_type.setter
5830 @file_store_meta_value_type.setter
5834 def file_store_meta_value_type(self, val):
5831 def file_store_meta_value_type(self, val):
5835 # e.g. 'unicode.encrypted'
5832 # e.g. 'unicode.encrypted'
5836 self.valid_value_type(val)
5833 self.valid_value_type(val)
5837 self._file_store_meta_value_type = val
5834 self._file_store_meta_value_type = val
5838
5835
5839 def __json__(self):
5836 def __json__(self):
5840 data = {
5837 data = {
5841 'artifact': self.file_store.file_uid,
5838 'artifact': self.file_store.file_uid,
5842 'section': self.file_store_meta_section,
5839 'section': self.file_store_meta_section,
5843 'key': self.file_store_meta_key,
5840 'key': self.file_store_meta_key,
5844 'value': self.file_store_meta_value,
5841 'value': self.file_store_meta_value,
5845 }
5842 }
5846
5843
5847 return data
5844 return data
5848
5845
5849 def __repr__(self):
5846 def __repr__(self):
5850 return '<%s[%s]%s=>%s>' % (self.cls_name, self.file_store_meta_section,
5847 return '<%s[%s]%s=>%s>' % (self.cls_name, self.file_store_meta_section,
5851 self.file_store_meta_key, self.file_store_meta_value)
5848 self.file_store_meta_key, self.file_store_meta_value)
5852
5849
5853
5850
5854 class DbMigrateVersion(Base, BaseModel):
5851 class DbMigrateVersion(Base, BaseModel):
5855 __tablename__ = 'db_migrate_version'
5852 __tablename__ = 'db_migrate_version'
5856 __table_args__ = (
5853 __table_args__ = (
5857 base_table_args,
5854 base_table_args,
5858 )
5855 )
5859
5856
5860 repository_id = Column('repository_id', String(250), primary_key=True)
5857 repository_id = Column('repository_id', String(250), primary_key=True)
5861 repository_path = Column('repository_path', Text)
5858 repository_path = Column('repository_path', Text)
5862 version = Column('version', Integer)
5859 version = Column('version', Integer)
5863
5860
5864 @classmethod
5861 @classmethod
5865 def set_version(cls, version):
5862 def set_version(cls, version):
5866 """
5863 """
5867 Helper for forcing a different version, usually for debugging purposes via ishell.
5864 Helper for forcing a different version, usually for debugging purposes via ishell.
5868 """
5865 """
5869 ver = DbMigrateVersion.query().first()
5866 ver = DbMigrateVersion.query().first()
5870 ver.version = version
5867 ver.version = version
5871 Session().commit()
5868 Session().commit()
5872
5869
5873
5870
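# Illustrative sketch for DbMigrateVersion.set_version() above: as the
# docstring notes, it is meant for manual use (e.g. from an ishell session)
# to force a migration version; 114 is an arbitrary example value.
#
#   >>> DbMigrateVersion.set_version(114)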
5874 class DbSession(Base, BaseModel):
5871 class DbSession(Base, BaseModel):
5875 __tablename__ = 'db_session'
5872 __tablename__ = 'db_session'
5876 __table_args__ = (
5873 __table_args__ = (
5877 base_table_args,
5874 base_table_args,
5878 )
5875 )
5879
5876
5880 def __repr__(self):
5877 def __repr__(self):
5881 return f'<DB:DbSession({self.id})>'
5878 return f'<DB:DbSession({self.id})>'
5882
5879
5883 id = Column('id', Integer())
5880 id = Column('id', Integer())
5884 namespace = Column('namespace', String(255), primary_key=True)
5881 namespace = Column('namespace', String(255), primary_key=True)
5885 accessed = Column('accessed', DateTime, nullable=False)
5882 accessed = Column('accessed', DateTime, nullable=False)
5886 created = Column('created', DateTime, nullable=False)
5883 created = Column('created', DateTime, nullable=False)
5887 data = Column('data', PickleType, nullable=False)
5884 data = Column('data', PickleType, nullable=False)
@@ -1,634 +1,630 @@
1 # Copyright (C) 2010-2023 RhodeCode GmbH
1 # Copyright (C) 2010-2023 RhodeCode GmbH
2 #
2 #
3 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
4 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
5 # (only), as published by the Free Software Foundation.
5 # (only), as published by the Free Software Foundation.
6 #
6 #
7 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
10 # GNU General Public License for more details.
11 #
11 #
12 # You should have received a copy of the GNU Affero General Public License
12 # You should have received a copy of the GNU Affero General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 #
14 #
15 # This program is dual-licensed. If you wish to learn more about the
15 # This program is dual-licensed. If you wish to learn more about the
16 # RhodeCode Enterprise Edition, including its added features, Support services,
16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18
18
19 """
19 """
20 These are the form validation classes; see
20 These are the form validation classes; see
21 http://formencode.org/module-formencode.validators.html
21 http://formencode.org/module-formencode.validators.html
22 for a list of all available validators.
22 for a list of all available validators.
23
23
24 We can also create our own validators.
24 We can also create our own validators.
25
25
26 The table below outlines the options which can be used in a schema in addition to the validators themselves
26 The table below outlines the options which can be used in a schema in addition to the validators themselves
27 pre_validators [] These validators will be applied before the schema
27 pre_validators [] These validators will be applied before the schema
28 chained_validators [] These validators will be applied after the schema
28 chained_validators [] These validators will be applied after the schema
29 allow_extra_fields False If True, then it is not an error when keys that aren't associated with a validator are present
29 allow_extra_fields False If True, then it is not an error when keys that aren't associated with a validator are present
30 filter_extra_fields False If True, then keys that aren't associated with a validator are removed
30 filter_extra_fields False If True, then keys that aren't associated with a validator are removed
31 if_key_missing NoDefault If this is given, then any keys that aren't available but are expected will be replaced with this value (and then validated). This does not override a present .if_missing attribute on validators. NoDefault is a special FormEncode class meaning that no default value has been specified and therefore missing keys shouldn't take a default value.
31 if_key_missing NoDefault If this is given, then any keys that aren't available but are expected will be replaced with this value (and then validated). This does not override a present .if_missing attribute on validators. NoDefault is a special FormEncode class meaning that no default value has been specified and therefore missing keys shouldn't take a default value.
32 ignore_key_missing False If True, then missing keys will be missing in the result, if the validator doesn't have .if_missing on it already
32 ignore_key_missing False If True, then missing keys will be missing in the result, if the validator doesn't have .if_missing on it already
33
33
34
34
35 <name> = formencode.validators.<name of validator>
35 <name> = formencode.validators.<name of validator>
36 <name> must equal form name
36 <name> must equal form name
37 list=[1,2,3,4,5]
37 list=[1,2,3,4,5]
38 for SELECT use formencode.All(OneOf(list), Int())
38 for SELECT use formencode.All(OneOf(list), Int())
39
39
40 """
40 """
41
41
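# A minimal formencode schema illustrating the options described in the
# docstring above (allow_extra_fields, filter_extra_fields; chained and
# pre validators are declared the same way); the field names and the input
# values are invented for illustration.
import formencode
from formencode import validators


class _ExampleForm(formencode.Schema):
    allow_extra_fields = True    # unknown keys are not an error...
    filter_extra_fields = True   # ...and are dropped from the result
    name = validators.UnicodeString(strip=True, min=1, not_empty=True)
    age = validators.Int(min=0, if_missing=None)


def _validate_example(data):
    try:
        # e.g. {'name': '  alice ', 'age': '42', 'extra': 'ignored'}
        # returns roughly {'name': 'alice', 'age': 42}
        return _ExampleForm().to_python(data)
    except formencode.Invalid as errors:
        return errors.unpack_errors()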
42 import deform
42 import deform
43 import logging
43 import logging
44 import formencode
44 import formencode
45
45
46 from pkg_resources import resource_filename
46 from pkg_resources import resource_filename
47 from formencode import All, Pipe
47 from formencode import All, Pipe
48
48
49 from pyramid.threadlocal import get_current_request
49 from pyramid.threadlocal import get_current_request
50
50
51 from rhodecode import BACKENDS
51 from rhodecode import BACKENDS
52 from rhodecode.lib import helpers
52 from rhodecode.lib import helpers
53 from rhodecode.model import validators as v
53 from rhodecode.model import validators as v
54
54
55 log = logging.getLogger(__name__)
55 log = logging.getLogger(__name__)
56
56
57
57
58 deform_templates = resource_filename('deform', 'templates')
58 deform_templates = resource_filename('deform', 'templates')
59 rhodecode_templates = resource_filename('rhodecode', 'templates/forms')
59 rhodecode_templates = resource_filename('rhodecode', 'templates/forms')
60 search_path = (rhodecode_templates, deform_templates)
60 search_path = (rhodecode_templates, deform_templates)
61
61
62
62
63 class RhodecodeFormZPTRendererFactory(deform.ZPTRendererFactory):
63 class RhodecodeFormZPTRendererFactory(deform.ZPTRendererFactory):
64 """ Subclass of ZPTRendererFactory to add rhodecode context variables """
64 """ Subclass of ZPTRendererFactory to add rhodecode context variables """
65 def __call__(self, template_name, **kw):
65 def __call__(self, template_name, **kw):
66 kw['h'] = helpers
66 kw['h'] = helpers
67 kw['request'] = get_current_request()
67 kw['request'] = get_current_request()
68 return self.load(template_name)(**kw)
68 return self.load(template_name)(**kw)
69
69
70
70
71 form_renderer = RhodecodeFormZPTRendererFactory(search_path)
71 form_renderer = RhodecodeFormZPTRendererFactory(search_path)
72 deform.Form.set_default_renderer(form_renderer)
72 deform.Form.set_default_renderer(form_renderer)
73
73
74
74
75 def LoginForm(localizer):
75 def LoginForm(localizer):
76 _ = localizer
76 _ = localizer
77
77
78 class _LoginForm(formencode.Schema):
78 class _LoginForm(formencode.Schema):
79 allow_extra_fields = True
79 allow_extra_fields = True
80 filter_extra_fields = True
80 filter_extra_fields = True
81 username = v.UnicodeString(
81 username = v.UnicodeString(
82 strip=True,
82 strip=True,
83 min=1,
83 min=1,
84 not_empty=True,
84 not_empty=True,
85 messages={
85 messages={
86 'empty': _('Please enter a login'),
86 'empty': _('Please enter a login'),
87 'tooShort': _('Enter a value %(min)i characters long or more')
87 'tooShort': _('Enter a value %(min)i characters long or more')
88 }
88 }
89 )
89 )
90
90
91 password = v.UnicodeString(
91 password = v.UnicodeString(
92 strip=False,
92 strip=False,
93 min=3,
93 min=3,
94 max=72,
94 max=72,
95 not_empty=True,
95 not_empty=True,
96 messages={
96 messages={
97 'empty': _('Please enter a password'),
97 'empty': _('Please enter a password'),
98 'tooShort': _('Enter %(min)i characters or more')}
98 'tooShort': _('Enter %(min)i characters or more')}
99 )
99 )
100
100
101 remember = v.StringBoolean(if_missing=False)
101 remember = v.StringBoolean(if_missing=False)
102
102
103 chained_validators = [v.ValidAuth(localizer)]
103 chained_validators = [v.ValidAuth(localizer)]
104 return _LoginForm
104 return _LoginForm
105
105
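# Illustrative sketch of how a schema factory such as LoginForm() above is
# typically consumed from a view. The localizer wiring (request.translate)
# and the POST payload are assumptions for illustration; the chained
# ValidAuth() validator additionally checks the credentials against the
# configured authentication plugins.
#
#   >>> login_form = LoginForm(request.translate)()
#   >>> try:
#   ...     defaults = login_form.to_python({'username': 'alice',
#   ...                                      'password': 's3cret',
#   ...                                      'remember': 'on'})
#   ... except formencode.Invalid as errors:
#   ...     form_errors = errors.unpack_errors()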
106
106
107 def UserForm(localizer, edit=False, available_languages=None, old_data=None):
107 def UserForm(localizer, edit=False, available_languages=None, old_data=None):
108 old_data = old_data or {}
108 old_data = old_data or {}
109 available_languages = available_languages or []
109 available_languages = available_languages or []
110 _ = localizer
110 _ = localizer
111
111
112 class _UserForm(formencode.Schema):
112 class _UserForm(formencode.Schema):
113 allow_extra_fields = True
113 allow_extra_fields = True
114 filter_extra_fields = True
114 filter_extra_fields = True
115 username = All(v.UnicodeString(strip=True, min=1, not_empty=True),
115 username = All(v.UnicodeString(strip=True, min=1, not_empty=True),
116 v.ValidUsername(localizer, edit, old_data))
116 v.ValidUsername(localizer, edit, old_data))
117 if edit:
117 if edit:
118 new_password = All(
118 new_password = All(
119 v.ValidPassword(localizer),
119 v.ValidPassword(localizer),
120 v.UnicodeString(strip=False, min=6, max=72, not_empty=False)
120 v.UnicodeString(strip=False, min=6, max=72, not_empty=False)
121 )
121 )
122 password_confirmation = All(
122 password_confirmation = All(
123 v.ValidPassword(localizer),
123 v.ValidPassword(localizer),
124 v.UnicodeString(strip=False, min=6, max=72, not_empty=False),
124 v.UnicodeString(strip=False, min=6, max=72, not_empty=False),
125 )
125 )
126 admin = v.StringBoolean(if_missing=False)
126 admin = v.StringBoolean(if_missing=False)
127 else:
127 else:
128 password = All(
128 password = All(
129 v.ValidPassword(localizer),
129 v.ValidPassword(localizer),
130 v.UnicodeString(strip=False, min=6, max=72, not_empty=True)
130 v.UnicodeString(strip=False, min=6, max=72, not_empty=True)
131 )
131 )
132 password_confirmation = All(
132 password_confirmation = All(
133 v.ValidPassword(localizer),
133 v.ValidPassword(localizer),
134 v.UnicodeString(strip=False, min=6, max=72, not_empty=False)
134 v.UnicodeString(strip=False, min=6, max=72, not_empty=False)
135 )
135 )
136
136
137 password_change = v.StringBoolean(if_missing=False)
137 password_change = v.StringBoolean(if_missing=False)
138 create_repo_group = v.StringBoolean(if_missing=False)
138 create_repo_group = v.StringBoolean(if_missing=False)
139
139
140 active = v.StringBoolean(if_missing=False)
140 active = v.StringBoolean(if_missing=False)
141 firstname = v.UnicodeString(strip=True, min=1, not_empty=False)
141 firstname = v.UnicodeString(strip=True, min=1, not_empty=False)
142 lastname = v.UnicodeString(strip=True, min=1, not_empty=False)
142 lastname = v.UnicodeString(strip=True, min=1, not_empty=False)
143 email = All(v.UniqSystemEmail(localizer, old_data), v.Email(not_empty=True))
143 email = All(v.UniqSystemEmail(localizer, old_data), v.Email(not_empty=True))
144 description = v.UnicodeString(strip=True, min=1, max=250, not_empty=False,
144 description = v.UnicodeString(strip=True, min=1, max=250, not_empty=False,
145 if_missing='')
145 if_missing='')
146 extern_name = v.UnicodeString(strip=True)
146 extern_name = v.UnicodeString(strip=True)
147 extern_type = v.UnicodeString(strip=True)
147 extern_type = v.UnicodeString(strip=True)
148 language = v.OneOf(available_languages, hideList=False,
148 language = v.OneOf(available_languages, hideList=False,
149 testValueList=True, if_missing=None)
149 testValueList=True, if_missing=None)
150 chained_validators = [v.ValidPasswordsMatch(localizer)]
150 chained_validators = [v.ValidPasswordsMatch(localizer)]
151 return _UserForm
151 return _UserForm
152
152
153
153
154 def UserGroupForm(localizer, edit=False, old_data=None, allow_disabled=False):
154 def UserGroupForm(localizer, edit=False, old_data=None, allow_disabled=False):
155 old_data = old_data or {}
155 old_data = old_data or {}
156 _ = localizer
156 _ = localizer
157
157
158 class _UserGroupForm(formencode.Schema):
158 class _UserGroupForm(formencode.Schema):
159 allow_extra_fields = True
159 allow_extra_fields = True
160 filter_extra_fields = True
160 filter_extra_fields = True
161
161
162 users_group_name = All(
162 users_group_name = All(
163 v.UnicodeString(strip=True, min=1, not_empty=True),
163 v.UnicodeString(strip=True, min=1, not_empty=True),
164 v.ValidUserGroup(localizer, edit, old_data)
164 v.ValidUserGroup(localizer, edit, old_data)
165 )
165 )
166 user_group_description = v.UnicodeString(strip=True, min=1,
166 user_group_description = v.UnicodeString(strip=True, min=1,
167 not_empty=False)
167 not_empty=False)
168
168
169 users_group_active = v.StringBoolean(if_missing=False)
169 users_group_active = v.StringBoolean(if_missing=False)
170
170
171 if edit:
171 if edit:
172 # this is user group owner
172 # this is user group owner
173 user = All(
173 user = All(
174 v.UnicodeString(not_empty=True),
174 v.UnicodeString(not_empty=True),
175 v.ValidRepoUser(localizer, allow_disabled))
175 v.ValidRepoUser(localizer, allow_disabled))
176 return _UserGroupForm
176 return _UserGroupForm
177
177
178
178
179 def RepoGroupForm(localizer, edit=False, old_data=None, available_groups=None,
179 def RepoGroupForm(localizer, edit=False, old_data=None, available_groups=None,
180 can_create_in_root=False, allow_disabled=False):
180 can_create_in_root=False, allow_disabled=False):
181 _ = localizer
181 _ = localizer
182 old_data = old_data or {}
182 old_data = old_data or {}
183 available_groups = available_groups or []
183 available_groups = available_groups or []
184
184
185 class _RepoGroupForm(formencode.Schema):
185 class _RepoGroupForm(formencode.Schema):
186 allow_extra_fields = True
186 allow_extra_fields = True
187 filter_extra_fields = False
187 filter_extra_fields = False
188
188
189 group_name = All(v.UnicodeString(strip=True, min=1, not_empty=True),
189 group_name = All(v.UnicodeString(strip=True, min=1, not_empty=True),
190 v.SlugifyName(localizer),)
190 v.SlugifyName(localizer),)
191 group_description = v.UnicodeString(strip=True, min=1,
191 group_description = v.UnicodeString(strip=True, min=1,
192 not_empty=False)
192 not_empty=False)
193 group_copy_permissions = v.StringBoolean(if_missing=False)
193 group_copy_permissions = v.StringBoolean(if_missing=False)
194
194
195 group_parent_id = v.OneOf(available_groups, hideList=False,
195 group_parent_id = v.OneOf(available_groups, hideList=False,
196 testValueList=True, not_empty=True)
196 testValueList=True, not_empty=True)
197 enable_locking = v.StringBoolean(if_missing=False)
197 enable_locking = v.StringBoolean(if_missing=False)
198 chained_validators = [
198 chained_validators = [
199 v.ValidRepoGroup(localizer, edit, old_data, can_create_in_root)]
199 v.ValidRepoGroup(localizer, edit, old_data, can_create_in_root)]
200
200
201 if edit:
201 if edit:
202 # this is repo group owner
202 # this is repo group owner
203 user = All(
203 user = All(
204 v.UnicodeString(not_empty=True),
204 v.UnicodeString(not_empty=True),
205 v.ValidRepoUser(localizer, allow_disabled))
205 v.ValidRepoUser(localizer, allow_disabled))
206 return _RepoGroupForm
206 return _RepoGroupForm
207
207
208
208
209 def RegisterForm(localizer, edit=False, old_data=None):
209 def RegisterForm(localizer, edit=False, old_data=None):
210 _ = localizer
210 _ = localizer
211 old_data = old_data or {}
211 old_data = old_data or {}
212
212
213 class _RegisterForm(formencode.Schema):
213 class _RegisterForm(formencode.Schema):
214 allow_extra_fields = True
214 allow_extra_fields = True
215 filter_extra_fields = True
215 filter_extra_fields = True
216 username = All(
216 username = All(
217 v.ValidUsername(localizer, edit, old_data),
217 v.ValidUsername(localizer, edit, old_data),
218 v.UnicodeString(strip=True, min=1, not_empty=True)
218 v.UnicodeString(strip=True, min=1, not_empty=True)
219 )
219 )
220 password = All(
220 password = All(
221 v.ValidPassword(localizer),
221 v.ValidPassword(localizer),
222 v.UnicodeString(strip=False, min=6, max=72, not_empty=True)
222 v.UnicodeString(strip=False, min=6, max=72, not_empty=True)
223 )
223 )
224 password_confirmation = All(
224 password_confirmation = All(
225 v.ValidPassword(localizer),
225 v.ValidPassword(localizer),
226 v.UnicodeString(strip=False, min=6, max=72, not_empty=True)
226 v.UnicodeString(strip=False, min=6, max=72, not_empty=True)
227 )
227 )
228 active = v.StringBoolean(if_missing=False)
228 active = v.StringBoolean(if_missing=False)
229 firstname = v.UnicodeString(strip=True, min=1, not_empty=False)
229 firstname = v.UnicodeString(strip=True, min=1, not_empty=False)
230 lastname = v.UnicodeString(strip=True, min=1, not_empty=False)
230 lastname = v.UnicodeString(strip=True, min=1, not_empty=False)
231 email = All(v.UniqSystemEmail(localizer, old_data), v.Email(not_empty=True))
231 email = All(v.UniqSystemEmail(localizer, old_data), v.Email(not_empty=True))
232
232
233 chained_validators = [v.ValidPasswordsMatch(localizer)]
233 chained_validators = [v.ValidPasswordsMatch(localizer)]
234 return _RegisterForm
234 return _RegisterForm
235
235
236
236
237 def PasswordResetForm(localizer):
237 def PasswordResetForm(localizer):
238 _ = localizer
238 _ = localizer
239
239
240 class _PasswordResetForm(formencode.Schema):
240 class _PasswordResetForm(formencode.Schema):
241 allow_extra_fields = True
241 allow_extra_fields = True
242 filter_extra_fields = True
242 filter_extra_fields = True
243 email = All(v.ValidSystemEmail(localizer), v.Email(not_empty=True))
243 email = All(v.ValidSystemEmail(localizer), v.Email(not_empty=True))
244 return _PasswordResetForm
244 return _PasswordResetForm
245
245
246
246
247 def RepoForm(localizer, edit=False, old_data=None, repo_groups=None, allow_disabled=False):
247 def RepoForm(localizer, edit=False, old_data=None, repo_groups=None, allow_disabled=False):
248 _ = localizer
248 _ = localizer
249 old_data = old_data or {}
249 old_data = old_data or {}
250 repo_groups = repo_groups or []
250 repo_groups = repo_groups or []
251 supported_backends = BACKENDS.keys()
251 supported_backends = BACKENDS.keys()
252
252
253 class _RepoForm(formencode.Schema):
253 class _RepoForm(formencode.Schema):
254 allow_extra_fields = True
254 allow_extra_fields = True
255 filter_extra_fields = False
255 filter_extra_fields = False
256 repo_name = All(v.UnicodeString(strip=True, min=1, not_empty=True),
256 repo_name = All(v.UnicodeString(strip=True, min=1, not_empty=True),
257 v.SlugifyName(localizer), v.CannotHaveGitSuffix(localizer))
257 v.SlugifyName(localizer), v.CannotHaveGitSuffix(localizer))
258 repo_group = All(v.CanWriteGroup(localizer, old_data),
258 repo_group = All(v.CanWriteGroup(localizer, old_data),
259 v.OneOf(repo_groups, hideList=True))
259 v.OneOf(repo_groups, hideList=True))
260 repo_type = v.OneOf(supported_backends, required=False,
260 repo_type = v.OneOf(supported_backends, required=False,
261 if_missing=old_data.get('repo_type'))
261 if_missing=old_data.get('repo_type'))
262 repo_description = v.UnicodeString(strip=True, min=1, not_empty=False)
262 repo_description = v.UnicodeString(strip=True, min=1, not_empty=False)
263 repo_private = v.StringBoolean(if_missing=False)
263 repo_private = v.StringBoolean(if_missing=False)
264 repo_copy_permissions = v.StringBoolean(if_missing=False)
264 repo_copy_permissions = v.StringBoolean(if_missing=False)
265 clone_uri = All(v.UnicodeString(strip=True, min=1, not_empty=False))
265 clone_uri = All(v.UnicodeString(strip=True, min=1, not_empty=False))
266
266
267 repo_enable_statistics = v.StringBoolean(if_missing=False)
267 repo_enable_statistics = v.StringBoolean(if_missing=False)
268 repo_enable_downloads = v.StringBoolean(if_missing=False)
268 repo_enable_downloads = v.StringBoolean(if_missing=False)
269 repo_enable_locking = v.StringBoolean(if_missing=False)
269 repo_enable_locking = v.StringBoolean(if_missing=False)
270
270
271 if edit:
271 if edit:
272 # this is repo owner
272 # this is repo owner
273 user = All(
273 user = All(
274 v.UnicodeString(not_empty=True),
274 v.UnicodeString(not_empty=True),
275 v.ValidRepoUser(localizer, allow_disabled))
275 v.ValidRepoUser(localizer, allow_disabled))
276 clone_uri_change = v.UnicodeString(
276 clone_uri_change = v.UnicodeString(
277 not_empty=False, if_missing=v.Missing)
277 not_empty=False, if_missing=v.Missing)
278
278
279 chained_validators = [v.ValidCloneUri(localizer),
279 chained_validators = [v.ValidCloneUri(localizer),
280 v.ValidRepoName(localizer, edit, old_data)]
280 v.ValidRepoName(localizer, edit, old_data)]
281 return _RepoForm
281 return _RepoForm
282
282
283
283
284 def RepoPermsForm(localizer):
284 def RepoPermsForm(localizer):
285 _ = localizer
285 _ = localizer
286
286
287 class _RepoPermsForm(formencode.Schema):
287 class _RepoPermsForm(formencode.Schema):
288 allow_extra_fields = True
288 allow_extra_fields = True
289 filter_extra_fields = False
289 filter_extra_fields = False
290 chained_validators = [v.ValidPerms(localizer, type_='repo')]
290 chained_validators = [v.ValidPerms(localizer, type_='repo')]
291 return _RepoPermsForm
291 return _RepoPermsForm
292
292
293
293
294 def RepoGroupPermsForm(localizer, valid_recursive_choices):
294 def RepoGroupPermsForm(localizer, valid_recursive_choices):
295 _ = localizer
295 _ = localizer
296
296
297 class _RepoGroupPermsForm(formencode.Schema):
297 class _RepoGroupPermsForm(formencode.Schema):
298 allow_extra_fields = True
298 allow_extra_fields = True
299 filter_extra_fields = False
299 filter_extra_fields = False
300 recursive = v.OneOf(valid_recursive_choices)
300 recursive = v.OneOf(valid_recursive_choices)
301 chained_validators = [v.ValidPerms(localizer, type_='repo_group')]
301 chained_validators = [v.ValidPerms(localizer, type_='repo_group')]
302 return _RepoGroupPermsForm
302 return _RepoGroupPermsForm
303
303
304
304
305 def UserGroupPermsForm(localizer):
305 def UserGroupPermsForm(localizer):
306 _ = localizer
306 _ = localizer
307
307
308 class _UserPermsForm(formencode.Schema):
308 class _UserPermsForm(formencode.Schema):
309 allow_extra_fields = True
309 allow_extra_fields = True
310 filter_extra_fields = False
310 filter_extra_fields = False
311 chained_validators = [v.ValidPerms(localizer, type_='user_group')]
311 chained_validators = [v.ValidPerms(localizer, type_='user_group')]
312 return _UserPermsForm
312 return _UserPermsForm
313
313
314
314
315 def RepoFieldForm(localizer):
315 def RepoFieldForm(localizer):
316 _ = localizer
316 _ = localizer
317
317
318 class _RepoFieldForm(formencode.Schema):
318 class _RepoFieldForm(formencode.Schema):
319 filter_extra_fields = True
319 filter_extra_fields = True
320 allow_extra_fields = True
320 allow_extra_fields = True
321
321
322 new_field_key = All(v.FieldKey(localizer),
322 new_field_key = All(v.FieldKey(localizer),
323 v.UnicodeString(strip=True, min=3, not_empty=True))
323 v.UnicodeString(strip=True, min=3, not_empty=True))
324 new_field_value = v.UnicodeString(not_empty=False, if_missing='')
324 new_field_value = v.UnicodeString(not_empty=False, if_missing='')
325 new_field_type = v.OneOf(['str', 'unicode', 'list', 'tuple'],
325 new_field_type = v.OneOf(['str', 'unicode', 'list', 'tuple'],
326 if_missing='str')
326 if_missing='str')
327 new_field_label = v.UnicodeString(not_empty=False)
327 new_field_label = v.UnicodeString(not_empty=False)
328 new_field_desc = v.UnicodeString(not_empty=False)
328 new_field_desc = v.UnicodeString(not_empty=False)
329 return _RepoFieldForm
329 return _RepoFieldForm
330
330
331
331
332 def RepoForkForm(localizer, edit=False, old_data=None,
332 def RepoForkForm(localizer, edit=False, old_data=None,
333 supported_backends=BACKENDS.keys(), repo_groups=None):
333 supported_backends=BACKENDS.keys(), repo_groups=None):
334 _ = localizer
334 _ = localizer
335 old_data = old_data or {}
335 old_data = old_data or {}
336 repo_groups = repo_groups or []
336 repo_groups = repo_groups or []
337
337
338 class _RepoForkForm(formencode.Schema):
338 class _RepoForkForm(formencode.Schema):
339 allow_extra_fields = True
339 allow_extra_fields = True
340 filter_extra_fields = False
340 filter_extra_fields = False
341 repo_name = All(v.UnicodeString(strip=True, min=1, not_empty=True),
341 repo_name = All(v.UnicodeString(strip=True, min=1, not_empty=True),
342 v.SlugifyName(localizer))
342 v.SlugifyName(localizer))
343 repo_group = All(v.CanWriteGroup(localizer, ),
343 repo_group = All(v.CanWriteGroup(localizer, ),
344 v.OneOf(repo_groups, hideList=True))
344 v.OneOf(repo_groups, hideList=True))
345 repo_type = All(v.ValidForkType(localizer, old_data), v.OneOf(supported_backends))
345 repo_type = All(v.ValidForkType(localizer, old_data), v.OneOf(supported_backends))
346 description = v.UnicodeString(strip=True, min=1, not_empty=True)
346 description = v.UnicodeString(strip=True, min=1, not_empty=True)
347 private = v.StringBoolean(if_missing=False)
347 private = v.StringBoolean(if_missing=False)
348 copy_permissions = v.StringBoolean(if_missing=False)
348 copy_permissions = v.StringBoolean(if_missing=False)
349 fork_parent_id = v.UnicodeString()
349 fork_parent_id = v.UnicodeString()
350 chained_validators = [v.ValidForkName(localizer, edit, old_data)]
350 chained_validators = [v.ValidForkName(localizer, edit, old_data)]
351 return _RepoForkForm
351 return _RepoForkForm
352
352
353
353
354 def ApplicationSettingsForm(localizer):
354 def ApplicationSettingsForm(localizer):
355 _ = localizer
355 _ = localizer
356
356
357 class _ApplicationSettingsForm(formencode.Schema):
357 class _ApplicationSettingsForm(formencode.Schema):
358 allow_extra_fields = True
358 allow_extra_fields = True
359 filter_extra_fields = False
359 filter_extra_fields = False
360 rhodecode_title = v.UnicodeString(strip=True, max=40, not_empty=False)
360 rhodecode_title = v.UnicodeString(strip=True, max=40, not_empty=False)
361 rhodecode_realm = v.UnicodeString(strip=True, min=1, not_empty=True)
361 rhodecode_realm = v.UnicodeString(strip=True, min=1, not_empty=True)
362 rhodecode_pre_code = v.UnicodeString(strip=True, min=1, not_empty=False)
362 rhodecode_pre_code = v.UnicodeString(strip=True, min=1, not_empty=False)
363 rhodecode_post_code = v.UnicodeString(strip=True, min=1, not_empty=False)
363 rhodecode_post_code = v.UnicodeString(strip=True, min=1, not_empty=False)
364 rhodecode_captcha_public_key = v.UnicodeString(strip=True, min=1, not_empty=False)
364 rhodecode_captcha_public_key = v.UnicodeString(strip=True, min=1, not_empty=False)
365 rhodecode_captcha_private_key = v.UnicodeString(strip=True, min=1, not_empty=False)
365 rhodecode_captcha_private_key = v.UnicodeString(strip=True, min=1, not_empty=False)
366 rhodecode_create_personal_repo_group = v.StringBoolean(if_missing=False)
366 rhodecode_create_personal_repo_group = v.StringBoolean(if_missing=False)
367 rhodecode_personal_repo_group_pattern = v.UnicodeString(strip=True, min=1, not_empty=False)
367 rhodecode_personal_repo_group_pattern = v.UnicodeString(strip=True, min=1, not_empty=False)
368 return _ApplicationSettingsForm
368 return _ApplicationSettingsForm
369
369
370
370
371 def ApplicationVisualisationForm(localizer):
371 def ApplicationVisualisationForm(localizer):
372 from rhodecode.model.db import Repository
372 from rhodecode.model.db import Repository
373 _ = localizer
373 _ = localizer
374
374
375 class _ApplicationVisualisationForm(formencode.Schema):
375 class _ApplicationVisualisationForm(formencode.Schema):
376 allow_extra_fields = True
376 allow_extra_fields = True
377 filter_extra_fields = False
377 filter_extra_fields = False
378 rhodecode_show_public_icon = v.StringBoolean(if_missing=False)
378 rhodecode_show_public_icon = v.StringBoolean(if_missing=False)
379 rhodecode_show_private_icon = v.StringBoolean(if_missing=False)
379 rhodecode_show_private_icon = v.StringBoolean(if_missing=False)
380 rhodecode_stylify_metatags = v.StringBoolean(if_missing=False)
380 rhodecode_stylify_metatags = v.StringBoolean(if_missing=False)
381
381
382 rhodecode_repository_fields = v.StringBoolean(if_missing=False)
382 rhodecode_repository_fields = v.StringBoolean(if_missing=False)
383 rhodecode_lightweight_journal = v.StringBoolean(if_missing=False)
383 rhodecode_lightweight_journal = v.StringBoolean(if_missing=False)
384 rhodecode_dashboard_items = v.Int(min=5, not_empty=True)
384 rhodecode_dashboard_items = v.Int(min=5, not_empty=True)
385 rhodecode_admin_grid_items = v.Int(min=5, not_empty=True)
385 rhodecode_admin_grid_items = v.Int(min=5, not_empty=True)
386 rhodecode_show_version = v.StringBoolean(if_missing=False)
386 rhodecode_show_version = v.StringBoolean(if_missing=False)
387 rhodecode_use_gravatar = v.StringBoolean(if_missing=False)
387 rhodecode_use_gravatar = v.StringBoolean(if_missing=False)
388 rhodecode_markup_renderer = v.OneOf(['markdown', 'rst'])
388 rhodecode_markup_renderer = v.OneOf(['markdown', 'rst'])
389 rhodecode_gravatar_url = v.UnicodeString(min=3)
389 rhodecode_gravatar_url = v.UnicodeString(min=3)
390 rhodecode_clone_uri_tmpl = v.UnicodeString(not_empty=False, if_empty=Repository.DEFAULT_CLONE_URI)
390 rhodecode_clone_uri_tmpl = v.UnicodeString(not_empty=False, if_empty=Repository.DEFAULT_CLONE_URI)
391 rhodecode_clone_uri_id_tmpl = v.UnicodeString(not_empty=False, if_empty=Repository.DEFAULT_CLONE_URI_ID)
391 rhodecode_clone_uri_id_tmpl = v.UnicodeString(not_empty=False, if_empty=Repository.DEFAULT_CLONE_URI_ID)
392 rhodecode_clone_uri_ssh_tmpl = v.UnicodeString(not_empty=False, if_empty=Repository.DEFAULT_CLONE_URI_SSH)
392 rhodecode_clone_uri_ssh_tmpl = v.UnicodeString(not_empty=False, if_empty=Repository.DEFAULT_CLONE_URI_SSH)
393 rhodecode_support_url = v.UnicodeString()
393 rhodecode_support_url = v.UnicodeString()
394 rhodecode_show_revision_number = v.StringBoolean(if_missing=False)
394 rhodecode_show_revision_number = v.StringBoolean(if_missing=False)
395 rhodecode_show_sha_length = v.Int(min=4, not_empty=True)
395 rhodecode_show_sha_length = v.Int(min=4, not_empty=True)
396 return _ApplicationVisualisationForm
396 return _ApplicationVisualisationForm
397
397
398
398
399 class _BaseVcsSettingsForm(formencode.Schema):
399 class _BaseVcsSettingsForm(formencode.Schema):
400
400
401 allow_extra_fields = True
401 allow_extra_fields = True
402 filter_extra_fields = False
402 filter_extra_fields = False
403 hooks_changegroup_repo_size = v.StringBoolean(if_missing=False)
403 hooks_changegroup_repo_size = v.StringBoolean(if_missing=False)
404 hooks_changegroup_push_logger = v.StringBoolean(if_missing=False)
404 hooks_changegroup_push_logger = v.StringBoolean(if_missing=False)
405 hooks_outgoing_pull_logger = v.StringBoolean(if_missing=False)
405 hooks_outgoing_pull_logger = v.StringBoolean(if_missing=False)
406
406
407 # PR/Code-review
407 # PR/Code-review
408 rhodecode_pr_merge_enabled = v.StringBoolean(if_missing=False)
408 rhodecode_pr_merge_enabled = v.StringBoolean(if_missing=False)
409 rhodecode_use_outdated_comments = v.StringBoolean(if_missing=False)
409 rhodecode_use_outdated_comments = v.StringBoolean(if_missing=False)
410
410
411 # hg
411 # hg
412 extensions_largefiles = v.StringBoolean(if_missing=False)
412 extensions_largefiles = v.StringBoolean(if_missing=False)
413 extensions_evolve = v.StringBoolean(if_missing=False)
413 extensions_evolve = v.StringBoolean(if_missing=False)
414 phases_publish = v.StringBoolean(if_missing=False)
414 phases_publish = v.StringBoolean(if_missing=False)
415
415
416 rhodecode_hg_use_rebase_for_merging = v.StringBoolean(if_missing=False)
416 rhodecode_hg_use_rebase_for_merging = v.StringBoolean(if_missing=False)
417 rhodecode_hg_close_branch_before_merging = v.StringBoolean(if_missing=False)
417 rhodecode_hg_close_branch_before_merging = v.StringBoolean(if_missing=False)
418
418
419 # git
419 # git
420 vcs_git_lfs_enabled = v.StringBoolean(if_missing=False)
420 vcs_git_lfs_enabled = v.StringBoolean(if_missing=False)
421 rhodecode_git_use_rebase_for_merging = v.StringBoolean(if_missing=False)
421 rhodecode_git_use_rebase_for_merging = v.StringBoolean(if_missing=False)
422 rhodecode_git_close_branch_before_merging = v.StringBoolean(if_missing=False)
422 rhodecode_git_close_branch_before_merging = v.StringBoolean(if_missing=False)
423
423
424 # cache
424 # cache
425 rhodecode_diff_cache = v.StringBoolean(if_missing=False)
425 rhodecode_diff_cache = v.StringBoolean(if_missing=False)
426
426
427
427
428 def ApplicationUiSettingsForm(localizer):
428 def ApplicationUiSettingsForm(localizer):
429 _ = localizer
429 _ = localizer
430
430
431 class _ApplicationUiSettingsForm(_BaseVcsSettingsForm):
431 class _ApplicationUiSettingsForm(_BaseVcsSettingsForm):
432 web_push_ssl = v.StringBoolean(if_missing=False)
432 web_push_ssl = v.StringBoolean(if_missing=False)
433 paths_root_path = All(
434 v.ValidPath(localizer),
435 v.UnicodeString(strip=True, min=1, not_empty=True)
436 )
437 largefiles_usercache = All(
433 largefiles_usercache = All(
438 v.ValidPath(localizer),
434 v.ValidPath(localizer),
439 v.UnicodeString(strip=True, min=2, not_empty=True))
435 v.UnicodeString(strip=True, min=2, not_empty=True))
440 vcs_git_lfs_store_location = All(
436 vcs_git_lfs_store_location = All(
441 v.ValidPath(localizer),
437 v.ValidPath(localizer),
442 v.UnicodeString(strip=True, min=2, not_empty=True))
438 v.UnicodeString(strip=True, min=2, not_empty=True))
443 extensions_hggit = v.StringBoolean(if_missing=False)
439 extensions_hggit = v.StringBoolean(if_missing=False)
444 new_svn_branch = v.ValidSvnPattern(localizer, section='vcs_svn_branch')
440 new_svn_branch = v.ValidSvnPattern(localizer, section='vcs_svn_branch')
445 new_svn_tag = v.ValidSvnPattern(localizer, section='vcs_svn_tag')
441 new_svn_tag = v.ValidSvnPattern(localizer, section='vcs_svn_tag')
446 return _ApplicationUiSettingsForm
442 return _ApplicationUiSettingsForm
447
443
448
444
449 def RepoVcsSettingsForm(localizer, repo_name):
445 def RepoVcsSettingsForm(localizer, repo_name):
450 _ = localizer
446 _ = localizer
451
447
452 class _RepoVcsSettingsForm(_BaseVcsSettingsForm):
448 class _RepoVcsSettingsForm(_BaseVcsSettingsForm):
453 inherit_global_settings = v.StringBoolean(if_missing=False)
449 inherit_global_settings = v.StringBoolean(if_missing=False)
454 new_svn_branch = v.ValidSvnPattern(localizer,
450 new_svn_branch = v.ValidSvnPattern(localizer,
455 section='vcs_svn_branch', repo_name=repo_name)
451 section='vcs_svn_branch', repo_name=repo_name)
456 new_svn_tag = v.ValidSvnPattern(localizer,
452 new_svn_tag = v.ValidSvnPattern(localizer,
457 section='vcs_svn_tag', repo_name=repo_name)
453 section='vcs_svn_tag', repo_name=repo_name)
458 return _RepoVcsSettingsForm
454 return _RepoVcsSettingsForm
459
455
460
456
461 def LabsSettingsForm(localizer):
457 def LabsSettingsForm(localizer):
462 _ = localizer
458 _ = localizer
463
459
464 class _LabSettingsForm(formencode.Schema):
460 class _LabSettingsForm(formencode.Schema):
465 allow_extra_fields = True
461 allow_extra_fields = True
466 filter_extra_fields = False
462 filter_extra_fields = False
467 return _LabSettingsForm
463 return _LabSettingsForm
468
464
469
465
470 def ApplicationPermissionsForm(
466 def ApplicationPermissionsForm(
471 localizer, register_choices, password_reset_choices,
467 localizer, register_choices, password_reset_choices,
472 extern_activate_choices):
468 extern_activate_choices):
473 _ = localizer
469 _ = localizer
474
470
475 class _DefaultPermissionsForm(formencode.Schema):
471 class _DefaultPermissionsForm(formencode.Schema):
476 allow_extra_fields = True
472 allow_extra_fields = True
477 filter_extra_fields = True
473 filter_extra_fields = True
478
474
479 anonymous = v.StringBoolean(if_missing=False)
475 anonymous = v.StringBoolean(if_missing=False)
480 default_register = v.OneOf(register_choices)
476 default_register = v.OneOf(register_choices)
481 default_register_message = v.UnicodeString()
477 default_register_message = v.UnicodeString()
482 default_password_reset = v.OneOf(password_reset_choices)
478 default_password_reset = v.OneOf(password_reset_choices)
483 default_extern_activate = v.OneOf(extern_activate_choices)
479 default_extern_activate = v.OneOf(extern_activate_choices)
484 return _DefaultPermissionsForm
480 return _DefaultPermissionsForm
485
481
486
482
487 def ObjectPermissionsForm(localizer, repo_perms_choices, group_perms_choices,
483 def ObjectPermissionsForm(localizer, repo_perms_choices, group_perms_choices,
488 user_group_perms_choices):
484 user_group_perms_choices):
489 _ = localizer
485 _ = localizer
490
486
491 class _ObjectPermissionsForm(formencode.Schema):
487 class _ObjectPermissionsForm(formencode.Schema):
492 allow_extra_fields = True
488 allow_extra_fields = True
493 filter_extra_fields = True
489 filter_extra_fields = True
494 overwrite_default_repo = v.StringBoolean(if_missing=False)
490 overwrite_default_repo = v.StringBoolean(if_missing=False)
495 overwrite_default_group = v.StringBoolean(if_missing=False)
491 overwrite_default_group = v.StringBoolean(if_missing=False)
496 overwrite_default_user_group = v.StringBoolean(if_missing=False)
492 overwrite_default_user_group = v.StringBoolean(if_missing=False)
497
493
498 default_repo_perm = v.OneOf(repo_perms_choices)
494 default_repo_perm = v.OneOf(repo_perms_choices)
499 default_group_perm = v.OneOf(group_perms_choices)
495 default_group_perm = v.OneOf(group_perms_choices)
500 default_user_group_perm = v.OneOf(user_group_perms_choices)
496 default_user_group_perm = v.OneOf(user_group_perms_choices)
501
497
502 return _ObjectPermissionsForm
498 return _ObjectPermissionsForm
503
499
504
500
505 def BranchPermissionsForm(localizer, branch_perms_choices):
501 def BranchPermissionsForm(localizer, branch_perms_choices):
506 _ = localizer
502 _ = localizer
507
503
508 class _BranchPermissionsForm(formencode.Schema):
504 class _BranchPermissionsForm(formencode.Schema):
509 allow_extra_fields = True
505 allow_extra_fields = True
510 filter_extra_fields = True
506 filter_extra_fields = True
511 overwrite_default_branch = v.StringBoolean(if_missing=False)
507 overwrite_default_branch = v.StringBoolean(if_missing=False)
512 default_branch_perm = v.OneOf(branch_perms_choices)
508 default_branch_perm = v.OneOf(branch_perms_choices)
513
509
514 return _BranchPermissionsForm
510 return _BranchPermissionsForm
515
511
516
512
517 def UserPermissionsForm(localizer, create_choices, create_on_write_choices,
513 def UserPermissionsForm(localizer, create_choices, create_on_write_choices,
518 repo_group_create_choices, user_group_create_choices,
514 repo_group_create_choices, user_group_create_choices,
519 fork_choices, inherit_default_permissions_choices):
515 fork_choices, inherit_default_permissions_choices):
520 _ = localizer
516 _ = localizer
521
517
522 class _DefaultPermissionsForm(formencode.Schema):
518 class _DefaultPermissionsForm(formencode.Schema):
523 allow_extra_fields = True
519 allow_extra_fields = True
524 filter_extra_fields = True
520 filter_extra_fields = True
525
521
526 anonymous = v.StringBoolean(if_missing=False)
522 anonymous = v.StringBoolean(if_missing=False)
527
523
528 default_repo_create = v.OneOf(create_choices)
524 default_repo_create = v.OneOf(create_choices)
529 default_repo_create_on_write = v.OneOf(create_on_write_choices)
525 default_repo_create_on_write = v.OneOf(create_on_write_choices)
530 default_user_group_create = v.OneOf(user_group_create_choices)
526 default_user_group_create = v.OneOf(user_group_create_choices)
531 default_repo_group_create = v.OneOf(repo_group_create_choices)
527 default_repo_group_create = v.OneOf(repo_group_create_choices)
532 default_fork_create = v.OneOf(fork_choices)
528 default_fork_create = v.OneOf(fork_choices)
533 default_inherit_default_permissions = v.OneOf(inherit_default_permissions_choices)
529 default_inherit_default_permissions = v.OneOf(inherit_default_permissions_choices)
534 return _DefaultPermissionsForm
530 return _DefaultPermissionsForm
535
531
536
532
537 def UserIndividualPermissionsForm(localizer):
533 def UserIndividualPermissionsForm(localizer):
538 _ = localizer
534 _ = localizer
539
535
540 class _DefaultPermissionsForm(formencode.Schema):
536 class _DefaultPermissionsForm(formencode.Schema):
541 allow_extra_fields = True
537 allow_extra_fields = True
542 filter_extra_fields = True
538 filter_extra_fields = True
543
539
544 inherit_default_permissions = v.StringBoolean(if_missing=False)
540 inherit_default_permissions = v.StringBoolean(if_missing=False)
545 return _DefaultPermissionsForm
541 return _DefaultPermissionsForm
546
542
547
543
548 def DefaultsForm(localizer, edit=False, old_data=None, supported_backends=BACKENDS.keys()):
544 def DefaultsForm(localizer, edit=False, old_data=None, supported_backends=BACKENDS.keys()):
549 _ = localizer
545 _ = localizer
550 old_data = old_data or {}
546 old_data = old_data or {}
551
547
552 class _DefaultsForm(formencode.Schema):
548 class _DefaultsForm(formencode.Schema):
553 allow_extra_fields = True
549 allow_extra_fields = True
554 filter_extra_fields = True
550 filter_extra_fields = True
555 default_repo_type = v.OneOf(supported_backends)
551 default_repo_type = v.OneOf(supported_backends)
556 default_repo_private = v.StringBoolean(if_missing=False)
552 default_repo_private = v.StringBoolean(if_missing=False)
557 default_repo_enable_statistics = v.StringBoolean(if_missing=False)
553 default_repo_enable_statistics = v.StringBoolean(if_missing=False)
558 default_repo_enable_downloads = v.StringBoolean(if_missing=False)
554 default_repo_enable_downloads = v.StringBoolean(if_missing=False)
559 default_repo_enable_locking = v.StringBoolean(if_missing=False)
555 default_repo_enable_locking = v.StringBoolean(if_missing=False)
560 return _DefaultsForm
556 return _DefaultsForm
561
557
562
558
563 def AuthSettingsForm(localizer):
559 def AuthSettingsForm(localizer):
564 _ = localizer
560 _ = localizer
565
561
566 class _AuthSettingsForm(formencode.Schema):
562 class _AuthSettingsForm(formencode.Schema):
567 allow_extra_fields = True
563 allow_extra_fields = True
568 filter_extra_fields = True
564 filter_extra_fields = True
569 auth_plugins = All(v.ValidAuthPlugins(localizer),
565 auth_plugins = All(v.ValidAuthPlugins(localizer),
570 v.UniqueListFromString(localizer)(not_empty=True))
566 v.UniqueListFromString(localizer)(not_empty=True))
571 return _AuthSettingsForm
567 return _AuthSettingsForm
572
568
573
569
574 def UserExtraEmailForm(localizer):
570 def UserExtraEmailForm(localizer):
575 _ = localizer
571 _ = localizer
576
572
577 class _UserExtraEmailForm(formencode.Schema):
573 class _UserExtraEmailForm(formencode.Schema):
578 email = All(v.UniqSystemEmail(localizer), v.Email(not_empty=True))
574 email = All(v.UniqSystemEmail(localizer), v.Email(not_empty=True))
579 return _UserExtraEmailForm
575 return _UserExtraEmailForm
580
576
581
577
582 def UserExtraIpForm(localizer):
578 def UserExtraIpForm(localizer):
583 _ = localizer
579 _ = localizer
584
580
585 class _UserExtraIpForm(formencode.Schema):
581 class _UserExtraIpForm(formencode.Schema):
586 ip = v.ValidIp(localizer)(not_empty=True)
582 ip = v.ValidIp(localizer)(not_empty=True)
587 return _UserExtraIpForm
583 return _UserExtraIpForm
588
584
589
585
590 def PullRequestForm(localizer, repo_id):
586 def PullRequestForm(localizer, repo_id):
591 _ = localizer
587 _ = localizer
592
588
593 class ReviewerForm(formencode.Schema):
589 class ReviewerForm(formencode.Schema):
594 user_id = v.Int(not_empty=True)
590 user_id = v.Int(not_empty=True)
595 reasons = All()
591 reasons = All()
596 rules = All(v.UniqueList(localizer, convert=int)())
592 rules = All(v.UniqueList(localizer, convert=int)())
597 mandatory = v.StringBoolean()
593 mandatory = v.StringBoolean()
598 role = v.String(if_missing='reviewer')
594 role = v.String(if_missing='reviewer')
599
595
600 class ObserverForm(formencode.Schema):
596 class ObserverForm(formencode.Schema):
601 user_id = v.Int(not_empty=True)
597 user_id = v.Int(not_empty=True)
602 reasons = All()
598 reasons = All()
603 rules = All(v.UniqueList(localizer, convert=int)())
599 rules = All(v.UniqueList(localizer, convert=int)())
604 mandatory = v.StringBoolean()
600 mandatory = v.StringBoolean()
605 role = v.String(if_missing='observer')
601 role = v.String(if_missing='observer')
606
602
607 class _PullRequestForm(formencode.Schema):
603 class _PullRequestForm(formencode.Schema):
608 allow_extra_fields = True
604 allow_extra_fields = True
609 filter_extra_fields = True
605 filter_extra_fields = True
610
606
611 common_ancestor = v.UnicodeString(strip=True, required=True)
607 common_ancestor = v.UnicodeString(strip=True, required=True)
612 source_repo = v.UnicodeString(strip=True, required=True)
608 source_repo = v.UnicodeString(strip=True, required=True)
613 source_ref = v.UnicodeString(strip=True, required=True)
609 source_ref = v.UnicodeString(strip=True, required=True)
614 target_repo = v.UnicodeString(strip=True, required=True)
610 target_repo = v.UnicodeString(strip=True, required=True)
615 target_ref = v.UnicodeString(strip=True, required=True)
611 target_ref = v.UnicodeString(strip=True, required=True)
616 revisions = All(#v.NotReviewedRevisions(localizer, repo_id)(),
612 revisions = All(#v.NotReviewedRevisions(localizer, repo_id)(),
617 v.UniqueList(localizer)(not_empty=True))
613 v.UniqueList(localizer)(not_empty=True))
618 review_members = formencode.ForEach(ReviewerForm())
614 review_members = formencode.ForEach(ReviewerForm())
619 observer_members = formencode.ForEach(ObserverForm())
615 observer_members = formencode.ForEach(ObserverForm())
620 pullrequest_title = v.UnicodeString(strip=True, required=True, min=1, max=255)
616 pullrequest_title = v.UnicodeString(strip=True, required=True, min=1, max=255)
621 pullrequest_desc = v.UnicodeString(strip=True, required=False)
617 pullrequest_desc = v.UnicodeString(strip=True, required=False)
622 description_renderer = v.UnicodeString(strip=True, required=False)
618 description_renderer = v.UnicodeString(strip=True, required=False)
623
619
624 return _PullRequestForm
620 return _PullRequestForm
625
621
626
622
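# Hedged sketch (editorial addition): _PullRequestForm above validates
# `review_members` / `observer_members` with formencode.ForEach, i.e. a list
# of dicts matching ReviewerForm / ObserverForm. The exact POST encoding is
# application-specific; the dict shape implied by the schema, with purely
# illustrative values, is roughly:
example_pull_request_payload = {
    'common_ancestor': 'abcdef0123456789',            # illustrative commit id
    'source_repo': 'group/source-repo',
    'source_ref': 'branch:default:abcdef0123456789',
    'target_repo': 'group/target-repo',
    'target_ref': 'branch:default:0123456789abcdef',
    'revisions': ['abcdef0123456789'],
    'review_members': [
        # one entry per ReviewerForm: user_id, reasons, rules, mandatory, role
        {'user_id': 2, 'reasons': [], 'rules': [], 'mandatory': False,
         'role': 'reviewer'},
    ],
    'observer_members': [],
    'pullrequest_title': 'Example title',
    'pullrequest_desc': '',
}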
627 def IssueTrackerPatternsForm(localizer):
623 def IssueTrackerPatternsForm(localizer):
628 _ = localizer
624 _ = localizer
629
625
630 class _IssueTrackerPatternsForm(formencode.Schema):
626 class _IssueTrackerPatternsForm(formencode.Schema):
631 allow_extra_fields = True
627 allow_extra_fields = True
632 filter_extra_fields = False
628 filter_extra_fields = False
633 chained_validators = [v.ValidPattern(localizer)]
629 chained_validators = [v.ValidPattern(localizer)]
634 return _IssueTrackerPatternsForm
630 return _IssueTrackerPatternsForm
@@ -1,1203 +1,1195 b''
1 # Copyright (C) 2010-2023 RhodeCode GmbH
1 # Copyright (C) 2010-2023 RhodeCode GmbH
2 #
2 #
3 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
4 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
5 # (only), as published by the Free Software Foundation.
5 # (only), as published by the Free Software Foundation.
6 #
6 #
7 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
10 # GNU General Public License for more details.
11 #
11 #
12 # You should have received a copy of the GNU Affero General Public License
12 # You should have received a copy of the GNU Affero General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 #
14 #
15 # This program is dual-licensed. If you wish to learn more about the
15 # This program is dual-licensed. If you wish to learn more about the
16 # RhodeCode Enterprise Edition, including its added features, Support services,
16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18
18
19 import os
19 import os
20 import re
20 import re
21 import shutil
21 import shutil
22 import time
22 import time
23 import logging
23 import logging
24 import traceback
24 import traceback
25 import datetime
25 import datetime
26
26
27 from pyramid.threadlocal import get_current_request
27 from pyramid.threadlocal import get_current_request
28 from sqlalchemy.orm import aliased
28 from sqlalchemy.orm import aliased
29 from zope.cachedescriptors.property import Lazy as LazyProperty
29 from zope.cachedescriptors.property import Lazy as LazyProperty
30
30
31 from rhodecode import events
31 from rhodecode import events
32 from rhodecode.lib.auth import HasUserGroupPermissionAny
32 from rhodecode.lib.auth import HasUserGroupPermissionAny
33 from rhodecode.lib.caching_query import FromCache
33 from rhodecode.lib.caching_query import FromCache
34 from rhodecode.lib.exceptions import AttachedForksError, AttachedPullRequestsError
34 from rhodecode.lib.exceptions import AttachedForksError, AttachedPullRequestsError
35 from rhodecode.lib import hooks_base
35 from rhodecode.lib import hooks_base
36 from rhodecode.lib.user_log_filter import user_log_filter
36 from rhodecode.lib.user_log_filter import user_log_filter
37 from rhodecode.lib.utils import make_db_config
37 from rhodecode.lib.utils import make_db_config
38 from rhodecode.lib.utils2 import (
38 from rhodecode.lib.utils2 import (
39 safe_str, remove_prefix, obfuscate_url_pw,
39 safe_str, remove_prefix, obfuscate_url_pw,
40 get_current_rhodecode_user, safe_int, action_logger_generic)
40 get_current_rhodecode_user, safe_int, action_logger_generic)
41 from rhodecode.lib.vcs.backends import get_backend
41 from rhodecode.lib.vcs.backends import get_backend
42 from rhodecode.lib.vcs.nodes import NodeKind
42 from rhodecode.lib.vcs.nodes import NodeKind
43 from rhodecode.model import BaseModel
43 from rhodecode.model import BaseModel
44 from rhodecode.model.db import (
44 from rhodecode.model.db import (
45 _hash_key, func, case, joinedload, or_, in_filter_generator,
45 _hash_key, func, case, joinedload, or_, in_filter_generator,
46 Session, Repository, UserRepoToPerm, UserGroupRepoToPerm,
46 Session, Repository, UserRepoToPerm, UserGroupRepoToPerm,
47 UserRepoGroupToPerm, UserGroupRepoGroupToPerm, User, Permission,
47 UserRepoGroupToPerm, UserGroupRepoGroupToPerm, User, Permission,
48 Statistics, UserGroup, RepoGroup, RepositoryField, UserLog)
48 Statistics, UserGroup, RepoGroup, RepositoryField, UserLog)
49 from rhodecode.model.permission import PermissionModel
49 from rhodecode.model.permission import PermissionModel
50 from rhodecode.model.settings import VcsSettingsModel
50 from rhodecode.model.settings import VcsSettingsModel
51
51
52 log = logging.getLogger(__name__)
52 log = logging.getLogger(__name__)
53
53
54
54
55 class RepoModel(BaseModel):
55 class RepoModel(BaseModel):
56
56
57 cls = Repository
57 cls = Repository
58
58
59 def _get_user_group(self, users_group):
59 def _get_user_group(self, users_group):
60 return self._get_instance(UserGroup, users_group,
60 return self._get_instance(UserGroup, users_group,
61 callback=UserGroup.get_by_group_name)
61 callback=UserGroup.get_by_group_name)
62
62
63 def _get_repo_group(self, repo_group):
63 def _get_repo_group(self, repo_group):
64 return self._get_instance(RepoGroup, repo_group,
64 return self._get_instance(RepoGroup, repo_group,
65 callback=RepoGroup.get_by_group_name)
65 callback=RepoGroup.get_by_group_name)
66
66
67 def _create_default_perms(self, repository, private):
67 def _create_default_perms(self, repository, private):
68 # create default permission
68 # create default permission
69 default = 'repository.read'
69 default = 'repository.read'
70 def_user = User.get_default_user()
70 def_user = User.get_default_user()
71 for p in def_user.user_perms:
71 for p in def_user.user_perms:
72 if p.permission.permission_name.startswith('repository.'):
72 if p.permission.permission_name.startswith('repository.'):
73 default = p.permission.permission_name
73 default = p.permission.permission_name
74 break
74 break
75
75
76 default_perm = 'repository.none' if private else default
76 default_perm = 'repository.none' if private else default
77
77
78 repo_to_perm = UserRepoToPerm()
78 repo_to_perm = UserRepoToPerm()
79 repo_to_perm.permission = Permission.get_by_key(default_perm)
79 repo_to_perm.permission = Permission.get_by_key(default_perm)
80
80
81 repo_to_perm.repository = repository
81 repo_to_perm.repository = repository
82 repo_to_perm.user = def_user
82 repo_to_perm.user = def_user
83
83
84 return repo_to_perm
84 return repo_to_perm
85
85
86 @LazyProperty
87 def repos_path(self):
88 """
89 Gets the repositories root path from database
90 """
91 settings_model = VcsSettingsModel(sa=self.sa)
92 return settings_model.get_repos_location()
93
94 def get(self, repo_id):
86 def get(self, repo_id):
95 repo = self.sa.query(Repository) \
87 repo = self.sa.query(Repository) \
96 .filter(Repository.repo_id == repo_id)
88 .filter(Repository.repo_id == repo_id)
97
89
98 return repo.scalar()
90 return repo.scalar()
99
91
100 def get_repo(self, repository):
92 def get_repo(self, repository):
101 return self._get_repo(repository)
93 return self._get_repo(repository)
102
94
103 def get_by_repo_name(self, repo_name, cache=False):
95 def get_by_repo_name(self, repo_name, cache=False):
104 repo = self.sa.query(Repository) \
96 repo = self.sa.query(Repository) \
105 .filter(Repository.repo_name == repo_name)
97 .filter(Repository.repo_name == repo_name)
106
98
107 if cache:
99 if cache:
108 name_key = _hash_key(repo_name)
100 name_key = _hash_key(repo_name)
109 repo = repo.options(
101 repo = repo.options(
110 FromCache("sql_cache_short", f"get_repo_{name_key}"))
102 FromCache("sql_cache_short", f"get_repo_{name_key}"))
111 return repo.scalar()
103 return repo.scalar()
112
104
113 def _extract_id_from_repo_name(self, repo_name):
105 def _extract_id_from_repo_name(self, repo_name):
114 if repo_name.startswith('/'):
106 if repo_name.startswith('/'):
115 repo_name = repo_name.lstrip('/')
107 repo_name = repo_name.lstrip('/')
116 by_id_match = re.match(r'^_(\d+)', repo_name)
108 by_id_match = re.match(r'^_(\d+)', repo_name)
117 if by_id_match:
109 if by_id_match:
118 return by_id_match.groups()[0]
110 return by_id_match.groups()[0]
119
111
120 def get_repo_by_id(self, repo_name):
112 def get_repo_by_id(self, repo_name):
121 """
113 """
122 Extracts repo_name by id from special urls.
114 Extracts repo_name by id from special urls.
123 Example url is _11/repo_name
115 Example url is _11/repo_name
124
116
125 :param repo_name:
117 :param repo_name:
126 :return: repo object if matched else None
118 :return: repo object if matched else None
127 """
119 """
128 _repo_id = None
120 _repo_id = None
129 try:
121 try:
130 _repo_id = self._extract_id_from_repo_name(repo_name)
122 _repo_id = self._extract_id_from_repo_name(repo_name)
131 if _repo_id:
123 if _repo_id:
132 return self.get(_repo_id)
124 return self.get(_repo_id)
133 except Exception:
125 except Exception:
134 log.exception('Failed to extract repo_name from URL')
126 log.exception('Failed to extract repo_name from URL')
135 if _repo_id:
127 if _repo_id:
136 Session().rollback()
128 Session().rollback()
137
129
138 return None
130 return None
139
131
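# Hedged sketch (editorial addition): get_repo_by_id() above resolves
# "by-id" URLs of the form `_<repo_id>/...` via the `^_(\d+)` match used in
# _extract_id_from_repo_name(). A standalone illustration of that parsing:
import re


def extract_repo_id(repo_name: str):
    """Return the numeric id from a '_11/repo_name' style path, else None."""
    repo_name = repo_name.lstrip('/')
    by_id_match = re.match(r'^_(\d+)', repo_name)
    return by_id_match.group(1) if by_id_match else None


assert extract_repo_id('_11/repo_name') == '11'
assert extract_repo_id('group/repo_name') is None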
140 def get_repos_for_root(self, root, traverse=False):
132 def get_repos_for_root(self, root, traverse=False):
141 if traverse:
133 if traverse:
142 like_expression = u'{}%'.format(safe_str(root))
134 like_expression = u'{}%'.format(safe_str(root))
143 repos = Repository.query().filter(
135 repos = Repository.query().filter(
144 Repository.repo_name.like(like_expression)).all()
136 Repository.repo_name.like(like_expression)).all()
145 else:
137 else:
146 if root and not isinstance(root, RepoGroup):
138 if root and not isinstance(root, RepoGroup):
147 raise ValueError(
139 raise ValueError(
148 'Root must be an instance '
140 'Root must be an instance '
149 'of RepoGroup, got:{} instead'.format(type(root)))
141 'of RepoGroup, got:{} instead'.format(type(root)))
150 repos = Repository.query().filter(Repository.group == root).all()
142 repos = Repository.query().filter(Repository.group == root).all()
151 return repos
143 return repos
152
144
153 def get_url(self, repo, request=None, permalink=False):
145 def get_url(self, repo, request=None, permalink=False):
154 if not request:
146 if not request:
155 request = get_current_request()
147 request = get_current_request()
156
148
157 if not request:
149 if not request:
158 return
150 return
159
151
160 if permalink:
152 if permalink:
161 return request.route_url(
153 return request.route_url(
162 'repo_summary', repo_name='_{}'.format(safe_str(repo.repo_id)))
154 'repo_summary', repo_name='_{}'.format(safe_str(repo.repo_id)))
163 else:
155 else:
164 return request.route_url(
156 return request.route_url(
165 'repo_summary', repo_name=safe_str(repo.repo_name))
157 'repo_summary', repo_name=safe_str(repo.repo_name))
166
158
167 def get_commit_url(self, repo, commit_id, request=None, permalink=False):
159 def get_commit_url(self, repo, commit_id, request=None, permalink=False):
168 if not request:
160 if not request:
169 request = get_current_request()
161 request = get_current_request()
170
162
171 if not request:
163 if not request:
172 return
164 return
173
165
174 if permalink:
166 if permalink:
175 return request.route_url(
167 return request.route_url(
176 'repo_commit', repo_name=safe_str(repo.repo_id),
168 'repo_commit', repo_name=safe_str(repo.repo_id),
177 commit_id=commit_id)
169 commit_id=commit_id)
178
170
179 else:
171 else:
180 return request.route_url(
172 return request.route_url(
181 'repo_commit', repo_name=safe_str(repo.repo_name),
173 'repo_commit', repo_name=safe_str(repo.repo_name),
182 commit_id=commit_id)
174 commit_id=commit_id)
183
175
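# Hedged sketch (editorial addition): get_url() and get_commit_url() above
# build permalinks by substituting `_<repo_id>` for the repository name in the
# 'repo_summary' / 'repo_commit' routes, so links survive renames. The real
# code uses pyramid's request.route_url; the naming convention itself is just:
def permalink_repo_name(repo_id: int) -> str:
    return '_{}'.format(repo_id)


assert permalink_repo_name(11) == '_11'   # pairs with get_repo_by_id('_11/...')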
184 def get_repo_log(self, repo, filter_term):
176 def get_repo_log(self, repo, filter_term):
185 repo_log = UserLog.query()\
177 repo_log = UserLog.query()\
186 .filter(or_(UserLog.repository_id == repo.repo_id,
178 .filter(or_(UserLog.repository_id == repo.repo_id,
187 UserLog.repository_name == repo.repo_name))\
179 UserLog.repository_name == repo.repo_name))\
188 .options(joinedload(UserLog.user))\
180 .options(joinedload(UserLog.user))\
189 .options(joinedload(UserLog.repository))\
181 .options(joinedload(UserLog.repository))\
190 .order_by(UserLog.action_date.desc())
182 .order_by(UserLog.action_date.desc())
191
183
192 repo_log = user_log_filter(repo_log, filter_term)
184 repo_log = user_log_filter(repo_log, filter_term)
193 return repo_log
185 return repo_log
194
186
195 @classmethod
187 @classmethod
196 def update_commit_cache(cls, repositories=None):
188 def update_commit_cache(cls, repositories=None):
197 if not repositories:
189 if not repositories:
198 repositories = Repository.getAll()
190 repositories = Repository.getAll()
199 for repo in repositories:
191 for repo in repositories:
200 repo.update_commit_cache()
192 repo.update_commit_cache()
201
193
202 def get_repos_as_dict(self, repo_list=None, admin=False,
194 def get_repos_as_dict(self, repo_list=None, admin=False,
203 super_user_actions=False, short_name=None):
195 super_user_actions=False, short_name=None):
204
196
205 _render = get_current_request().get_partial_renderer(
197 _render = get_current_request().get_partial_renderer(
206 'rhodecode:templates/data_table/_dt_elements.mako')
198 'rhodecode:templates/data_table/_dt_elements.mako')
207 c = _render.get_call_context()
199 c = _render.get_call_context()
208 h = _render.get_helpers()
200 h = _render.get_helpers()
209
201
210 def quick_menu(repo_name):
202 def quick_menu(repo_name):
211 return _render('quick_menu', repo_name)
203 return _render('quick_menu', repo_name)
212
204
213 def repo_lnk(name, rtype, rstate, private, archived, fork_repo_name):
205 def repo_lnk(name, rtype, rstate, private, archived, fork_repo_name):
214 if short_name is not None:
206 if short_name is not None:
215 short_name_var = short_name
207 short_name_var = short_name
216 else:
208 else:
217 short_name_var = not admin
209 short_name_var = not admin
218 return _render('repo_name', name, rtype, rstate, private, archived, fork_repo_name,
210 return _render('repo_name', name, rtype, rstate, private, archived, fork_repo_name,
219 short_name=short_name_var, admin=False)
211 short_name=short_name_var, admin=False)
220
212
221 def last_change(last_change):
213 def last_change(last_change):
222 if admin and isinstance(last_change, datetime.datetime) and not last_change.tzinfo:
214 if admin and isinstance(last_change, datetime.datetime) and not last_change.tzinfo:
223 ts = time.time()
215 ts = time.time()
224 utc_offset = (datetime.datetime.fromtimestamp(ts)
216 utc_offset = (datetime.datetime.fromtimestamp(ts)
225 - datetime.datetime.utcfromtimestamp(ts)).total_seconds()
217 - datetime.datetime.utcfromtimestamp(ts)).total_seconds()
226 last_change = last_change + datetime.timedelta(seconds=utc_offset)
218 last_change = last_change + datetime.timedelta(seconds=utc_offset)
227
219
228 return _render("last_change", last_change)
220 return _render("last_change", last_change)
229
221
230 def rss_lnk(repo_name):
222 def rss_lnk(repo_name):
231 return _render("rss", repo_name)
223 return _render("rss", repo_name)
232
224
233 def atom_lnk(repo_name):
225 def atom_lnk(repo_name):
234 return _render("atom", repo_name)
226 return _render("atom", repo_name)
235
227
236 def last_rev(repo_name, cs_cache):
228 def last_rev(repo_name, cs_cache):
237 return _render('revision', repo_name, cs_cache.get('revision'),
229 return _render('revision', repo_name, cs_cache.get('revision'),
238 cs_cache.get('raw_id'), cs_cache.get('author'),
230 cs_cache.get('raw_id'), cs_cache.get('author'),
239 cs_cache.get('message'), cs_cache.get('date'))
231 cs_cache.get('message'), cs_cache.get('date'))
240
232
241 def desc(desc):
233 def desc(desc):
242 return _render('repo_desc', desc, c.visual.stylify_metatags)
234 return _render('repo_desc', desc, c.visual.stylify_metatags)
243
235
244 def state(repo_state):
236 def state(repo_state):
245 return _render("repo_state", repo_state)
237 return _render("repo_state", repo_state)
246
238
247 def repo_actions(repo_name):
239 def repo_actions(repo_name):
248 return _render('repo_actions', repo_name, super_user_actions)
240 return _render('repo_actions', repo_name, super_user_actions)
249
241
250 def user_profile(username):
242 def user_profile(username):
251 return _render('user_profile', username)
243 return _render('user_profile', username)
252
244
253 repos_data = []
245 repos_data = []
254 for repo in repo_list:
246 for repo in repo_list:
255 # NOTE(marcink): because we use only raw column we need to load it like that
247 # NOTE(marcink): because we use only raw column we need to load it like that
256 changeset_cache = Repository._load_changeset_cache(
248 changeset_cache = Repository._load_changeset_cache(
257 repo.repo_id, repo._changeset_cache)
249 repo.repo_id, repo._changeset_cache)
258
250
259 row = {
251 row = {
260 "menu": quick_menu(repo.repo_name),
252 "menu": quick_menu(repo.repo_name),
261
253
262 "name": repo_lnk(repo.repo_name, repo.repo_type, repo.repo_state,
254 "name": repo_lnk(repo.repo_name, repo.repo_type, repo.repo_state,
263 repo.private, repo.archived, repo.fork_repo_name),
255 repo.private, repo.archived, repo.fork_repo_name),
264
256
265 "desc": desc(h.escape(repo.description)),
257 "desc": desc(h.escape(repo.description)),
266
258
267 "last_change": last_change(repo.updated_on),
259 "last_change": last_change(repo.updated_on),
268
260
269 "last_changeset": last_rev(repo.repo_name, changeset_cache),
261 "last_changeset": last_rev(repo.repo_name, changeset_cache),
270 "last_changeset_raw": changeset_cache.get('revision'),
262 "last_changeset_raw": changeset_cache.get('revision'),
271
263
272 "owner": user_profile(repo.owner_username),
264 "owner": user_profile(repo.owner_username),
273
265
274 "state": state(repo.repo_state),
266 "state": state(repo.repo_state),
275 "rss": rss_lnk(repo.repo_name),
267 "rss": rss_lnk(repo.repo_name),
276 "atom": atom_lnk(repo.repo_name),
268 "atom": atom_lnk(repo.repo_name),
277 }
269 }
278 if admin:
270 if admin:
279 row.update({
271 row.update({
280 "action": repo_actions(repo.repo_name),
272 "action": repo_actions(repo.repo_name),
281 })
273 })
282 repos_data.append(row)
274 repos_data.append(row)
283
275
284 return repos_data
276 return repos_data
285
277
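# Hedged sketch (editorial addition): the last_change() helper above shifts
# naive datetimes by the server's current UTC offset before rendering the
# admin view. The offset computation, lifted out for illustration:
import time
import datetime


def local_utc_offset_seconds() -> float:
    ts = time.time()
    return (datetime.datetime.fromtimestamp(ts)
            - datetime.datetime.utcfromtimestamp(ts)).total_seconds()


naive_utc = datetime.datetime(2023, 1, 1, 12, 0, 0)   # assumed UTC, no tzinfo
local_equivalent = naive_utc + datetime.timedelta(
    seconds=local_utc_offset_seconds())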
286 def get_repos_data_table(
278 def get_repos_data_table(
287 self, draw, start, limit,
279 self, draw, start, limit,
288 search_q, order_by, order_dir,
280 search_q, order_by, order_dir,
289 auth_user, repo_group_id):
281 auth_user, repo_group_id):
290 from rhodecode.model.scm import RepoList
282 from rhodecode.model.scm import RepoList
291
283
292 _perms = ['repository.read', 'repository.write', 'repository.admin']
284 _perms = ['repository.read', 'repository.write', 'repository.admin']
293
285
294 repos = Repository.query() \
286 repos = Repository.query() \
295 .filter(Repository.group_id == repo_group_id) \
287 .filter(Repository.group_id == repo_group_id) \
296 .all()
288 .all()
297 auth_repo_list = RepoList(
289 auth_repo_list = RepoList(
298 repos, perm_set=_perms,
290 repos, perm_set=_perms,
299 extra_kwargs=dict(user=auth_user))
291 extra_kwargs=dict(user=auth_user))
300
292
301 allowed_ids = [-1]
293 allowed_ids = [-1]
302 for repo in auth_repo_list:
294 for repo in auth_repo_list:
303 allowed_ids.append(repo.repo_id)
295 allowed_ids.append(repo.repo_id)
304
296
305 repos_data_total_count = Repository.query() \
297 repos_data_total_count = Repository.query() \
306 .filter(Repository.group_id == repo_group_id) \
298 .filter(Repository.group_id == repo_group_id) \
307 .filter(or_(
299 .filter(or_(
308 # generate multiple IN to fix limitation problems
300 # generate multiple IN to fix limitation problems
309 *in_filter_generator(Repository.repo_id, allowed_ids))
301 *in_filter_generator(Repository.repo_id, allowed_ids))
310 ) \
302 ) \
311 .count()
303 .count()
312
304
313 RepoFork = aliased(Repository)
305 RepoFork = aliased(Repository)
314 OwnerUser = aliased(User)
306 OwnerUser = aliased(User)
315 base_q = Session.query(
307 base_q = Session.query(
316 Repository.repo_id,
308 Repository.repo_id,
317 Repository.repo_name,
309 Repository.repo_name,
318 Repository.description,
310 Repository.description,
319 Repository.repo_type,
311 Repository.repo_type,
320 Repository.repo_state,
312 Repository.repo_state,
321 Repository.private,
313 Repository.private,
322 Repository.archived,
314 Repository.archived,
323 Repository.updated_on,
315 Repository.updated_on,
324 Repository._changeset_cache,
316 Repository._changeset_cache,
325 RepoFork.repo_name.label('fork_repo_name'),
317 RepoFork.repo_name.label('fork_repo_name'),
326 OwnerUser.username.label('owner_username'),
318 OwnerUser.username.label('owner_username'),
327 ) \
319 ) \
328 .filter(Repository.group_id == repo_group_id) \
320 .filter(Repository.group_id == repo_group_id) \
329 .filter(or_(
321 .filter(or_(
330 # generate multiple IN to fix limitation problems
322 # generate multiple IN to fix limitation problems
331 *in_filter_generator(Repository.repo_id, allowed_ids))
323 *in_filter_generator(Repository.repo_id, allowed_ids))
332 ) \
324 ) \
333 .outerjoin(RepoFork, Repository.fork_id == RepoFork.repo_id) \
325 .outerjoin(RepoFork, Repository.fork_id == RepoFork.repo_id) \
334 .join(OwnerUser, Repository.user_id == OwnerUser.user_id)
326 .join(OwnerUser, Repository.user_id == OwnerUser.user_id)
335
327
336 repos_data_total_filtered_count = base_q.count()
328 repos_data_total_filtered_count = base_q.count()
337
329
338 sort_defined = False
330 sort_defined = False
339 if order_by == 'repo_name':
331 if order_by == 'repo_name':
340 sort_col = func.lower(Repository.repo_name)
332 sort_col = func.lower(Repository.repo_name)
341 sort_defined = True
333 sort_defined = True
342 elif order_by == 'user_username':
334 elif order_by == 'user_username':
343 sort_col = User.username
335 sort_col = User.username
344 else:
336 else:
345 sort_col = getattr(Repository, order_by, None)
337 sort_col = getattr(Repository, order_by, None)
346
338
347 if sort_defined or sort_col:
339 if sort_defined or sort_col:
348 if order_dir == 'asc':
340 if order_dir == 'asc':
349 sort_col = sort_col.asc()
341 sort_col = sort_col.asc()
350 else:
342 else:
351 sort_col = sort_col.desc()
343 sort_col = sort_col.desc()
352
344
353 base_q = base_q.order_by(sort_col)
345 base_q = base_q.order_by(sort_col)
354 base_q = base_q.offset(start).limit(limit)
346 base_q = base_q.offset(start).limit(limit)
355
347
356 repos_list = base_q.all()
348 repos_list = base_q.all()
357
349
358 repos_data = RepoModel().get_repos_as_dict(
350 repos_data = RepoModel().get_repos_as_dict(
359 repo_list=repos_list, admin=False)
351 repo_list=repos_list, admin=False)
360
352
361 data = ({
353 data = ({
362 'draw': draw,
354 'draw': draw,
363 'data': repos_data,
355 'data': repos_data,
364 'recordsTotal': repos_data_total_count,
356 'recordsTotal': repos_data_total_count,
365 'recordsFiltered': repos_data_total_filtered_count,
357 'recordsFiltered': repos_data_total_filtered_count,
366 })
358 })
367 return data
359 return data
368
360
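# Hedged sketch (editorial addition): get_repos_data_table() above returns the
# standard server-side DataTables envelope. Illustrative numbers only; the
# keys it populates are:
example_datatable_response = {
    'draw': 1,                  # echo of the client's draw counter
    'data': [],                 # rows built by get_repos_as_dict()
    'recordsTotal': 42,         # all repos in the group visible to the user
    'recordsFiltered': 42,      # rows left after the permission filter
}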
369 def _get_defaults(self, repo_name):
361 def _get_defaults(self, repo_name):
370 """
362 """
371 Gets information about a repository and returns a dict for
363 Gets information about a repository and returns a dict for
372 use in forms
364 use in forms
373
365
374 :param repo_name:
366 :param repo_name:
375 """
367 """
376
368
377 repo_info = Repository.get_by_repo_name(repo_name)
369 repo_info = Repository.get_by_repo_name(repo_name)
378
370
379 if repo_info is None:
371 if repo_info is None:
380 return None
372 return None
381
373
382 defaults = repo_info.get_dict()
374 defaults = repo_info.get_dict()
383 defaults['repo_name'] = repo_info.just_name
375 defaults['repo_name'] = repo_info.just_name
384
376
385 groups = repo_info.groups_with_parents
377 groups = repo_info.groups_with_parents
386 parent_group = groups[-1] if groups else None
378 parent_group = groups[-1] if groups else None
387
379
388 # we use -1 as this is how in HTML, we mark an empty group
380 # we use -1 as this is how in HTML, we mark an empty group
389 defaults['repo_group'] = getattr(parent_group, 'group_id', -1)
381 defaults['repo_group'] = getattr(parent_group, 'group_id', -1)
390
382
391 keys_to_process = (
383 keys_to_process = (
392 {'k': 'repo_type', 'strip': False},
384 {'k': 'repo_type', 'strip': False},
393 {'k': 'repo_enable_downloads', 'strip': True},
385 {'k': 'repo_enable_downloads', 'strip': True},
394 {'k': 'repo_description', 'strip': True},
386 {'k': 'repo_description', 'strip': True},
395 {'k': 'repo_enable_locking', 'strip': True},
387 {'k': 'repo_enable_locking', 'strip': True},
396 {'k': 'repo_landing_rev', 'strip': True},
388 {'k': 'repo_landing_rev', 'strip': True},
397 {'k': 'clone_uri', 'strip': False},
389 {'k': 'clone_uri', 'strip': False},
398 {'k': 'push_uri', 'strip': False},
390 {'k': 'push_uri', 'strip': False},
399 {'k': 'repo_private', 'strip': True},
391 {'k': 'repo_private', 'strip': True},
400 {'k': 'repo_enable_statistics', 'strip': True}
392 {'k': 'repo_enable_statistics', 'strip': True}
401 )
393 )
402
394
403 for item in keys_to_process:
395 for item in keys_to_process:
404 attr = item['k']
396 attr = item['k']
405 if item['strip']:
397 if item['strip']:
406 attr = remove_prefix(item['k'], 'repo_')
398 attr = remove_prefix(item['k'], 'repo_')
407
399
408 val = defaults[attr]
400 val = defaults[attr]
409 if item['k'] == 'repo_landing_rev':
401 if item['k'] == 'repo_landing_rev':
410 val = ':'.join(defaults[attr])
402 val = ':'.join(defaults[attr])
411 defaults[item['k']] = val
403 defaults[item['k']] = val
412 if item['k'] == 'clone_uri':
404 if item['k'] == 'clone_uri':
413 defaults['clone_uri_hidden'] = repo_info.clone_uri_hidden
405 defaults['clone_uri_hidden'] = repo_info.clone_uri_hidden
414 if item['k'] == 'push_uri':
406 if item['k'] == 'push_uri':
415 defaults['push_uri_hidden'] = repo_info.push_uri_hidden
407 defaults['push_uri_hidden'] = repo_info.push_uri_hidden
416
408
417 # fill owner
409 # fill owner
418 if repo_info.user:
410 if repo_info.user:
419 defaults.update({'user': repo_info.user.username})
411 defaults.update({'user': repo_info.user.username})
420 else:
412 else:
421 replacement_user = User.get_first_super_admin().username
413 replacement_user = User.get_first_super_admin().username
422 defaults.update({'user': replacement_user})
414 defaults.update({'user': replacement_user})
423
415
424 return defaults
416 return defaults
425
417
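# Hedged sketch (editorial addition): _get_defaults() and update() above map
# form field names such as 'repo_description' onto model attributes by
# stripping the 'repo_' prefix (via rhodecode.lib.utils2.remove_prefix). A
# local stand-in with the same observable behaviour, for illustration only:
def strip_repo_prefix(key: str, prefix: str = 'repo_') -> str:
    return key[len(prefix):] if key.startswith(prefix) else key


assert strip_repo_prefix('repo_description') == 'description'
assert strip_repo_prefix('clone_uri') == 'clone_uri'   # unprefixed keys pass through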
426 def update(self, repo, **kwargs):
418 def update(self, repo, **kwargs):
427 try:
419 try:
428 cur_repo = self._get_repo(repo)
420 cur_repo = self._get_repo(repo)
429 source_repo_name = cur_repo.repo_name
421 source_repo_name = cur_repo.repo_name
430
422
431 affected_user_ids = []
423 affected_user_ids = []
432 if 'user' in kwargs:
424 if 'user' in kwargs:
433 old_owner_id = cur_repo.user.user_id
425 old_owner_id = cur_repo.user.user_id
434 new_owner = User.get_by_username(kwargs['user'])
426 new_owner = User.get_by_username(kwargs['user'])
435 cur_repo.user = new_owner
427 cur_repo.user = new_owner
436
428
437 if old_owner_id != new_owner.user_id:
429 if old_owner_id != new_owner.user_id:
438 affected_user_ids = [new_owner.user_id, old_owner_id]
430 affected_user_ids = [new_owner.user_id, old_owner_id]
439
431
440 if 'repo_group' in kwargs:
432 if 'repo_group' in kwargs:
441 cur_repo.group = RepoGroup.get(kwargs['repo_group'])
433 cur_repo.group = RepoGroup.get(kwargs['repo_group'])
442 log.debug('Updating repo %s with params:%s', cur_repo, kwargs)
434 log.debug('Updating repo %s with params:%s', cur_repo, kwargs)
443
435
444 update_keys = [
436 update_keys = [
445 (1, 'repo_description'),
437 (1, 'repo_description'),
446 (1, 'repo_landing_rev'),
438 (1, 'repo_landing_rev'),
447 (1, 'repo_private'),
439 (1, 'repo_private'),
448 (1, 'repo_enable_downloads'),
440 (1, 'repo_enable_downloads'),
449 (1, 'repo_enable_locking'),
441 (1, 'repo_enable_locking'),
450 (1, 'repo_enable_statistics'),
442 (1, 'repo_enable_statistics'),
451 (0, 'clone_uri'),
443 (0, 'clone_uri'),
452 (0, 'push_uri'),
444 (0, 'push_uri'),
453 (0, 'fork_id')
445 (0, 'fork_id')
454 ]
446 ]
455 for strip, k in update_keys:
447 for strip, k in update_keys:
456 if k in kwargs:
448 if k in kwargs:
457 val = kwargs[k]
449 val = kwargs[k]
458 if strip:
450 if strip:
459 k = remove_prefix(k, 'repo_')
451 k = remove_prefix(k, 'repo_')
460
452
461 setattr(cur_repo, k, val)
453 setattr(cur_repo, k, val)
462
454
463 new_name = cur_repo.get_new_name(kwargs['repo_name'])
455 new_name = cur_repo.get_new_name(kwargs['repo_name'])
464 cur_repo.repo_name = new_name
456 cur_repo.repo_name = new_name
465
457
466 # if private flag is set, reset default permission to NONE
458 # if private flag is set, reset default permission to NONE
467 if kwargs.get('repo_private'):
459 if kwargs.get('repo_private'):
468 EMPTY_PERM = 'repository.none'
460 EMPTY_PERM = 'repository.none'
469 RepoModel().grant_user_permission(
461 RepoModel().grant_user_permission(
470 repo=cur_repo, user=User.DEFAULT_USER, perm=EMPTY_PERM
462 repo=cur_repo, user=User.DEFAULT_USER, perm=EMPTY_PERM
471 )
463 )
472 if kwargs.get('repo_landing_rev'):
464 if kwargs.get('repo_landing_rev'):
473 landing_rev_val = kwargs['repo_landing_rev']
465 landing_rev_val = kwargs['repo_landing_rev']
474 RepoModel().set_landing_rev(cur_repo, landing_rev_val)
466 RepoModel().set_landing_rev(cur_repo, landing_rev_val)
475
467
476 # handle extra fields
468 # handle extra fields
477 for field in filter(lambda k: k.startswith(RepositoryField.PREFIX), kwargs):
469 for field in filter(lambda k: k.startswith(RepositoryField.PREFIX), kwargs):
478 k = RepositoryField.un_prefix_key(field)
470 k = RepositoryField.un_prefix_key(field)
479 ex_field = RepositoryField.get_by_key_name(
471 ex_field = RepositoryField.get_by_key_name(
480 key=k, repo=cur_repo)
472 key=k, repo=cur_repo)
481 if ex_field:
473 if ex_field:
482 ex_field.field_value = kwargs[field]
474 ex_field.field_value = kwargs[field]
483 self.sa.add(ex_field)
475 self.sa.add(ex_field)
484
476
485 self.sa.add(cur_repo)
477 self.sa.add(cur_repo)
486
478
487 if source_repo_name != new_name:
479 if source_repo_name != new_name:
488 # rename repository
480 # rename repository
489 self._rename_filesystem_repo(
481 self._rename_filesystem_repo(
490 old=source_repo_name, new=new_name)
482 old=source_repo_name, new=new_name)
491
483
492 if affected_user_ids:
484 if affected_user_ids:
493 PermissionModel().trigger_permission_flush(affected_user_ids)
485 PermissionModel().trigger_permission_flush(affected_user_ids)
494
486
495 return cur_repo
487 return cur_repo
496 except Exception:
488 except Exception:
497 log.error(traceback.format_exc())
489 log.error(traceback.format_exc())
498 raise
490 raise
499
491
500 def _create_repo(self, repo_name, repo_type, description, owner,
492 def _create_repo(self, repo_name, repo_type, description, owner,
501 private=False, clone_uri=None, repo_group=None,
493 private=False, clone_uri=None, repo_group=None,
502 landing_rev=None, fork_of=None,
494 landing_rev=None, fork_of=None,
503 copy_fork_permissions=False, enable_statistics=False,
495 copy_fork_permissions=False, enable_statistics=False,
504 enable_locking=False, enable_downloads=False,
496 enable_locking=False, enable_downloads=False,
505 copy_group_permissions=False,
497 copy_group_permissions=False,
506 state=Repository.STATE_PENDING):
498 state=Repository.STATE_PENDING):
507 """
499 """
508 Create repository inside database with PENDING state; this should only
500 Create repository inside database with PENDING state; this should only
509 be executed by create(), with the exception of importing existing
501 be executed by create(), with the exception of importing existing
510 repos.
502 repos.
511 """
503 """
512 from rhodecode.model.scm import ScmModel
504 from rhodecode.model.scm import ScmModel
513
505
514 owner = self._get_user(owner)
506 owner = self._get_user(owner)
515 fork_of = self._get_repo(fork_of)
507 fork_of = self._get_repo(fork_of)
516 repo_group = self._get_repo_group(safe_int(repo_group))
508 repo_group = self._get_repo_group(safe_int(repo_group))
517 default_landing_ref, _lbl = ScmModel.backend_landing_ref(repo_type)
509 default_landing_ref, _lbl = ScmModel.backend_landing_ref(repo_type)
518 landing_rev = landing_rev or default_landing_ref
510 landing_rev = landing_rev or default_landing_ref
519
511
520 try:
512 try:
521 repo_name = safe_str(repo_name)
513 repo_name = safe_str(repo_name)
522 description = safe_str(description)
514 description = safe_str(description)
523 # repo name is just a name of repository
515 # repo name is just a name of repository
524 # while repo_name_full is a full qualified name that is combined
516 # while repo_name_full is a full qualified name that is combined
525 # with name and path of group
517 # with name and path of group
526 repo_name_full = repo_name
518 repo_name_full = repo_name
527 repo_name = repo_name.split(Repository.NAME_SEP)[-1]
519 repo_name = repo_name.split(Repository.NAME_SEP)[-1]
528
520
529 new_repo = Repository()
521 new_repo = Repository()
530 new_repo.repo_state = state
522 new_repo.repo_state = state
531 new_repo.enable_statistics = False
523 new_repo.enable_statistics = False
532 new_repo.repo_name = repo_name_full
524 new_repo.repo_name = repo_name_full
533 new_repo.repo_type = repo_type
525 new_repo.repo_type = repo_type
534 new_repo.user = owner
526 new_repo.user = owner
535 new_repo.group = repo_group
527 new_repo.group = repo_group
536 new_repo.description = description or repo_name
528 new_repo.description = description or repo_name
537 new_repo.private = private
529 new_repo.private = private
538 new_repo.archived = False
530 new_repo.archived = False
539 new_repo.clone_uri = clone_uri
531 new_repo.clone_uri = clone_uri
540 new_repo.landing_rev = landing_rev
532 new_repo.landing_rev = landing_rev
541
533
542 new_repo.enable_statistics = enable_statistics
534 new_repo.enable_statistics = enable_statistics
543 new_repo.enable_locking = enable_locking
535 new_repo.enable_locking = enable_locking
544 new_repo.enable_downloads = enable_downloads
536 new_repo.enable_downloads = enable_downloads
545
537
546 if repo_group:
538 if repo_group:
547 new_repo.enable_locking = repo_group.enable_locking
539 new_repo.enable_locking = repo_group.enable_locking
548
540
549 if fork_of:
541 if fork_of:
550 parent_repo = fork_of
542 parent_repo = fork_of
551 new_repo.fork = parent_repo
543 new_repo.fork = parent_repo
552
544
553 events.trigger(events.RepoPreCreateEvent(new_repo))
545 events.trigger(events.RepoPreCreateEvent(new_repo))
554
546
555 self.sa.add(new_repo)
547 self.sa.add(new_repo)
556
548
557 EMPTY_PERM = 'repository.none'
549 EMPTY_PERM = 'repository.none'
558 if fork_of and copy_fork_permissions:
550 if fork_of and copy_fork_permissions:
559 repo = fork_of
551 repo = fork_of
560 user_perms = UserRepoToPerm.query() \
552 user_perms = UserRepoToPerm.query() \
561 .filter(UserRepoToPerm.repository == repo).all()
553 .filter(UserRepoToPerm.repository == repo).all()
562 group_perms = UserGroupRepoToPerm.query() \
554 group_perms = UserGroupRepoToPerm.query() \
563 .filter(UserGroupRepoToPerm.repository == repo).all()
555 .filter(UserGroupRepoToPerm.repository == repo).all()
564
556
565 for perm in user_perms:
557 for perm in user_perms:
566 UserRepoToPerm.create(
558 UserRepoToPerm.create(
567 perm.user, new_repo, perm.permission)
559 perm.user, new_repo, perm.permission)
568
560
569 for perm in group_perms:
561 for perm in group_perms:
570 UserGroupRepoToPerm.create(
562 UserGroupRepoToPerm.create(
571 perm.users_group, new_repo, perm.permission)
563 perm.users_group, new_repo, perm.permission)
572 # in case we copy permissions and also set this repo to private
564 # in case we copy permissions and also set this repo to private
573 # override the default user permission to make it a private repo
565 # override the default user permission to make it a private repo
574 if private:
566 if private:
575 RepoModel(self.sa).grant_user_permission(
567 RepoModel(self.sa).grant_user_permission(
576 repo=new_repo, user=User.DEFAULT_USER, perm=EMPTY_PERM)
568 repo=new_repo, user=User.DEFAULT_USER, perm=EMPTY_PERM)
577
569
578 elif repo_group and copy_group_permissions:
570 elif repo_group and copy_group_permissions:
579 user_perms = UserRepoGroupToPerm.query() \
571 user_perms = UserRepoGroupToPerm.query() \
580 .filter(UserRepoGroupToPerm.group == repo_group).all()
572 .filter(UserRepoGroupToPerm.group == repo_group).all()
581
573
582 group_perms = UserGroupRepoGroupToPerm.query() \
574 group_perms = UserGroupRepoGroupToPerm.query() \
583 .filter(UserGroupRepoGroupToPerm.group == repo_group).all()
575 .filter(UserGroupRepoGroupToPerm.group == repo_group).all()
584
576
585 for perm in user_perms:
577 for perm in user_perms:
586 perm_name = perm.permission.permission_name.replace(
578 perm_name = perm.permission.permission_name.replace(
587 'group.', 'repository.')
579 'group.', 'repository.')
588 perm_obj = Permission.get_by_key(perm_name)
580 perm_obj = Permission.get_by_key(perm_name)
589 UserRepoToPerm.create(perm.user, new_repo, perm_obj)
581 UserRepoToPerm.create(perm.user, new_repo, perm_obj)
590
582
591 for perm in group_perms:
583 for perm in group_perms:
592 perm_name = perm.permission.permission_name.replace(
584 perm_name = perm.permission.permission_name.replace(
593 'group.', 'repository.')
585 'group.', 'repository.')
594 perm_obj = Permission.get_by_key(perm_name)
586 perm_obj = Permission.get_by_key(perm_name)
595 UserGroupRepoToPerm.create(perm.users_group, new_repo, perm_obj)
587 UserGroupRepoToPerm.create(perm.users_group, new_repo, perm_obj)
596
588
597 if private:
589 if private:
598 RepoModel(self.sa).grant_user_permission(
590 RepoModel(self.sa).grant_user_permission(
599 repo=new_repo, user=User.DEFAULT_USER, perm=EMPTY_PERM)
591 repo=new_repo, user=User.DEFAULT_USER, perm=EMPTY_PERM)
600
592
601 else:
593 else:
602 perm_obj = self._create_default_perms(new_repo, private)
594 perm_obj = self._create_default_perms(new_repo, private)
603 self.sa.add(perm_obj)
595 self.sa.add(perm_obj)
604
596
605 # now automatically start following this repository as owner
597 # now automatically start following this repository as owner
606 ScmModel(self.sa).toggle_following_repo(new_repo.repo_id, owner.user_id)
598 ScmModel(self.sa).toggle_following_repo(new_repo.repo_id, owner.user_id)
607
599
608 # we need to flush here, in order to check if database won't
600 # we need to flush here, in order to check if database won't
609 # throw any exceptions, create filesystem dirs at the very end
601 # throw any exceptions, create filesystem dirs at the very end
610 self.sa.flush()
602 self.sa.flush()
611 events.trigger(events.RepoCreateEvent(new_repo, actor=owner))
603 events.trigger(events.RepoCreateEvent(new_repo, actor=owner))
612 return new_repo
604 return new_repo
613
605
614 except Exception:
606 except Exception:
615 log.error(traceback.format_exc())
607 log.error(traceback.format_exc())
616 raise
608 raise
617
609
618 def create(self, form_data, cur_user):
610 def create(self, form_data, cur_user):
619 """
611 """
620 Create repository using celery tasks
612 Create repository using celery tasks
621
613
622 :param form_data:
614 :param form_data:
623 :param cur_user:
615 :param cur_user:
624 """
616 """
625 from rhodecode.lib.celerylib import tasks, run_task
617 from rhodecode.lib.celerylib import tasks, run_task
626 return run_task(tasks.create_repo, form_data, cur_user)
618 return run_task(tasks.create_repo, form_data, cur_user)
627
619
628 def update_permissions(self, repo, perm_additions=None, perm_updates=None,
620 def update_permissions(self, repo, perm_additions=None, perm_updates=None,
629 perm_deletions=None, check_perms=True,
621 perm_deletions=None, check_perms=True,
630 cur_user=None):
622 cur_user=None):
631 if not perm_additions:
623 if not perm_additions:
632 perm_additions = []
624 perm_additions = []
633 if not perm_updates:
625 if not perm_updates:
634 perm_updates = []
626 perm_updates = []
635 if not perm_deletions:
627 if not perm_deletions:
636 perm_deletions = []
628 perm_deletions = []
637
629
638 req_perms = ('usergroup.read', 'usergroup.write', 'usergroup.admin')
630 req_perms = ('usergroup.read', 'usergroup.write', 'usergroup.admin')
639
631
640 changes = {
632 changes = {
641 'added': [],
633 'added': [],
642 'updated': [],
634 'updated': [],
643 'deleted': [],
635 'deleted': [],
644 'default_user_changed': None
636 'default_user_changed': None
645 }
637 }
646
638
647 repo = self._get_repo(repo)
639 repo = self._get_repo(repo)
648
640
649 # update permissions
641 # update permissions
650 for member_id, perm, member_type in perm_updates:
642 for member_id, perm, member_type in perm_updates:
651 member_id = int(member_id)
643 member_id = int(member_id)
652 if member_type == 'user':
644 if member_type == 'user':
653 member_name = User.get(member_id).username
645 member_name = User.get(member_id).username
654 if member_name == User.DEFAULT_USER:
646 if member_name == User.DEFAULT_USER:
655 # NOTE(dan): detect if we changed permissions for default user
647 # NOTE(dan): detect if we changed permissions for default user
656 perm_obj = self.sa.query(UserRepoToPerm) \
648 perm_obj = self.sa.query(UserRepoToPerm) \
657 .filter(UserRepoToPerm.user_id == member_id) \
649 .filter(UserRepoToPerm.user_id == member_id) \
658 .filter(UserRepoToPerm.repository == repo) \
650 .filter(UserRepoToPerm.repository == repo) \
659 .scalar()
651 .scalar()
660 if perm_obj and perm_obj.permission.permission_name != perm:
652 if perm_obj and perm_obj.permission.permission_name != perm:
661 changes['default_user_changed'] = True
653 changes['default_user_changed'] = True
662
654
663 # this updates also current one if found
655 # this updates also current one if found
664 self.grant_user_permission(
656 self.grant_user_permission(
665 repo=repo, user=member_id, perm=perm)
657 repo=repo, user=member_id, perm=perm)
666 elif member_type == 'user_group':
658 elif member_type == 'user_group':
667 # check if we have permissions to alter this usergroup
659 # check if we have permissions to alter this usergroup
668 member_name = UserGroup.get(member_id).users_group_name
660 member_name = UserGroup.get(member_id).users_group_name
669 if not check_perms or HasUserGroupPermissionAny(
661 if not check_perms or HasUserGroupPermissionAny(
670 *req_perms)(member_name, user=cur_user):
662 *req_perms)(member_name, user=cur_user):
671 self.grant_user_group_permission(
663 self.grant_user_group_permission(
672 repo=repo, group_name=member_id, perm=perm)
664 repo=repo, group_name=member_id, perm=perm)
673 else:
665 else:
674 raise ValueError("member_type must be 'user' or 'user_group' "
666 raise ValueError("member_type must be 'user' or 'user_group' "
675 "got {} instead".format(member_type))
667 "got {} instead".format(member_type))
676 changes['updated'].append({'type': member_type, 'id': member_id,
668 changes['updated'].append({'type': member_type, 'id': member_id,
677 'name': member_name, 'new_perm': perm})
669 'name': member_name, 'new_perm': perm})
678
670
679 # set new permissions
671 # set new permissions
680 for member_id, perm, member_type in perm_additions:
672 for member_id, perm, member_type in perm_additions:
681 member_id = int(member_id)
673 member_id = int(member_id)
682 if member_type == 'user':
674 if member_type == 'user':
683 member_name = User.get(member_id).username
675 member_name = User.get(member_id).username
684 self.grant_user_permission(
676 self.grant_user_permission(
685 repo=repo, user=member_id, perm=perm)
677 repo=repo, user=member_id, perm=perm)
686 elif member_type == 'user_group':
678 elif member_type == 'user_group':
687 # check if we have permissions to alter this usergroup
679 # check if we have permissions to alter this usergroup
688 member_name = UserGroup.get(member_id).users_group_name
680 member_name = UserGroup.get(member_id).users_group_name
689 if not check_perms or HasUserGroupPermissionAny(
681 if not check_perms or HasUserGroupPermissionAny(
690 *req_perms)(member_name, user=cur_user):
682 *req_perms)(member_name, user=cur_user):
691 self.grant_user_group_permission(
683 self.grant_user_group_permission(
692 repo=repo, group_name=member_id, perm=perm)
684 repo=repo, group_name=member_id, perm=perm)
693 else:
685 else:
694 raise ValueError("member_type must be 'user' or 'user_group' "
686 raise ValueError("member_type must be 'user' or 'user_group' "
695 "got {} instead".format(member_type))
687 "got {} instead".format(member_type))
696
688
697 changes['added'].append({'type': member_type, 'id': member_id,
689 changes['added'].append({'type': member_type, 'id': member_id,
698 'name': member_name, 'new_perm': perm})
690 'name': member_name, 'new_perm': perm})
699 # delete permissions
691 # delete permissions
700 for member_id, perm, member_type in perm_deletions:
692 for member_id, perm, member_type in perm_deletions:
701 member_id = int(member_id)
693 member_id = int(member_id)
702 if member_type == 'user':
694 if member_type == 'user':
703 member_name = User.get(member_id).username
695 member_name = User.get(member_id).username
704 self.revoke_user_permission(repo=repo, user=member_id)
696 self.revoke_user_permission(repo=repo, user=member_id)
705 elif member_type == 'user_group':
697 elif member_type == 'user_group':
706 # check if we have permissions to alter this usergroup
698 # check if we have permissions to alter this usergroup
707 member_name = UserGroup.get(member_id).users_group_name
699 member_name = UserGroup.get(member_id).users_group_name
708 if not check_perms or HasUserGroupPermissionAny(
700 if not check_perms or HasUserGroupPermissionAny(
709 *req_perms)(member_name, user=cur_user):
701 *req_perms)(member_name, user=cur_user):
710 self.revoke_user_group_permission(
702 self.revoke_user_group_permission(
711 repo=repo, group_name=member_id)
703 repo=repo, group_name=member_id)
712 else:
704 else:
713 raise ValueError("member_type must be 'user' or 'user_group' "
705 raise ValueError("member_type must be 'user' or 'user_group' "
714 "got {} instead".format(member_type))
706 "got {} instead".format(member_type))
715
707
716 changes['deleted'].append({'type': member_type, 'id': member_id,
708 changes['deleted'].append({'type': member_type, 'id': member_id,
717 'name': member_name, 'new_perm': perm})
709 'name': member_name, 'new_perm': perm})
718 return changes
710 return changes
719
711
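# Hedged sketch (editorial addition): update_permissions() above consumes
# (member_id, permission_name, member_type) triples and returns a summary
# dict. Illustrative inputs and the resulting shape, with made-up ids:
perm_additions = [(2, 'repository.write', 'user')]
perm_updates = [(1, 'repository.none', 'user')]           # e.g. the default user
perm_deletions = [(5, 'repository.read', 'user_group')]

expected_changes_shape = {
    'added':   [{'type': 'user', 'id': 2, 'name': '<username>',
                 'new_perm': 'repository.write'}],
    'updated': [{'type': 'user', 'id': 1, 'name': '<username>',
                 'new_perm': 'repository.none'}],
    'deleted': [{'type': 'user_group', 'id': 5, 'name': '<group name>',
                 'new_perm': 'repository.read'}],
    'default_user_changed': True,   # set only when the default user's perm changed
}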
720 def create_fork(self, form_data, cur_user):
712 def create_fork(self, form_data, cur_user):
721 """
713 """
722 Simple wrapper around the celery task for fork creation
714 Simple wrapper around the celery task for fork creation
723
715
724 :param form_data:
716 :param form_data:
725 :param cur_user:
717 :param cur_user:
726 """
718 """
727 from rhodecode.lib.celerylib import tasks, run_task
719 from rhodecode.lib.celerylib import tasks, run_task
728 return run_task(tasks.create_repo_fork, form_data, cur_user)
720 return run_task(tasks.create_repo_fork, form_data, cur_user)
729
721
730 def archive(self, repo):
722 def archive(self, repo):
731 """
723 """
732 Archive given repository. Set archive flag.
724 Archive given repository. Set archive flag.
733
725
734 :param repo:
726 :param repo:
735 """
727 """
736 repo = self._get_repo(repo)
728 repo = self._get_repo(repo)
737 if repo:
729 if repo:
738
730
739 try:
731 try:
740 repo.archived = True
732 repo.archived = True
741 self.sa.add(repo)
733 self.sa.add(repo)
742 self.sa.commit()
734 self.sa.commit()
743 except Exception:
735 except Exception:
744 log.error(traceback.format_exc())
736 log.error(traceback.format_exc())
745 raise
737 raise
746
738
747 def delete(self, repo, forks=None, pull_requests=None, fs_remove=True, cur_user=None):
739 def delete(self, repo, forks=None, pull_requests=None, fs_remove=True, cur_user=None):
748 """
740 """
749 Delete given repository; the forks parameter defines what to do with
741 Delete given repository; the forks parameter defines what to do with
750 attached forks. Throws AttachedForksError if deleted repo has attached
742 attached forks. Throws AttachedForksError if deleted repo has attached
751 forks
743 forks
752
744
753 :param repo:
745 :param repo:
754 :param forks: str 'delete' or 'detach'
746 :param forks: str 'delete' or 'detach'
755 :param pull_requests: str 'delete' or None
747 :param pull_requests: str 'delete' or None
756 :param fs_remove: remove(archive) repo from filesystem
748 :param fs_remove: remove(archive) repo from filesystem
757 """
749 """
758 if not cur_user:
750 if not cur_user:
759 cur_user = getattr(get_current_rhodecode_user(), 'username', None)
751 cur_user = getattr(get_current_rhodecode_user(), 'username', None)
760 repo = self._get_repo(repo)
752 repo = self._get_repo(repo)
761 if repo:
753 if repo:
762 if forks == 'detach':
754 if forks == 'detach':
763 for r in repo.forks:
755 for r in repo.forks:
764 r.fork = None
756 r.fork = None
765 self.sa.add(r)
757 self.sa.add(r)
766 elif forks == 'delete':
758 elif forks == 'delete':
767 for r in repo.forks:
759 for r in repo.forks:
768 self.delete(r, forks='delete')
760 self.delete(r, forks='delete')
769 elif [f for f in repo.forks]:
761 elif [f for f in repo.forks]:
770 raise AttachedForksError()
762 raise AttachedForksError()
771
763
772 # check for pull requests
764 # check for pull requests
773 pr_sources = repo.pull_requests_source
765 pr_sources = repo.pull_requests_source
774 pr_targets = repo.pull_requests_target
766 pr_targets = repo.pull_requests_target
775 if pull_requests != 'delete' and (pr_sources or pr_targets):
767 if pull_requests != 'delete' and (pr_sources or pr_targets):
776 raise AttachedPullRequestsError()
768 raise AttachedPullRequestsError()
777
769
778 old_repo_dict = repo.get_dict()
770 old_repo_dict = repo.get_dict()
779 events.trigger(events.RepoPreDeleteEvent(repo))
771 events.trigger(events.RepoPreDeleteEvent(repo))
780 try:
772 try:
781 self.sa.delete(repo)
773 self.sa.delete(repo)
782 if fs_remove:
774 if fs_remove:
783 self._delete_filesystem_repo(repo)
775 self._delete_filesystem_repo(repo)
784 else:
776 else:
785 log.debug('skipping removal from filesystem')
777 log.debug('skipping removal from filesystem')
786 old_repo_dict.update({
778 old_repo_dict.update({
787 'deleted_by': cur_user,
779 'deleted_by': cur_user,
788 'deleted_on': time.time(),
780 'deleted_on': time.time(),
789 })
781 })
790 hooks_base.delete_repository(**old_repo_dict)
782 hooks_base.delete_repository(**old_repo_dict)
791 events.trigger(events.RepoDeleteEvent(repo))
783 events.trigger(events.RepoDeleteEvent(repo))
792 except Exception:
784 except Exception:
793 log.error(traceback.format_exc())
785 log.error(traceback.format_exc())
794 raise
786 raise
795
787
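# Hedged sketch (editorial addition): delete() above raises when forks or pull
# requests are still attached and no handling strategy is given. Assuming the
# RhodeCode environment, with `repo_model` a RepoModel instance and `repo` an
# existing Repository:
from rhodecode.lib.exceptions import AttachedForksError, AttachedPullRequestsError


def try_delete(repo_model, repo):
    try:
        repo_model.delete(repo)                  # strict: refuses if anything is attached
    except AttachedForksError:
        repo_model.delete(repo, forks='detach')  # keep the forks, just unlink them
    except AttachedPullRequestsError:
        repo_model.delete(repo, pull_requests='delete')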
796 def grant_user_permission(self, repo, user, perm):
788 def grant_user_permission(self, repo, user, perm):
797 """
789 """
798 Grant permission for user on given repository, or update existing one
790 Grant permission for user on given repository, or update existing one
799 if found
791 if found
800
792
801 :param repo: Instance of Repository, repository_id, or repository name
793 :param repo: Instance of Repository, repository_id, or repository name
802 :param user: Instance of User, user_id or username
794 :param user: Instance of User, user_id or username
803 :param perm: Instance of Permission, or permission_name
795 :param perm: Instance of Permission, or permission_name
804 """
796 """
805 user = self._get_user(user)
797 user = self._get_user(user)
806 repo = self._get_repo(repo)
798 repo = self._get_repo(repo)
807 permission = self._get_perm(perm)
799 permission = self._get_perm(perm)
808
800
809 # check if we have that permission already
801 # check if we have that permission already
810 obj = self.sa.query(UserRepoToPerm) \
802 obj = self.sa.query(UserRepoToPerm) \
811 .filter(UserRepoToPerm.user == user) \
803 .filter(UserRepoToPerm.user == user) \
812 .filter(UserRepoToPerm.repository == repo) \
804 .filter(UserRepoToPerm.repository == repo) \
813 .scalar()
805 .scalar()
814 if obj is None:
806 if obj is None:
815 # create new !
807 # create new !
816 obj = UserRepoToPerm()
808 obj = UserRepoToPerm()
817 obj.repository = repo
809 obj.repository = repo
818 obj.user = user
810 obj.user = user
819 obj.permission = permission
811 obj.permission = permission
820 self.sa.add(obj)
812 self.sa.add(obj)
821 log.debug('Granted perm %s to %s on %s', perm, user, repo)
813 log.debug('Granted perm %s to %s on %s', perm, user, repo)
822 action_logger_generic(
814 action_logger_generic(
823 'granted permission: {} to user: {} on repo: {}'.format(
815 'granted permission: {} to user: {} on repo: {}'.format(
824 perm, user, repo), namespace='security.repo')
816 perm, user, repo), namespace='security.repo')
825 return obj
817 return obj
826
818
827 def revoke_user_permission(self, repo, user):
819 def revoke_user_permission(self, repo, user):
828 """
820 """
829 Revoke permission for user on given repository
821 Revoke permission for user on given repository
830
822
831 :param repo: Instance of Repository, repository_id, or repository name
823 :param repo: Instance of Repository, repository_id, or repository name
832 :param user: Instance of User, user_id or username
824 :param user: Instance of User, user_id or username
833 """
825 """
834
826
835 user = self._get_user(user)
827 user = self._get_user(user)
836 repo = self._get_repo(repo)
828 repo = self._get_repo(repo)
837
829
838 obj = self.sa.query(UserRepoToPerm) \
830 obj = self.sa.query(UserRepoToPerm) \
839 .filter(UserRepoToPerm.repository == repo) \
831 .filter(UserRepoToPerm.repository == repo) \
840 .filter(UserRepoToPerm.user == user) \
832 .filter(UserRepoToPerm.user == user) \
841 .scalar()
833 .scalar()
842 if obj:
834 if obj:
843 self.sa.delete(obj)
835 self.sa.delete(obj)
844 log.debug('Revoked perm on %s on %s', repo, user)
836 log.debug('Revoked perm on %s on %s', repo, user)
845 action_logger_generic(
837 action_logger_generic(
846 'revoked permission from user: {} on repo: {}'.format(
838 'revoked permission from user: {} on repo: {}'.format(
847 user, repo), namespace='security.repo')
839 user, repo), namespace='security.repo')
848
840
849 def grant_user_group_permission(self, repo, group_name, perm):
841 def grant_user_group_permission(self, repo, group_name, perm):
850 """
842 """
851 Grant permission for user group on given repository, or update
843 Grant permission for user group on given repository, or update
852 existing one if found
844 existing one if found
853
845
854 :param repo: Instance of Repository, repository_id, or repository name
846 :param repo: Instance of Repository, repository_id, or repository name
855 :param group_name: Instance of UserGroup, users_group_id,
847 :param group_name: Instance of UserGroup, users_group_id,
856 or user group name
848 or user group name
857 :param perm: Instance of Permission, or permission_name
849 :param perm: Instance of Permission, or permission_name
858 """
850 """
859 repo = self._get_repo(repo)
851 repo = self._get_repo(repo)
860 group_name = self._get_user_group(group_name)
852 group_name = self._get_user_group(group_name)
861 permission = self._get_perm(perm)
853 permission = self._get_perm(perm)
862
854
863 # check if we have that permission already
855 # check if we have that permission already
864 obj = self.sa.query(UserGroupRepoToPerm) \
856 obj = self.sa.query(UserGroupRepoToPerm) \
865 .filter(UserGroupRepoToPerm.users_group == group_name) \
857 .filter(UserGroupRepoToPerm.users_group == group_name) \
866 .filter(UserGroupRepoToPerm.repository == repo) \
858 .filter(UserGroupRepoToPerm.repository == repo) \
867 .scalar()
859 .scalar()
868
860
869 if obj is None:
861 if obj is None:
870 # create new
862 # create new
871 obj = UserGroupRepoToPerm()
863 obj = UserGroupRepoToPerm()
872
864
873 obj.repository = repo
865 obj.repository = repo
874 obj.users_group = group_name
866 obj.users_group = group_name
875 obj.permission = permission
867 obj.permission = permission
876 self.sa.add(obj)
868 self.sa.add(obj)
877 log.debug('Granted perm %s to %s on %s', perm, group_name, repo)
869 log.debug('Granted perm %s to %s on %s', perm, group_name, repo)
878 action_logger_generic(
870 action_logger_generic(
879 'granted permission: {} to usergroup: {} on repo: {}'.format(
871 'granted permission: {} to usergroup: {} on repo: {}'.format(
880 perm, group_name, repo), namespace='security.repo')
872 perm, group_name, repo), namespace='security.repo')
881
873
882 return obj
874 return obj
883
875
884 def revoke_user_group_permission(self, repo, group_name):
876 def revoke_user_group_permission(self, repo, group_name):
885 """
877 """
886 Revoke permission for user group on given repository
878 Revoke permission for user group on given repository
887
879
888 :param repo: Instance of Repository, repository_id, or repository name
880 :param repo: Instance of Repository, repository_id, or repository name
889 :param group_name: Instance of UserGroup, users_group_id,
881 :param group_name: Instance of UserGroup, users_group_id,
890 or user group name
882 or user group name
891 """
883 """
892 repo = self._get_repo(repo)
884 repo = self._get_repo(repo)
893 group_name = self._get_user_group(group_name)
885 group_name = self._get_user_group(group_name)
894
886
895 obj = self.sa.query(UserGroupRepoToPerm) \
887 obj = self.sa.query(UserGroupRepoToPerm) \
896 .filter(UserGroupRepoToPerm.repository == repo) \
888 .filter(UserGroupRepoToPerm.repository == repo) \
897 .filter(UserGroupRepoToPerm.users_group == group_name) \
889 .filter(UserGroupRepoToPerm.users_group == group_name) \
898 .scalar()
890 .scalar()
899 if obj:
891 if obj:
900 self.sa.delete(obj)
892 self.sa.delete(obj)
901 log.debug('Revoked perm to %s on %s', repo, group_name)
893 log.debug('Revoked perm to %s on %s', repo, group_name)
902 action_logger_generic(
894 action_logger_generic(
903 'revoked permission from usergroup: {} on repo: {}'.format(
895 'revoked permission from usergroup: {} on repo: {}'.format(
904 group_name, repo), namespace='security.repo')
896 group_name, repo), namespace='security.repo')
905
897
906 def delete_stats(self, repo_name):
898 def delete_stats(self, repo_name):
907 """
899 """
908 removes stats for given repo
900 removes stats for given repo
909
901
910 :param repo_name:
902 :param repo_name:
911 """
903 """
912 repo = self._get_repo(repo_name)
904 repo = self._get_repo(repo_name)
913 try:
905 try:
914 obj = self.sa.query(Statistics) \
906 obj = self.sa.query(Statistics) \
915 .filter(Statistics.repository == repo).scalar()
907 .filter(Statistics.repository == repo).scalar()
916 if obj:
908 if obj:
917 self.sa.delete(obj)
909 self.sa.delete(obj)
918 except Exception:
910 except Exception:
919 log.error(traceback.format_exc())
911 log.error(traceback.format_exc())
920 raise
912 raise
921
913
922 def add_repo_field(self, repo_name, field_key, field_label, field_value='',
914 def add_repo_field(self, repo_name, field_key, field_label, field_value='',
923 field_type='str', field_desc=''):
915 field_type='str', field_desc=''):
924
916
925 repo = self._get_repo(repo_name)
917 repo = self._get_repo(repo_name)
926
918
927 new_field = RepositoryField()
919 new_field = RepositoryField()
928 new_field.repository = repo
920 new_field.repository = repo
929 new_field.field_key = field_key
921 new_field.field_key = field_key
930 new_field.field_type = field_type # python type
922 new_field.field_type = field_type # python type
931 new_field.field_value = field_value
923 new_field.field_value = field_value
932 new_field.field_desc = field_desc
924 new_field.field_desc = field_desc
933 new_field.field_label = field_label
925 new_field.field_label = field_label
934 self.sa.add(new_field)
926 self.sa.add(new_field)
935 return new_field
927 return new_field
936
928
937 def delete_repo_field(self, repo_name, field_key):
929 def delete_repo_field(self, repo_name, field_key):
938 repo = self._get_repo(repo_name)
930 repo = self._get_repo(repo_name)
939 field = RepositoryField.get_by_key_name(field_key, repo)
931 field = RepositoryField.get_by_key_name(field_key, repo)
940 if field:
932 if field:
941 self.sa.delete(field)
933 self.sa.delete(field)
942
934
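# A minimal sketch of attaching a custom field with add_repo_field above,
# assuming an active database session; the repository name, key and values are
# hypothetical. As with the permission methods, the caller commits the session.
from rhodecode.model.db import Session
from rhodecode.model.repo import RepoModel

RepoModel().add_repo_field(
    'vcs-test', field_key='release_channel', field_label='Release channel',
    field_value='stable', field_type='str', field_desc='Channel this repo releases from')
Session().commit()
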
943 def set_landing_rev(self, repo, landing_rev_name):
935 def set_landing_rev(self, repo, landing_rev_name):
944 if landing_rev_name.startswith('branch:'):
936 if landing_rev_name.startswith('branch:'):
945 landing_rev_name = landing_rev_name.split('branch:')[-1]
937 landing_rev_name = landing_rev_name.split('branch:')[-1]
946 scm_instance = repo.scm_instance()
938 scm_instance = repo.scm_instance()
947 if scm_instance:
939 if scm_instance:
948 return scm_instance._remote.set_head_ref(landing_rev_name)
940 return scm_instance._remote.set_head_ref(landing_rev_name)
949
941
950 def _create_filesystem_repo(self, repo_name, repo_type, repo_group,
942 def _create_filesystem_repo(self, repo_name, repo_type, repo_group,
951 clone_uri=None, repo_store_location=None,
943 clone_uri=None, repo_store_location=None,
952 use_global_config=False, install_hooks=True):
944 use_global_config=False, install_hooks=True):
953 """
945 """
954 makes a repository on the filesystem. It is group aware, meaning it will create
946 makes a repository on the filesystem. It is group aware, meaning it will create
955 the repository within a group and alter the paths according to the
947 the repository within a group and alter the paths according to the
956 group location
948 group location
957
949
958 :param repo_name:
950 :param repo_name:
959 :param repo_type:
951 :param repo_type:
960 :param repo_group:
952 :param repo_group:
961 :param clone_uri:
953 :param clone_uri:
962 :param repo_store_location:
954 :param repo_store_location:
963 """
955 """
964 from rhodecode.lib.utils import is_valid_repo, is_valid_repo_group
956 from rhodecode.lib.utils import is_valid_repo, is_valid_repo_group
965 from rhodecode.model.scm import ScmModel
957 from rhodecode.model.scm import ScmModel
966
958
967 if Repository.NAME_SEP in repo_name:
959 if Repository.NAME_SEP in repo_name:
968 raise ValueError(
960 raise ValueError(
969 'repo_name must not contain groups got `%s`' % repo_name)
961 'repo_name must not contain groups got `%s`' % repo_name)
970
962
971 if isinstance(repo_group, RepoGroup):
963 if isinstance(repo_group, RepoGroup):
972 new_parent_path = os.sep.join(repo_group.full_path_splitted)
964 new_parent_path = os.sep.join(repo_group.full_path_splitted)
973 else:
965 else:
974 new_parent_path = repo_group or ''
966 new_parent_path = repo_group or ''
975
967
976 if repo_store_location:
968 if repo_store_location:
977 _paths = [repo_store_location]
969 _paths = [repo_store_location]
978 else:
970 else:
979 _paths = [self.repos_path, new_parent_path, repo_name]
971 _paths = [self.repos_path, new_parent_path, repo_name]
980 # we need to make it str for mercurial
972 # we need to make it str for mercurial
981 repo_path = os.path.join(*map(lambda x: safe_str(x), _paths))
973 repo_path = os.path.join(*map(lambda x: safe_str(x), _paths))
982
974
983 # check that this path is not already a valid repository
975 # check that this path is not already a valid repository
984 if is_valid_repo(repo_path, self.repos_path):
976 if is_valid_repo(repo_path, self.repos_path):
985 raise Exception(f'This path {repo_path} is a valid repository')
977 raise Exception(f'This path {repo_path} is a valid repository')
986
978
987 # check that this path is not already a valid repository group
979 # check that this path is not already a valid repository group
988 if is_valid_repo_group(repo_path, self.repos_path):
980 if is_valid_repo_group(repo_path, self.repos_path):
989 raise Exception(f'This path {repo_path} is a valid group')
981 raise Exception(f'This path {repo_path} is a valid group')
990
982
991 log.info('creating repo %s in %s from url: `%s`',
983 log.info('creating repo %s in %s from url: `%s`',
992 repo_name, safe_str(repo_path),
984 repo_name, safe_str(repo_path),
993 obfuscate_url_pw(clone_uri))
985 obfuscate_url_pw(clone_uri))
994
986
995 backend = get_backend(repo_type)
987 backend = get_backend(repo_type)
996
988
997 config_repo = None if use_global_config else repo_name
989 config_repo = None if use_global_config else repo_name
998 if config_repo and new_parent_path:
990 if config_repo and new_parent_path:
999 config_repo = Repository.NAME_SEP.join(
991 config_repo = Repository.NAME_SEP.join(
1000 (new_parent_path, config_repo))
992 (new_parent_path, config_repo))
1001 config = make_db_config(clear_session=False, repo=config_repo)
993 config = make_db_config(clear_session=False, repo=config_repo)
1002 config.set('extensions', 'largefiles', '')
994 config.set('extensions', 'largefiles', '')
1003
995
1004 # patch and reset hooks section of UI config to not run any
996 # patch and reset hooks section of UI config to not run any
1005 # hooks on creating remote repo
997 # hooks on creating remote repo
1006 config.clear_section('hooks')
998 config.clear_section('hooks')
1007
999
1008 # TODO: johbo: Unify this, hardcoded "bare=True" does not look nice
1000 # TODO: johbo: Unify this, hardcoded "bare=True" does not look nice
1009 if repo_type == 'git':
1001 if repo_type == 'git':
1010 repo = backend(
1002 repo = backend(
1011 repo_path, config=config, create=True, src_url=clone_uri, bare=True,
1003 repo_path, config=config, create=True, src_url=clone_uri, bare=True,
1012 with_wire={"cache": False})
1004 with_wire={"cache": False})
1013 else:
1005 else:
1014 repo = backend(
1006 repo = backend(
1015 repo_path, config=config, create=True, src_url=clone_uri,
1007 repo_path, config=config, create=True, src_url=clone_uri,
1016 with_wire={"cache": False})
1008 with_wire={"cache": False})
1017
1009
1018 if install_hooks:
1010 if install_hooks:
1019 repo.install_hooks()
1011 repo.install_hooks()
1020
1012
1021 log.debug('Created repo %s with %s backend',
1013 log.debug('Created repo %s with %s backend',
1022 safe_str(repo_name), safe_str(repo_type))
1014 safe_str(repo_name), safe_str(repo_type))
1023 return repo
1015 return repo
1024
1016
1025 def _rename_filesystem_repo(self, old, new):
1017 def _rename_filesystem_repo(self, old, new):
1026 """
1018 """
1027 renames repository on filesystem
1019 renames repository on filesystem
1028
1020
1029 :param old: old name
1021 :param old: old name
1030 :param new: new name
1022 :param new: new name
1031 """
1023 """
1032 log.info('renaming repo from %s to %s', old, new)
1024 log.info('renaming repo from %s to %s', old, new)
1033
1025
1034 old_path = os.path.join(self.repos_path, old)
1026 old_path = os.path.join(self.repos_path, old)
1035 new_path = os.path.join(self.repos_path, new)
1027 new_path = os.path.join(self.repos_path, new)
1036 if os.path.isdir(new_path):
1028 if os.path.isdir(new_path):
1037 raise Exception(
1029 raise Exception(
1038 'Was trying to rename to already existing dir %s' % new_path
1030 'Was trying to rename to already existing dir %s' % new_path
1039 )
1031 )
1040 shutil.move(old_path, new_path)
1032 shutil.move(old_path, new_path)
1041
1033
1042 def _delete_filesystem_repo(self, repo):
1034 def _delete_filesystem_repo(self, repo):
1043 """
1035 """
1044 removes a repo from the filesystem. The removal is actually done by
1036 removes a repo from the filesystem. The removal is actually done by
1045 adding an rm__ prefix to the dir and renaming the internal .hg/.git dirs, so the
1037 adding an rm__ prefix to the dir and renaming the internal .hg/.git dirs, so the
1046 repository is no longer valid for rhodecode; it can be undeleted later on
1038 repository is no longer valid for rhodecode; it can be undeleted later on
1047 by reverting the renames on this repository
1039 by reverting the renames on this repository
1048
1040
1049 :param repo: repo object
1041 :param repo: repo object
1050 """
1042 """
1051 rm_path = os.path.join(self.repos_path, repo.repo_name)
1043 rm_path = os.path.join(self.repos_path, repo.repo_name)
1052 repo_group = repo.group
1044 repo_group = repo.group
1053 log.info("delete_filesystem_repo: removing repository %s", rm_path)
1045 log.info("delete_filesystem_repo: removing repository %s", rm_path)
1054 # disable hg/git internals so it doesn't get detected as a repo
1046 # disable hg/git internals so it doesn't get detected as a repo
1055 alias = repo.repo_type
1047 alias = repo.repo_type
1056
1048
1057 config = make_db_config(clear_session=False)
1049 config = make_db_config(clear_session=False)
1058 config.set('extensions', 'largefiles', '')
1050 config.set('extensions', 'largefiles', '')
1059 bare = getattr(repo.scm_instance(config=config), 'bare', False)
1051 bare = getattr(repo.scm_instance(config=config), 'bare', False)
1060
1052
1061 # skip this for bare git repos
1053 # skip this for bare git repos
1062 if not bare:
1054 if not bare:
1063 # disable VCS repo
1055 # disable VCS repo
1064 vcs_path = os.path.join(rm_path, '.%s' % alias)
1056 vcs_path = os.path.join(rm_path, '.%s' % alias)
1065 if os.path.exists(vcs_path):
1057 if os.path.exists(vcs_path):
1066 shutil.move(vcs_path, os.path.join(rm_path, 'rm__.%s' % alias))
1058 shutil.move(vcs_path, os.path.join(rm_path, 'rm__.%s' % alias))
1067
1059
1068 _now = datetime.datetime.now()
1060 _now = datetime.datetime.now()
1069 _ms = str(_now.microsecond).rjust(6, '0')
1061 _ms = str(_now.microsecond).rjust(6, '0')
1070 _d = 'rm__{}__{}'.format(_now.strftime('%Y%m%d_%H%M%S_' + _ms),
1062 _d = 'rm__{}__{}'.format(_now.strftime('%Y%m%d_%H%M%S_' + _ms),
1071 repo.just_name)
1063 repo.just_name)
1072 if repo_group:
1064 if repo_group:
1073 # if repository is in group, prefix the removal path with the group
1065 # if repository is in group, prefix the removal path with the group
1074 args = repo_group.full_path_splitted + [_d]
1066 args = repo_group.full_path_splitted + [_d]
1075 _d = os.path.join(*args)
1067 _d = os.path.join(*args)
1076
1068
1077 if os.path.isdir(rm_path):
1069 if os.path.isdir(rm_path):
1078 shutil.move(rm_path, os.path.join(self.repos_path, _d))
1070 shutil.move(rm_path, os.path.join(self.repos_path, _d))
1079
1071
1080 # finally cleanup diff-cache if it exists
1072 # finally cleanup diff-cache if it exists
1081 cached_diffs_dir = repo.cached_diffs_dir
1073 cached_diffs_dir = repo.cached_diffs_dir
1082 if os.path.isdir(cached_diffs_dir):
1074 if os.path.isdir(cached_diffs_dir):
1083 shutil.rmtree(cached_diffs_dir)
1075 shutil.rmtree(cached_diffs_dir)
1084
1076
1085
1077
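# A short sketch of the archival naming used by _delete_filesystem_repo above:
# the repository directory is moved aside under an 'rm__<timestamp>__<name>'
# prefix instead of being erased, which is what makes a later undelete possible.
import datetime

_now = datetime.datetime.now()
_ms = str(_now.microsecond).rjust(6, '0')
archived_name = 'rm__{}__{}'.format(_now.strftime('%Y%m%d_%H%M%S_' + _ms), 'vcs-test')
# e.g. 'rm__20240101_101530_000042__vcs-test'
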
1086 class ReadmeFinder:
1078 class ReadmeFinder:
1087 """
1079 """
1088 Utility which knows how to find a readme for a specific commit.
1080 Utility which knows how to find a readme for a specific commit.
1089
1081
1090 The main idea is that this is a configurable algorithm. When creating an
1082 The main idea is that this is a configurable algorithm. When creating an
1091 instance you can define parameters, currently only the `default_renderer`.
1083 instance you can define parameters, currently only the `default_renderer`.
1092 Based on this configuration the method :meth:`search` behaves slightly
1084 Based on this configuration the method :meth:`search` behaves slightly
1093 differently.
1085 differently.
1094 """
1086 """
1095
1087
1096 readme_re = re.compile(r'^readme(\.[^\.]+)?$', re.IGNORECASE)
1088 readme_re = re.compile(r'^readme(\.[^\.]+)?$', re.IGNORECASE)
1097 path_re = re.compile(r'^docs?', re.IGNORECASE)
1089 path_re = re.compile(r'^docs?', re.IGNORECASE)
1098
1090
1099 default_priorities = {
1091 default_priorities = {
1100 None: 0,
1092 None: 0,
1101 '.rst': 1,
1093 '.rst': 1,
1102 '.md': 1,
1094 '.md': 1,
1103 '.rest': 2,
1095 '.rest': 2,
1104 '.mkdn': 2,
1096 '.mkdn': 2,
1105 '.text': 2,
1097 '.text': 2,
1106 '.txt': 3,
1098 '.txt': 3,
1107 '.mdown': 3,
1099 '.mdown': 3,
1108 '.markdown': 4,
1100 '.markdown': 4,
1109 }
1101 }
1110
1102
1111 path_priority = {
1103 path_priority = {
1112 'doc': 0,
1104 'doc': 0,
1113 'docs': 1,
1105 'docs': 1,
1114 }
1106 }
1115
1107
1116 FALLBACK_PRIORITY = 99
1108 FALLBACK_PRIORITY = 99
1117
1109
1118 RENDERER_TO_EXTENSION = {
1110 RENDERER_TO_EXTENSION = {
1119 'rst': ['.rst', '.rest'],
1111 'rst': ['.rst', '.rest'],
1120 'markdown': ['.md', '.mkdn', '.mdown', '.markdown'],
1112 'markdown': ['.md', '.mkdn', '.mdown', '.markdown'],
1121 }
1113 }
1122
1114
1123 def __init__(self, default_renderer=None):
1115 def __init__(self, default_renderer=None):
1124 self._default_renderer = default_renderer
1116 self._default_renderer = default_renderer
1125 self._renderer_extensions = self.RENDERER_TO_EXTENSION.get(
1117 self._renderer_extensions = self.RENDERER_TO_EXTENSION.get(
1126 default_renderer, [])
1118 default_renderer, [])
1127
1119
1128 def search(self, commit, path='/'):
1120 def search(self, commit, path='/'):
1129 """
1121 """
1130 Find a readme in the given `commit`.
1122 Find a readme in the given `commit`.
1131 """
1123 """
1132 # first, check the PATH type to make sure it is actually a DIR
1124 # first, check the PATH type to make sure it is actually a DIR
1133 if commit.get_node(path).kind != NodeKind.DIR:
1125 if commit.get_node(path).kind != NodeKind.DIR:
1134 return None
1126 return None
1135
1127
1136 nodes = commit.get_nodes(path)
1128 nodes = commit.get_nodes(path)
1137 matches = self._match_readmes(nodes)
1129 matches = self._match_readmes(nodes)
1138 matches = self._sort_according_to_priority(matches)
1130 matches = self._sort_according_to_priority(matches)
1139 if matches:
1131 if matches:
1140 return matches[0].node
1132 return matches[0].node
1141
1133
1142 paths = self._match_paths(nodes)
1134 paths = self._match_paths(nodes)
1143 paths = self._sort_paths_according_to_priority(paths)
1135 paths = self._sort_paths_according_to_priority(paths)
1144 for path in paths:
1136 for path in paths:
1145 match = self.search(commit, path=path)
1137 match = self.search(commit, path=path)
1146 if match:
1138 if match:
1147 return match
1139 return match
1148
1140
1149 return None
1141 return None
1150
1142
1151 def _match_readmes(self, nodes):
1143 def _match_readmes(self, nodes):
1152 for node in nodes:
1144 for node in nodes:
1153 if not node.is_file():
1145 if not node.is_file():
1154 continue
1146 continue
1155 path = node.path.rsplit('/', 1)[-1]
1147 path = node.path.rsplit('/', 1)[-1]
1156 match = self.readme_re.match(path)
1148 match = self.readme_re.match(path)
1157 if match:
1149 if match:
1158 extension = match.group(1)
1150 extension = match.group(1)
1159 yield ReadmeMatch(node, match, self._priority(extension))
1151 yield ReadmeMatch(node, match, self._priority(extension))
1160
1152
1161 def _match_paths(self, nodes):
1153 def _match_paths(self, nodes):
1162 for node in nodes:
1154 for node in nodes:
1163 if not node.is_dir():
1155 if not node.is_dir():
1164 continue
1156 continue
1165 match = self.path_re.match(node.path)
1157 match = self.path_re.match(node.path)
1166 if match:
1158 if match:
1167 yield node.path
1159 yield node.path
1168
1160
1169 def _priority(self, extension):
1161 def _priority(self, extension):
1170 renderer_priority = (
1162 renderer_priority = (
1171 0 if extension in self._renderer_extensions else 1)
1163 0 if extension in self._renderer_extensions else 1)
1172 extension_priority = self.default_priorities.get(
1164 extension_priority = self.default_priorities.get(
1173 extension, self.FALLBACK_PRIORITY)
1165 extension, self.FALLBACK_PRIORITY)
1174 return (renderer_priority, extension_priority)
1166 return (renderer_priority, extension_priority)
1175
1167
1176 def _sort_according_to_priority(self, matches):
1168 def _sort_according_to_priority(self, matches):
1177
1169
1178 def priority_and_path(match):
1170 def priority_and_path(match):
1179 return (match.priority, match.path)
1171 return (match.priority, match.path)
1180
1172
1181 return sorted(matches, key=priority_and_path)
1173 return sorted(matches, key=priority_and_path)
1182
1174
1183 def _sort_paths_according_to_priority(self, paths):
1175 def _sort_paths_according_to_priority(self, paths):
1184
1176
1185 def priority_and_path(path):
1177 def priority_and_path(path):
1186 return (self.path_priority.get(path, self.FALLBACK_PRIORITY), path)
1178 return (self.path_priority.get(path, self.FALLBACK_PRIORITY), path)
1187
1179
1188 return sorted(paths, key=priority_and_path)
1180 return sorted(paths, key=priority_and_path)
1189
1181
1190
1182
1191 class ReadmeMatch:
1183 class ReadmeMatch:
1192
1184
1193 def __init__(self, node, match, priority):
1185 def __init__(self, node, match, priority):
1194 self.node = node
1186 self.node = node
1195 self._match = match
1187 self._match = match
1196 self.priority = priority
1188 self.priority = priority
1197
1189
1198 @property
1190 @property
1199 def path(self):
1191 def path(self):
1200 return self.node.path
1192 return self.node.path
1201
1193
1202 def __repr__(self):
1194 def __repr__(self):
1203 return f'<ReadmeMatch {self.path} priority={self.priority}>'
1195 return f'<ReadmeMatch {self.path} priority={self.priority}>'
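# A minimal sketch of using ReadmeFinder, assuming `commit` is a commit object
# exposing get_node()/get_nodes() as relied upon by search() above:
finder = ReadmeFinder(default_renderer='markdown')
readme_node = finder.search(commit, path='/')
if readme_node is not None:
    print(readme_node.path)  # e.g. 'README.md', ranked first by renderer/extension priority
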
@@ -1,895 +1,886 b''
1 # Copyright (C) 2011-2023 RhodeCode GmbH
1 # Copyright (C) 2011-2023 RhodeCode GmbH
2 #
2 #
3 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
4 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
5 # (only), as published by the Free Software Foundation.
5 # (only), as published by the Free Software Foundation.
6 #
6 #
7 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
10 # GNU General Public License for more details.
11 #
11 #
12 # You should have received a copy of the GNU Affero General Public License
12 # You should have received a copy of the GNU Affero General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 #
14 #
15 # This program is dual-licensed. If you wish to learn more about the
15 # This program is dual-licensed. If you wish to learn more about the
16 # RhodeCode Enterprise Edition, including its added features, Support services,
16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18
18
19
19
20 """
20 """
21 repo group model for RhodeCode
21 repo group model for RhodeCode
22 """
22 """
23
23
24 import os
24 import os
25 import datetime
25 import datetime
26 import itertools
26 import itertools
27 import logging
27 import logging
28 import shutil
28 import shutil
29 import time
29 import time
30 import traceback
30 import traceback
31 import string
31 import string
32
32
33 from zope.cachedescriptors.property import Lazy as LazyProperty
33 from zope.cachedescriptors.property import Lazy as LazyProperty
34
34
35 from rhodecode import events
35 from rhodecode import events
36 from rhodecode.model import BaseModel
36 from rhodecode.model import BaseModel
37 from rhodecode.model.db import (_hash_key, func, or_, in_filter_generator,
37 from rhodecode.model.db import (_hash_key, func, or_, in_filter_generator,
38 Session, RepoGroup, UserRepoGroupToPerm, User, Permission, UserGroupRepoGroupToPerm,
38 Session, RepoGroup, UserRepoGroupToPerm, User, Permission, UserGroupRepoGroupToPerm,
39 UserGroup, Repository)
39 UserGroup, Repository)
40 from rhodecode.model.permission import PermissionModel
40 from rhodecode.model.permission import PermissionModel
41 from rhodecode.model.settings import VcsSettingsModel, SettingsModel
41 from rhodecode.model.settings import VcsSettingsModel, SettingsModel
42 from rhodecode.lib.caching_query import FromCache
42 from rhodecode.lib.caching_query import FromCache
43 from rhodecode.lib.utils2 import action_logger_generic
43 from rhodecode.lib.utils2 import action_logger_generic
44
44
45 log = logging.getLogger(__name__)
45 log = logging.getLogger(__name__)
46
46
47
47
48 class RepoGroupModel(BaseModel):
48 class RepoGroupModel(BaseModel):
49
49
50 cls = RepoGroup
50 cls = RepoGroup
51 PERSONAL_GROUP_DESC = 'personal repo group of user `%(username)s`'
51 PERSONAL_GROUP_DESC = 'personal repo group of user `%(username)s`'
52 PERSONAL_GROUP_PATTERN = '${username}' # default
52 PERSONAL_GROUP_PATTERN = '${username}' # default
53
53
54 def _get_user_group(self, users_group):
54 def _get_user_group(self, users_group):
55 return self._get_instance(UserGroup, users_group,
55 return self._get_instance(UserGroup, users_group,
56 callback=UserGroup.get_by_group_name)
56 callback=UserGroup.get_by_group_name)
57
57
58 def _get_repo_group(self, repo_group):
58 def _get_repo_group(self, repo_group):
59 return self._get_instance(RepoGroup, repo_group,
59 return self._get_instance(RepoGroup, repo_group,
60 callback=RepoGroup.get_by_group_name)
60 callback=RepoGroup.get_by_group_name)
61
61
62 def get_repo_group(self, repo_group):
62 def get_repo_group(self, repo_group):
63 return self._get_repo_group(repo_group)
63 return self._get_repo_group(repo_group)
64
64
65 @LazyProperty
66 def repos_path(self):
67 """
68 Gets the repositories root path from database
69 """
70
71 settings_model = VcsSettingsModel(sa=self.sa)
72 return settings_model.get_repos_location()
73
74 def get_by_group_name(self, repo_group_name, cache=None):
65 def get_by_group_name(self, repo_group_name, cache=None):
75 repo = self.sa.query(RepoGroup) \
66 repo = self.sa.query(RepoGroup) \
76 .filter(RepoGroup.group_name == repo_group_name)
67 .filter(RepoGroup.group_name == repo_group_name)
77
68
78 if cache:
69 if cache:
79 name_key = _hash_key(repo_group_name)
70 name_key = _hash_key(repo_group_name)
80 repo = repo.options(
71 repo = repo.options(
81 FromCache("sql_cache_short", f"get_repo_group_{name_key}"))
72 FromCache("sql_cache_short", f"get_repo_group_{name_key}"))
82 return repo.scalar()
73 return repo.scalar()
83
74
84 def get_default_create_personal_repo_group(self):
75 def get_default_create_personal_repo_group(self):
85 value = SettingsModel().get_setting_by_name(
76 value = SettingsModel().get_setting_by_name(
86 'create_personal_repo_group')
77 'create_personal_repo_group')
87 return value.app_settings_value if value else None or False
78 return value.app_settings_value if value else None or False
88
79
89 def get_personal_group_name_pattern(self):
80 def get_personal_group_name_pattern(self):
90 value = SettingsModel().get_setting_by_name(
81 value = SettingsModel().get_setting_by_name(
91 'personal_repo_group_pattern')
82 'personal_repo_group_pattern')
92 val = value.app_settings_value if value else None
83 val = value.app_settings_value if value else None
93 group_template = val or self.PERSONAL_GROUP_PATTERN
84 group_template = val or self.PERSONAL_GROUP_PATTERN
94
85
95 group_template = group_template.lstrip('/')
86 group_template = group_template.lstrip('/')
96 return group_template
87 return group_template
97
88
98 def get_personal_group_name(self, user):
89 def get_personal_group_name(self, user):
99 template = self.get_personal_group_name_pattern()
90 template = self.get_personal_group_name_pattern()
100 return string.Template(template).safe_substitute(
91 return string.Template(template).safe_substitute(
101 username=user.username,
92 username=user.username,
102 user_id=user.user_id,
93 user_id=user.user_id,
103 first_name=user.first_name,
94 first_name=user.first_name,
104 last_name=user.last_name,
95 last_name=user.last_name,
105 )
96 )
106
97
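# A short sketch of the substitution performed above: with the default
# '${username}' pattern a hypothetical user 'john' ends up with the top-level
# personal group 'john'.
import string

template = RepoGroupModel.PERSONAL_GROUP_PATTERN.lstrip('/')  # '${username}'
name = string.Template(template).safe_substitute(
    username='john', user_id=1, first_name='John', last_name='Doe')
# name == 'john'
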
107 def create_personal_repo_group(self, user, commit_early=True):
98 def create_personal_repo_group(self, user, commit_early=True):
108 desc = self.PERSONAL_GROUP_DESC % {'username': user.username}
99 desc = self.PERSONAL_GROUP_DESC % {'username': user.username}
109 personal_repo_group_name = self.get_personal_group_name(user)
100 personal_repo_group_name = self.get_personal_group_name(user)
110
101
111 # create a new one
102 # create a new one
112 RepoGroupModel().create(
103 RepoGroupModel().create(
113 group_name=personal_repo_group_name,
104 group_name=personal_repo_group_name,
114 group_description=desc,
105 group_description=desc,
115 owner=user.username,
106 owner=user.username,
116 personal=True,
107 personal=True,
117 commit_early=commit_early)
108 commit_early=commit_early)
118
109
119 def _create_default_perms(self, new_group):
110 def _create_default_perms(self, new_group):
120 # create default permission
111 # create default permission
121 default_perm = 'group.read'
112 default_perm = 'group.read'
122 def_user = User.get_default_user()
113 def_user = User.get_default_user()
123 for p in def_user.user_perms:
114 for p in def_user.user_perms:
124 if p.permission.permission_name.startswith('group.'):
115 if p.permission.permission_name.startswith('group.'):
125 default_perm = p.permission.permission_name
116 default_perm = p.permission.permission_name
126 break
117 break
127
118
128 repo_group_to_perm = UserRepoGroupToPerm()
119 repo_group_to_perm = UserRepoGroupToPerm()
129 repo_group_to_perm.permission = Permission.get_by_key(default_perm)
120 repo_group_to_perm.permission = Permission.get_by_key(default_perm)
130
121
131 repo_group_to_perm.group = new_group
122 repo_group_to_perm.group = new_group
132 repo_group_to_perm.user = def_user
123 repo_group_to_perm.user = def_user
133 return repo_group_to_perm
124 return repo_group_to_perm
134
125
135 def _get_group_name_and_parent(self, group_name_full, repo_in_path=False,
126 def _get_group_name_and_parent(self, group_name_full, repo_in_path=False,
136 get_object=False):
127 get_object=False):
137 """
128 """
138 Gets the group name and a parent group name from the given group name.
129 Gets the group name and a parent group name from the given group name.
139 If repo_in_path is set to true, we assume the full path also includes the
130 If repo_in_path is set to true, we assume the full path also includes the
140 repo name; in that case we strip the last element.
131 repo name; in that case we strip the last element.
141
132
142 :param group_name_full:
133 :param group_name_full:
143 """
134 """
144 split_paths = 1
135 split_paths = 1
145 if repo_in_path:
136 if repo_in_path:
146 split_paths = 2
137 split_paths = 2
147 _parts = group_name_full.rsplit(RepoGroup.url_sep(), split_paths)
138 _parts = group_name_full.rsplit(RepoGroup.url_sep(), split_paths)
148
139
149 if repo_in_path and len(_parts) > 1:
140 if repo_in_path and len(_parts) > 1:
150 # in that case the last element is the repo_name
141 # in that case the last element is the repo_name
151 _parts.pop(-1)
142 _parts.pop(-1)
152 group_name_cleaned = _parts[-1] # just the group name
143 group_name_cleaned = _parts[-1] # just the group name
153 parent_repo_group_name = None
144 parent_repo_group_name = None
154
145
155 if len(_parts) > 1:
146 if len(_parts) > 1:
156 parent_repo_group_name = _parts[0]
147 parent_repo_group_name = _parts[0]
157
148
158 parent_group = None
149 parent_group = None
159 if parent_repo_group_name:
150 if parent_repo_group_name:
160 parent_group = RepoGroup.get_by_group_name(parent_repo_group_name)
151 parent_group = RepoGroup.get_by_group_name(parent_repo_group_name)
161
152
162 if get_object:
153 if get_object:
163 return group_name_cleaned, parent_repo_group_name, parent_group
154 return group_name_cleaned, parent_repo_group_name, parent_group
164
155
165 return group_name_cleaned, parent_repo_group_name
156 return group_name_cleaned, parent_repo_group_name
166
157
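# A short illustration of the split performed above for a nested group path
# (the URL separator is '/'; the path itself is hypothetical):
_parts = 'company/projects/backend'.rsplit('/', 1)
# _parts[-1] == 'backend'           -> group_name_cleaned
# _parts[0]  == 'company/projects'  -> parent_repo_group_name
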
167 def check_exist_filesystem(self, group_name, exc_on_failure=True):
158 def check_exist_filesystem(self, group_name, exc_on_failure=True):
168 create_path = os.path.join(self.repos_path, group_name)
159 create_path = os.path.join(self.repos_path, group_name)
169 log.debug('creating new group in %s', create_path)
160 log.debug('creating new group in %s', create_path)
170
161
171 if os.path.isdir(create_path):
162 if os.path.isdir(create_path):
172 if exc_on_failure:
163 if exc_on_failure:
173 abs_create_path = os.path.abspath(create_path)
164 abs_create_path = os.path.abspath(create_path)
174 raise Exception(f'Directory `{abs_create_path}` already exists !')
165 raise Exception(f'Directory `{abs_create_path}` already exists !')
175 return False
166 return False
176 return True
167 return True
177
168
178 def _create_group(self, group_name):
169 def _create_group(self, group_name):
179 """
170 """
180 makes repository group on filesystem
171 makes repository group on filesystem
181
172
182 :param repo_name:
173 :param repo_name:
183 :param parent_id:
174 :param parent_id:
184 """
175 """
185
176
186 self.check_exist_filesystem(group_name)
177 self.check_exist_filesystem(group_name)
187 create_path = os.path.join(self.repos_path, group_name)
178 create_path = os.path.join(self.repos_path, group_name)
188 log.debug('creating new group in %s', create_path)
179 log.debug('creating new group in %s', create_path)
189 os.makedirs(create_path, mode=0o755)
180 os.makedirs(create_path, mode=0o755)
190 log.debug('created group in %s', create_path)
181 log.debug('created group in %s', create_path)
191
182
192 def _rename_group(self, old, new):
183 def _rename_group(self, old, new):
193 """
184 """
194 Renames a group on filesystem
185 Renames a group on filesystem
195
186
196 :param group_name:
187 :param group_name:
197 """
188 """
198
189
199 if old == new:
190 if old == new:
200 log.debug('skipping group rename')
191 log.debug('skipping group rename')
201 return
192 return
202
193
203 log.debug('renaming repository group from %s to %s', old, new)
194 log.debug('renaming repository group from %s to %s', old, new)
204
195
205 old_path = os.path.join(self.repos_path, old)
196 old_path = os.path.join(self.repos_path, old)
206 new_path = os.path.join(self.repos_path, new)
197 new_path = os.path.join(self.repos_path, new)
207
198
208 log.debug('renaming repos paths from %s to %s', old_path, new_path)
199 log.debug('renaming repos paths from %s to %s', old_path, new_path)
209
200
210 if os.path.isdir(new_path):
201 if os.path.isdir(new_path):
211 raise Exception('Was trying to rename to already '
202 raise Exception('Was trying to rename to already '
212 'existing dir %s' % new_path)
203 'existing dir %s' % new_path)
213 shutil.move(old_path, new_path)
204 shutil.move(old_path, new_path)
214
205
215 def _delete_filesystem_group(self, group, force_delete=False):
206 def _delete_filesystem_group(self, group, force_delete=False):
216 """
207 """
217 Deletes a group from a filesystem
208 Deletes a group from a filesystem
218
209
219 :param group: instance of group from database
210 :param group: instance of group from database
220 :param force_delete: use shutil rmtree to remove all objects
211 :param force_delete: use shutil rmtree to remove all objects
221 """
212 """
222 paths = group.full_path.split(RepoGroup.url_sep())
213 paths = group.full_path.split(RepoGroup.url_sep())
223 paths = os.sep.join(paths)
214 paths = os.sep.join(paths)
224
215
225 rm_path = os.path.join(self.repos_path, paths)
216 rm_path = os.path.join(self.repos_path, paths)
226 log.info("Removing group %s", rm_path)
217 log.info("Removing group %s", rm_path)
227 # delete only if that path really exists
218 # delete only if that path really exists
228 if os.path.isdir(rm_path):
219 if os.path.isdir(rm_path):
229 if force_delete:
220 if force_delete:
230 shutil.rmtree(rm_path)
221 shutil.rmtree(rm_path)
231 else:
222 else:
232 # archive that group
223 # archive that group
233 _now = datetime.datetime.now()
224 _now = datetime.datetime.now()
234 _ms = str(_now.microsecond).rjust(6, '0')
225 _ms = str(_now.microsecond).rjust(6, '0')
235 _d = 'rm__{}_GROUP_{}'.format(
226 _d = 'rm__{}_GROUP_{}'.format(
236 _now.strftime('%Y%m%d_%H%M%S_' + _ms), group.name)
227 _now.strftime('%Y%m%d_%H%M%S_' + _ms), group.name)
237 shutil.move(rm_path, os.path.join(self.repos_path, _d))
228 shutil.move(rm_path, os.path.join(self.repos_path, _d))
238
229
239 def create(self, group_name, group_description, owner, just_db=False,
230 def create(self, group_name, group_description, owner, just_db=False,
240 copy_permissions=False, personal=None, commit_early=True):
231 copy_permissions=False, personal=None, commit_early=True):
241
232
242 (group_name_cleaned,
233 (group_name_cleaned,
243 parent_group_name) = RepoGroupModel()._get_group_name_and_parent(group_name)
234 parent_group_name) = RepoGroupModel()._get_group_name_and_parent(group_name)
244
235
245 parent_group = None
236 parent_group = None
246 if parent_group_name:
237 if parent_group_name:
247 parent_group = self._get_repo_group(parent_group_name)
238 parent_group = self._get_repo_group(parent_group_name)
248 if not parent_group:
239 if not parent_group:
249 # we tried to create a nested group, but the parent is not
240 # we tried to create a nested group, but the parent is not
250 # existing
241 # existing
251 raise ValueError(
242 raise ValueError(
252 'Parent group `%s` given in `%s` group name '
243 'Parent group `%s` given in `%s` group name '
253 'does not exist yet.' % (parent_group_name, group_name))
244 'does not exist yet.' % (parent_group_name, group_name))
254
245
255 # because we are doing a cleanup, we need to check if such a directory
246 # because we are doing a cleanup, we need to check if such a directory
256 # already exists. If we don't do that we can accidentally delete an
247 # already exists. If we don't do that we can accidentally delete an
257 # existing directory via the cleanup, which can cause data issues, since
248 # existing directory via the cleanup, which can cause data issues, since
258 # delete renames the folder to a special syntax that later cleanup
249 # delete renames the folder to a special syntax that later cleanup
259 # functions can delete
250 # functions can delete
260 cleanup_group = self.check_exist_filesystem(group_name,
251 cleanup_group = self.check_exist_filesystem(group_name,
261 exc_on_failure=False)
252 exc_on_failure=False)
262 user = self._get_user(owner)
253 user = self._get_user(owner)
263 if not user:
254 if not user:
264 raise ValueError('Owner %s not found as rhodecode user' % owner)
255 raise ValueError('Owner %s not found as rhodecode user' % owner)
265
256
266 try:
257 try:
267 new_repo_group = RepoGroup()
258 new_repo_group = RepoGroup()
268 new_repo_group.user = user
259 new_repo_group.user = user
269 new_repo_group.group_description = group_description or group_name
260 new_repo_group.group_description = group_description or group_name
270 new_repo_group.parent_group = parent_group
261 new_repo_group.parent_group = parent_group
271 new_repo_group.group_name = group_name
262 new_repo_group.group_name = group_name
272 new_repo_group.personal = personal
263 new_repo_group.personal = personal
273
264
274 self.sa.add(new_repo_group)
265 self.sa.add(new_repo_group)
275
266
276 # create an ADMIN permission for owner except if we're super admin,
267 # create an ADMIN permission for owner except if we're super admin,
277 # later owner should go into the owner field of groups
268 # later owner should go into the owner field of groups
278 if not user.is_admin:
269 if not user.is_admin:
279 self.grant_user_permission(repo_group=new_repo_group,
270 self.grant_user_permission(repo_group=new_repo_group,
280 user=owner, perm='group.admin')
271 user=owner, perm='group.admin')
281
272
282 if parent_group and copy_permissions:
273 if parent_group and copy_permissions:
283 # copy permissions from parent
274 # copy permissions from parent
284 user_perms = UserRepoGroupToPerm.query() \
275 user_perms = UserRepoGroupToPerm.query() \
285 .filter(UserRepoGroupToPerm.group == parent_group).all()
276 .filter(UserRepoGroupToPerm.group == parent_group).all()
286
277
287 group_perms = UserGroupRepoGroupToPerm.query() \
278 group_perms = UserGroupRepoGroupToPerm.query() \
288 .filter(UserGroupRepoGroupToPerm.group == parent_group).all()
279 .filter(UserGroupRepoGroupToPerm.group == parent_group).all()
289
280
290 for perm in user_perms:
281 for perm in user_perms:
291 # don't copy over the permission for the user who is creating
282 # don't copy over the permission for the user who is creating
292 # this group; if they are not a super admin they get the admin
283 # this group; if they are not a super admin they get the admin
293 # permission set above
284 # permission set above
294 if perm.user != user or user.is_admin:
285 if perm.user != user or user.is_admin:
295 UserRepoGroupToPerm.create(
286 UserRepoGroupToPerm.create(
296 perm.user, new_repo_group, perm.permission)
287 perm.user, new_repo_group, perm.permission)
297
288
298 for perm in group_perms:
289 for perm in group_perms:
299 UserGroupRepoGroupToPerm.create(
290 UserGroupRepoGroupToPerm.create(
300 perm.users_group, new_repo_group, perm.permission)
291 perm.users_group, new_repo_group, perm.permission)
301 else:
292 else:
302 perm_obj = self._create_default_perms(new_repo_group)
293 perm_obj = self._create_default_perms(new_repo_group)
303 self.sa.add(perm_obj)
294 self.sa.add(perm_obj)
304
295
305 # now commit the changes, earlier so we are sure everything is in
296 # now commit the changes, earlier so we are sure everything is in
306 # the database.
297 # the database.
307 if commit_early:
298 if commit_early:
308 self.sa.commit()
299 self.sa.commit()
309 if not just_db:
300 if not just_db:
310 self._create_group(new_repo_group.group_name)
301 self._create_group(new_repo_group.group_name)
311
302
312 # trigger the post hook
303 # trigger the post hook
313 from rhodecode.lib import hooks_base
304 from rhodecode.lib import hooks_base
314 repo_group = RepoGroup.get_by_group_name(group_name)
305 repo_group = RepoGroup.get_by_group_name(group_name)
315
306
316 # update repo group commit caches initially
307 # update repo group commit caches initially
317 repo_group.update_commit_cache()
308 repo_group.update_commit_cache()
318
309
319 hooks_base.create_repository_group(
310 hooks_base.create_repository_group(
320 created_by=user.username, **repo_group.get_dict())
311 created_by=user.username, **repo_group.get_dict())
321
312
322 # Trigger create event.
313 # Trigger create event.
323 events.trigger(events.RepoGroupCreateEvent(repo_group))
314 events.trigger(events.RepoGroupCreateEvent(repo_group))
324
315
325 return new_repo_group
316 return new_repo_group
326 except Exception:
317 except Exception:
327 self.sa.rollback()
318 self.sa.rollback()
328 log.exception('Exception occurred when creating repository group, '
319 log.exception('Exception occurred when creating repository group, '
329 'doing cleanup...')
320 'doing cleanup...')
330 # rollback things manually !
321 # rollback things manually !
331 repo_group = RepoGroup.get_by_group_name(group_name)
322 repo_group = RepoGroup.get_by_group_name(group_name)
332 if repo_group:
323 if repo_group:
333 RepoGroup.delete(repo_group.group_id)
324 RepoGroup.delete(repo_group.group_id)
334 self.sa.commit()
325 self.sa.commit()
335 if cleanup_group:
326 if cleanup_group:
336 RepoGroupModel()._delete_filesystem_group(repo_group)
327 RepoGroupModel()._delete_filesystem_group(repo_group)
337 raise
328 raise
338
329
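# A minimal sketch of creating a nested group with the create() method above,
# assuming the parent group 'company' already exists and 'admin' is a valid
# owner; with the default commit_early=True the method commits by itself.
new_group = RepoGroupModel().create(
    group_name='company/projects',
    group_description='Project repositories',
    owner='admin',
    copy_permissions=True)
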
339 def update_permissions(
330 def update_permissions(
340 self, repo_group, perm_additions=None, perm_updates=None,
331 self, repo_group, perm_additions=None, perm_updates=None,
341 perm_deletions=None, recursive=None, check_perms=True,
332 perm_deletions=None, recursive=None, check_perms=True,
342 cur_user=None):
333 cur_user=None):
343 from rhodecode.model.repo import RepoModel
334 from rhodecode.model.repo import RepoModel
344 from rhodecode.lib.auth import HasUserGroupPermissionAny
335 from rhodecode.lib.auth import HasUserGroupPermissionAny
345
336
346 if not perm_additions:
337 if not perm_additions:
347 perm_additions = []
338 perm_additions = []
348 if not perm_updates:
339 if not perm_updates:
349 perm_updates = []
340 perm_updates = []
350 if not perm_deletions:
341 if not perm_deletions:
351 perm_deletions = []
342 perm_deletions = []
352
343
353 req_perms = ('usergroup.read', 'usergroup.write', 'usergroup.admin')
344 req_perms = ('usergroup.read', 'usergroup.write', 'usergroup.admin')
354
345
355 changes = {
346 changes = {
356 'added': [],
347 'added': [],
357 'updated': [],
348 'updated': [],
358 'deleted': [],
349 'deleted': [],
359 'default_user_changed': None
350 'default_user_changed': None
360 }
351 }
361
352
362 def _set_perm_user(obj, user, perm):
353 def _set_perm_user(obj, user, perm):
363 if isinstance(obj, RepoGroup):
354 if isinstance(obj, RepoGroup):
364 self.grant_user_permission(
355 self.grant_user_permission(
365 repo_group=obj, user=user, perm=perm)
356 repo_group=obj, user=user, perm=perm)
366 elif isinstance(obj, Repository):
357 elif isinstance(obj, Repository):
367 # private repos will not allow changing the default
358 # private repos will not allow changing the default
368 # permissions using recursive mode
359 # permissions using recursive mode
369 if obj.private and user == User.DEFAULT_USER:
360 if obj.private and user == User.DEFAULT_USER:
370 return
361 return
371
362
372 # we set group permission but we have to switch to repo
363 # we set group permission but we have to switch to repo
373 # permission
364 # permission
374 perm = perm.replace('group.', 'repository.')
365 perm = perm.replace('group.', 'repository.')
375 RepoModel().grant_user_permission(
366 RepoModel().grant_user_permission(
376 repo=obj, user=user, perm=perm)
367 repo=obj, user=user, perm=perm)
377
368
378 def _set_perm_group(obj, users_group, perm):
369 def _set_perm_group(obj, users_group, perm):
379 if isinstance(obj, RepoGroup):
370 if isinstance(obj, RepoGroup):
380 self.grant_user_group_permission(
371 self.grant_user_group_permission(
381 repo_group=obj, group_name=users_group, perm=perm)
372 repo_group=obj, group_name=users_group, perm=perm)
382 elif isinstance(obj, Repository):
373 elif isinstance(obj, Repository):
383 # we set group permission but we have to switch to repo
374 # we set group permission but we have to switch to repo
384 # permission
375 # permission
385 perm = perm.replace('group.', 'repository.')
376 perm = perm.replace('group.', 'repository.')
386 RepoModel().grant_user_group_permission(
377 RepoModel().grant_user_group_permission(
387 repo=obj, group_name=users_group, perm=perm)
378 repo=obj, group_name=users_group, perm=perm)
388
379
389 def _revoke_perm_user(obj, user):
380 def _revoke_perm_user(obj, user):
390 if isinstance(obj, RepoGroup):
381 if isinstance(obj, RepoGroup):
391 self.revoke_user_permission(repo_group=obj, user=user)
382 self.revoke_user_permission(repo_group=obj, user=user)
392 elif isinstance(obj, Repository):
383 elif isinstance(obj, Repository):
393 RepoModel().revoke_user_permission(repo=obj, user=user)
384 RepoModel().revoke_user_permission(repo=obj, user=user)
394
385
395 def _revoke_perm_group(obj, user_group):
386 def _revoke_perm_group(obj, user_group):
396 if isinstance(obj, RepoGroup):
387 if isinstance(obj, RepoGroup):
397 self.revoke_user_group_permission(
388 self.revoke_user_group_permission(
398 repo_group=obj, group_name=user_group)
389 repo_group=obj, group_name=user_group)
399 elif isinstance(obj, Repository):
390 elif isinstance(obj, Repository):
400 RepoModel().revoke_user_group_permission(
391 RepoModel().revoke_user_group_permission(
401 repo=obj, group_name=user_group)
392 repo=obj, group_name=user_group)
402
393
403 # start updates
394 # start updates
404 log.debug('Now updating permissions for %s in recursive mode:%s',
395 log.debug('Now updating permissions for %s in recursive mode:%s',
405 repo_group, recursive)
396 repo_group, recursive)
406
397
407 # initialize check function, we'll call that multiple times
398 # initialize check function, we'll call that multiple times
408 has_group_perm = HasUserGroupPermissionAny(*req_perms)
399 has_group_perm = HasUserGroupPermissionAny(*req_perms)
409
400
410 for obj in repo_group.recursive_groups_and_repos():
401 for obj in repo_group.recursive_groups_and_repos():
411 # iterated obj is an instance of a repos group or repository in
402 # iterated obj is an instance of a repos group or repository in
412 # that group, recursive option can be: none, repos, groups, all
403 # that group, recursive option can be: none, repos, groups, all
413 if recursive == 'all':
404 if recursive == 'all':
414 obj = obj
405 obj = obj
415 elif recursive == 'repos':
406 elif recursive == 'repos':
416 # skip groups, other than this one
407 # skip groups, other than this one
417 if isinstance(obj, RepoGroup) and not obj == repo_group:
408 if isinstance(obj, RepoGroup) and not obj == repo_group:
418 continue
409 continue
419 elif recursive == 'groups':
410 elif recursive == 'groups':
420 # skip repos
411 # skip repos
421 if isinstance(obj, Repository):
412 if isinstance(obj, Repository):
422 continue
413 continue
423 else: # recursive == 'none':
414 else: # recursive == 'none':
424 # DEFAULT option - don't apply to iterated objects
415 # DEFAULT option - don't apply to iterated objects
425 # also, we break at the end of this loop if we are not
416 # also, we break at the end of this loop if we are not
426 # in recursive mode
417 # in recursive mode
427 obj = repo_group
418 obj = repo_group
428
419
429 change_obj = obj.get_api_data()
420 change_obj = obj.get_api_data()
430
421
431 # update permissions
422 # update permissions
432 for member_id, perm, member_type in perm_updates:
423 for member_id, perm, member_type in perm_updates:
433 member_id = int(member_id)
424 member_id = int(member_id)
434 if member_type == 'user':
425 if member_type == 'user':
435 member_name = User.get(member_id).username
426 member_name = User.get(member_id).username
436 if isinstance(obj, RepoGroup) and obj == repo_group and member_name == User.DEFAULT_USER:
427 if isinstance(obj, RepoGroup) and obj == repo_group and member_name == User.DEFAULT_USER:
437 # NOTE(dan): detect if we changed permissions for default user
428 # NOTE(dan): detect if we changed permissions for default user
438 perm_obj = self.sa.query(UserRepoGroupToPerm) \
429 perm_obj = self.sa.query(UserRepoGroupToPerm) \
439 .filter(UserRepoGroupToPerm.user_id == member_id) \
430 .filter(UserRepoGroupToPerm.user_id == member_id) \
440 .filter(UserRepoGroupToPerm.group == repo_group) \
431 .filter(UserRepoGroupToPerm.group == repo_group) \
441 .scalar()
432 .scalar()
442 if perm_obj and perm_obj.permission.permission_name != perm:
433 if perm_obj and perm_obj.permission.permission_name != perm:
443 changes['default_user_changed'] = True
434 changes['default_user_changed'] = True
444
435
445 # this updates also current one if found
436 # this updates also current one if found
446 _set_perm_user(obj, user=member_id, perm=perm)
437 _set_perm_user(obj, user=member_id, perm=perm)
447 elif member_type == 'user_group':
438 elif member_type == 'user_group':
448 member_name = UserGroup.get(member_id).users_group_name
439 member_name = UserGroup.get(member_id).users_group_name
449 if not check_perms or has_group_perm(member_name,
440 if not check_perms or has_group_perm(member_name,
450 user=cur_user):
441 user=cur_user):
451 _set_perm_group(obj, users_group=member_id, perm=perm)
442 _set_perm_group(obj, users_group=member_id, perm=perm)
452 else:
443 else:
453 raise ValueError("member_type must be 'user' or 'user_group' "
444 raise ValueError("member_type must be 'user' or 'user_group' "
454 "got {} instead".format(member_type))
445 "got {} instead".format(member_type))
455
446
456 changes['updated'].append(
447 changes['updated'].append(
457 {'change_obj': change_obj, 'type': member_type,
448 {'change_obj': change_obj, 'type': member_type,
458 'id': member_id, 'name': member_name, 'new_perm': perm})
449 'id': member_id, 'name': member_name, 'new_perm': perm})
459
450
460 # set new permissions
451 # set new permissions
461 for member_id, perm, member_type in perm_additions:
452 for member_id, perm, member_type in perm_additions:
462 member_id = int(member_id)
453 member_id = int(member_id)
463 if member_type == 'user':
454 if member_type == 'user':
464 member_name = User.get(member_id).username
455 member_name = User.get(member_id).username
465 _set_perm_user(obj, user=member_id, perm=perm)
456 _set_perm_user(obj, user=member_id, perm=perm)
466 elif member_type == 'user_group':
457 elif member_type == 'user_group':
467 # check if we have permissions to alter this usergroup
458 # check if we have permissions to alter this usergroup
468 member_name = UserGroup.get(member_id).users_group_name
459 member_name = UserGroup.get(member_id).users_group_name
469 if not check_perms or has_group_perm(member_name,
460 if not check_perms or has_group_perm(member_name,
470 user=cur_user):
461 user=cur_user):
471 _set_perm_group(obj, users_group=member_id, perm=perm)
462 _set_perm_group(obj, users_group=member_id, perm=perm)
472 else:
463 else:
473 raise ValueError("member_type must be 'user' or 'user_group' "
464 raise ValueError("member_type must be 'user' or 'user_group' "
474 "got {} instead".format(member_type))
465 "got {} instead".format(member_type))
475
466
476 changes['added'].append(
467 changes['added'].append(
477 {'change_obj': change_obj, 'type': member_type,
468 {'change_obj': change_obj, 'type': member_type,
478 'id': member_id, 'name': member_name, 'new_perm': perm})
469 'id': member_id, 'name': member_name, 'new_perm': perm})
479
470
480 # delete permissions
471 # delete permissions
481 for member_id, perm, member_type in perm_deletions:
472 for member_id, perm, member_type in perm_deletions:
482 member_id = int(member_id)
473 member_id = int(member_id)
483 if member_type == 'user':
474 if member_type == 'user':
484 member_name = User.get(member_id).username
475 member_name = User.get(member_id).username
485 _revoke_perm_user(obj, user=member_id)
476 _revoke_perm_user(obj, user=member_id)
486 elif member_type == 'user_group':
477 elif member_type == 'user_group':
487 # check if we have permissions to alter this usergroup
478 # check if we have permissions to alter this usergroup
488 member_name = UserGroup.get(member_id).users_group_name
479 member_name = UserGroup.get(member_id).users_group_name
489 if not check_perms or has_group_perm(member_name,
480 if not check_perms or has_group_perm(member_name,
490 user=cur_user):
481 user=cur_user):
491 _revoke_perm_group(obj, user_group=member_id)
482 _revoke_perm_group(obj, user_group=member_id)
492 else:
483 else:
493 raise ValueError("member_type must be 'user' or 'user_group' "
484 raise ValueError("member_type must be 'user' or 'user_group' "
494 "got {} instead".format(member_type))
485 "got {} instead".format(member_type))
495
486
496 changes['deleted'].append(
487 changes['deleted'].append(
497 {'change_obj': change_obj, 'type': member_type,
488 {'change_obj': change_obj, 'type': member_type,
498 'id': member_id, 'name': member_name, 'new_perm': perm})
489 'id': member_id, 'name': member_name, 'new_perm': perm})
499
490
500 # if it's not recursive call for all,repos,groups
491 # if it's not recursive call for all,repos,groups
501 # break the loop and don't proceed with other changes
492 # break the loop and don't proceed with other changes
502 if recursive not in ['all', 'repos', 'groups']:
493 if recursive not in ['all', 'repos', 'groups']:
503 break
494 break
504
495
505 return changes
496 return changes
506
497
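For reference, a minimal plain-Python sketch of the `changes` structure that update_permissions returns: three lists keyed by the kind of change, each entry describing the member and the permission involved. The concrete names and ids below are illustrative only.

# Hypothetical example of the returned structure; values are made up.
changes = {
    'added':   [{'change_obj': '<RepoGroup projects/backend>', 'type': 'user',
                 'id': 2, 'name': 'jane', 'new_perm': 'group.write'}],
    'updated': [{'change_obj': '<RepoGroup projects/backend>', 'type': 'user_group',
                 'id': 7, 'name': 'devs', 'new_perm': 'group.read'}],
    'deleted': [{'change_obj': '<RepoGroup projects/backend>', 'type': 'user',
                 'id': 5, 'name': 'bob', 'new_perm': 'group.admin'}],
}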
507 def update(self, repo_group, form_data):
498 def update(self, repo_group, form_data):
508 try:
499 try:
509 repo_group = self._get_repo_group(repo_group)
500 repo_group = self._get_repo_group(repo_group)
510 old_path = repo_group.full_path
501 old_path = repo_group.full_path
511
502
512 # change properties
503 # change properties
513 if 'group_description' in form_data:
504 if 'group_description' in form_data:
514 repo_group.group_description = form_data['group_description']
505 repo_group.group_description = form_data['group_description']
515
506
516 if 'enable_locking' in form_data:
507 if 'enable_locking' in form_data:
517 repo_group.enable_locking = form_data['enable_locking']
508 repo_group.enable_locking = form_data['enable_locking']
518
509
519 if 'group_parent_id' in form_data:
510 if 'group_parent_id' in form_data:
520 parent_group = (
511 parent_group = (
521 self._get_repo_group(form_data['group_parent_id']))
512 self._get_repo_group(form_data['group_parent_id']))
522 repo_group.group_parent_id = (
513 repo_group.group_parent_id = (
523 parent_group.group_id if parent_group else None)
514 parent_group.group_id if parent_group else None)
524 repo_group.parent_group = parent_group
515 repo_group.parent_group = parent_group
525
516
526 # mikhail: to update the full_path, we have to explicitly
517 # mikhail: to update the full_path, we have to explicitly
527 # update group_name
518 # update group_name
528 group_name = form_data.get('group_name', repo_group.name)
519 group_name = form_data.get('group_name', repo_group.name)
529 repo_group.group_name = repo_group.get_new_name(group_name)
520 repo_group.group_name = repo_group.get_new_name(group_name)
530
521
531 new_path = repo_group.full_path
522 new_path = repo_group.full_path
532
523
533 affected_user_ids = []
524 affected_user_ids = []
534 if 'user' in form_data:
525 if 'user' in form_data:
535 old_owner_id = repo_group.user.user_id
526 old_owner_id = repo_group.user.user_id
536 new_owner = User.get_by_username(form_data['user'])
527 new_owner = User.get_by_username(form_data['user'])
537 repo_group.user = new_owner
528 repo_group.user = new_owner
538
529
539 if old_owner_id != new_owner.user_id:
530 if old_owner_id != new_owner.user_id:
540 affected_user_ids = [new_owner.user_id, old_owner_id]
531 affected_user_ids = [new_owner.user_id, old_owner_id]
541
532
542 self.sa.add(repo_group)
533 self.sa.add(repo_group)
543
534
544 # iterate over all members of this group and apply fixes
535 # iterate over all members of this group and apply fixes
545 # set locking if given
536 # set locking if given
546 # if obj is a RepoGroup, also fix the name of the group according
537 # if obj is a RepoGroup, also fix the name of the group according
547 # to the parent
538 # to the parent
548 # if obj is a Repo, fix its name
539 # if obj is a Repo, fix its name
549 # this can be a potentially heavy operation
540 # this can be a potentially heavy operation
550 for obj in repo_group.recursive_groups_and_repos():
541 for obj in repo_group.recursive_groups_and_repos():
551 # set the value from its parent
542 # set the value from its parent
552 obj.enable_locking = repo_group.enable_locking
543 obj.enable_locking = repo_group.enable_locking
553 if isinstance(obj, RepoGroup):
544 if isinstance(obj, RepoGroup):
554 new_name = obj.get_new_name(obj.name)
545 new_name = obj.get_new_name(obj.name)
555 log.debug('Fixing group %s to new name %s',
546 log.debug('Fixing group %s to new name %s',
556 obj.group_name, new_name)
547 obj.group_name, new_name)
557 obj.group_name = new_name
548 obj.group_name = new_name
558
549
559 elif isinstance(obj, Repository):
550 elif isinstance(obj, Repository):
560 # we need to get all repositories from this new group and
551 # we need to get all repositories from this new group and
561 # rename them according to the new group path
552 # rename them according to the new group path
562 new_name = obj.get_new_name(obj.just_name)
553 new_name = obj.get_new_name(obj.just_name)
563 log.debug('Fixing repo %s to new name %s',
554 log.debug('Fixing repo %s to new name %s',
564 obj.repo_name, new_name)
555 obj.repo_name, new_name)
565 obj.repo_name = new_name
556 obj.repo_name = new_name
566
557
567 self.sa.add(obj)
558 self.sa.add(obj)
568
559
569 self._rename_group(old_path, new_path)
560 self._rename_group(old_path, new_path)
570
561
571 # Trigger update event.
562 # Trigger update event.
572 events.trigger(events.RepoGroupUpdateEvent(repo_group))
563 events.trigger(events.RepoGroupUpdateEvent(repo_group))
573
564
574 if affected_user_ids:
565 if affected_user_ids:
575 PermissionModel().trigger_permission_flush(affected_user_ids)
566 PermissionModel().trigger_permission_flush(affected_user_ids)
576
567
577 return repo_group
568 return repo_group
578 except Exception:
569 except Exception:
579 log.error(traceback.format_exc())
570 log.error(traceback.format_exc())
580 raise
571 raise
581
572
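A hedged usage sketch of the update() call above. The form_data keys mirror the ones handled in the method; the group name, owner and import path are assumptions, and a running RhodeCode environment is required.

# Illustrative only -- assumes a configured RhodeCode session and an existing group.
from rhodecode.model.repo_group import RepoGroupModel  # assumed import path

form_data = {
    'group_name': 'backend',                   # triggers the full_path rename cascade
    'group_description': 'Backend repositories',
    'enable_locking': False,                   # propagated to all child groups and repos
    'group_parent_id': None,                   # move the group to the top level
    'user': 'admin',                           # new owner username
}
RepoGroupModel().update('old-name/backend', form_data)
# note: persisting normally also requires committing the DB session (omitted here)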
582 def delete(self, repo_group, force_delete=False, fs_remove=True):
573 def delete(self, repo_group, force_delete=False, fs_remove=True):
583 repo_group = self._get_repo_group(repo_group)
574 repo_group = self._get_repo_group(repo_group)
584 if not repo_group:
575 if not repo_group:
585 return False
576 return False
586 try:
577 try:
587 self.sa.delete(repo_group)
578 self.sa.delete(repo_group)
588 if fs_remove:
579 if fs_remove:
589 self._delete_filesystem_group(repo_group, force_delete)
580 self._delete_filesystem_group(repo_group, force_delete)
590 else:
581 else:
591 log.debug('skipping removal from filesystem')
582 log.debug('skipping removal from filesystem')
592
583
593 # Trigger delete event.
584 # Trigger delete event.
594 events.trigger(events.RepoGroupDeleteEvent(repo_group))
585 events.trigger(events.RepoGroupDeleteEvent(repo_group))
595 return True
586 return True
596
587
597 except Exception:
588 except Exception:
598 log.error('Error removing repo_group %s', repo_group)
589 log.error('Error removing repo_group %s', repo_group)
599 raise
590 raise
600
591
601 def grant_user_permission(self, repo_group, user, perm):
592 def grant_user_permission(self, repo_group, user, perm):
602 """
593 """
603 Grant permission for user on given repository group, or update
594 Grant permission for user on given repository group, or update
604 existing one if found
595 existing one if found
605
596
606 :param repo_group: Instance of RepoGroup, repositories_group_id,
597 :param repo_group: Instance of RepoGroup, repositories_group_id,
607 or repositories_group name
598 or repositories_group name
608 :param user: Instance of User, user_id or username
599 :param user: Instance of User, user_id or username
609 :param perm: Instance of Permission, or permission_name
600 :param perm: Instance of Permission, or permission_name
610 """
601 """
611
602
612 repo_group = self._get_repo_group(repo_group)
603 repo_group = self._get_repo_group(repo_group)
613 user = self._get_user(user)
604 user = self._get_user(user)
614 permission = self._get_perm(perm)
605 permission = self._get_perm(perm)
615
606
616 # check if we have that permission already
607 # check if we have that permission already
617 obj = self.sa.query(UserRepoGroupToPerm)\
608 obj = self.sa.query(UserRepoGroupToPerm)\
618 .filter(UserRepoGroupToPerm.user == user)\
609 .filter(UserRepoGroupToPerm.user == user)\
619 .filter(UserRepoGroupToPerm.group == repo_group)\
610 .filter(UserRepoGroupToPerm.group == repo_group)\
620 .scalar()
611 .scalar()
621 if obj is None:
612 if obj is None:
622 # create a new one
613 # create a new one
623 obj = UserRepoGroupToPerm()
614 obj = UserRepoGroupToPerm()
624 obj.group = repo_group
615 obj.group = repo_group
625 obj.user = user
616 obj.user = user
626 obj.permission = permission
617 obj.permission = permission
627 self.sa.add(obj)
618 self.sa.add(obj)
628 log.debug('Granted perm %s to %s on %s', perm, user, repo_group)
619 log.debug('Granted perm %s to %s on %s', perm, user, repo_group)
629 action_logger_generic(
620 action_logger_generic(
630 'granted permission: {} to user: {} on repogroup: {}'.format(
621 'granted permission: {} to user: {} on repogroup: {}'.format(
631 perm, user, repo_group), namespace='security.repogroup')
622 perm, user, repo_group), namespace='security.repogroup')
632 return obj
623 return obj
633
624
634 def revoke_user_permission(self, repo_group, user):
625 def revoke_user_permission(self, repo_group, user):
635 """
626 """
636 Revoke permission for user on given repository group
627 Revoke permission for user on given repository group
637
628
638 :param repo_group: Instance of RepoGroup, repositories_group_id,
629 :param repo_group: Instance of RepoGroup, repositories_group_id,
639 or repositories_group name
630 or repositories_group name
640 :param user: Instance of User, user_id or username
631 :param user: Instance of User, user_id or username
641 """
632 """
642
633
643 repo_group = self._get_repo_group(repo_group)
634 repo_group = self._get_repo_group(repo_group)
644 user = self._get_user(user)
635 user = self._get_user(user)
645
636
646 obj = self.sa.query(UserRepoGroupToPerm)\
637 obj = self.sa.query(UserRepoGroupToPerm)\
647 .filter(UserRepoGroupToPerm.user == user)\
638 .filter(UserRepoGroupToPerm.user == user)\
648 .filter(UserRepoGroupToPerm.group == repo_group)\
639 .filter(UserRepoGroupToPerm.group == repo_group)\
649 .scalar()
640 .scalar()
650 if obj:
641 if obj:
651 self.sa.delete(obj)
642 self.sa.delete(obj)
652 log.debug('Revoked perm on %s on %s', repo_group, user)
643 log.debug('Revoked perm on %s on %s', repo_group, user)
653 action_logger_generic(
644 action_logger_generic(
654 'revoked permission from user: {} on repogroup: {}'.format(
645 'revoked permission from user: {} on repogroup: {}'.format(
655 user, repo_group), namespace='security.repogroup')
646 user, repo_group), namespace='security.repogroup')
656
647
657 def grant_user_group_permission(self, repo_group, group_name, perm):
648 def grant_user_group_permission(self, repo_group, group_name, perm):
658 """
649 """
659 Grant permission for user group on given repository group, or update
650 Grant permission for user group on given repository group, or update
660 existing one if found
651 existing one if found
661
652
662 :param repo_group: Instance of RepoGroup, repositories_group_id,
653 :param repo_group: Instance of RepoGroup, repositories_group_id,
663 or repositories_group name
654 or repositories_group name
664 :param group_name: Instance of UserGroup, users_group_id,
655 :param group_name: Instance of UserGroup, users_group_id,
665 or user group name
656 or user group name
666 :param perm: Instance of Permission, or permission_name
657 :param perm: Instance of Permission, or permission_name
667 """
658 """
668 repo_group = self._get_repo_group(repo_group)
659 repo_group = self._get_repo_group(repo_group)
669 group_name = self._get_user_group(group_name)
660 group_name = self._get_user_group(group_name)
670 permission = self._get_perm(perm)
661 permission = self._get_perm(perm)
671
662
672 # check if we have that permission already
663 # check if we have that permission already
673 obj = self.sa.query(UserGroupRepoGroupToPerm)\
664 obj = self.sa.query(UserGroupRepoGroupToPerm)\
674 .filter(UserGroupRepoGroupToPerm.group == repo_group)\
665 .filter(UserGroupRepoGroupToPerm.group == repo_group)\
675 .filter(UserGroupRepoGroupToPerm.users_group == group_name)\
666 .filter(UserGroupRepoGroupToPerm.users_group == group_name)\
676 .scalar()
667 .scalar()
677
668
678 if obj is None:
669 if obj is None:
679 # create new
670 # create new
680 obj = UserGroupRepoGroupToPerm()
671 obj = UserGroupRepoGroupToPerm()
681
672
682 obj.group = repo_group
673 obj.group = repo_group
683 obj.users_group = group_name
674 obj.users_group = group_name
684 obj.permission = permission
675 obj.permission = permission
685 self.sa.add(obj)
676 self.sa.add(obj)
686 log.debug('Granted perm %s to %s on %s', perm, group_name, repo_group)
677 log.debug('Granted perm %s to %s on %s', perm, group_name, repo_group)
687 action_logger_generic(
678 action_logger_generic(
688 'granted permission: {} to usergroup: {} on repogroup: {}'.format(
679 'granted permission: {} to usergroup: {} on repogroup: {}'.format(
689 perm, group_name, repo_group), namespace='security.repogroup')
680 perm, group_name, repo_group), namespace='security.repogroup')
690 return obj
681 return obj
691
682
692 def revoke_user_group_permission(self, repo_group, group_name):
683 def revoke_user_group_permission(self, repo_group, group_name):
693 """
684 """
694 Revoke permission for user group on given repository group
685 Revoke permission for user group on given repository group
695
686
696 :param repo_group: Instance of RepoGroup, repositories_group_id,
687 :param repo_group: Instance of RepoGroup, repositories_group_id,
697 or repositories_group name
688 or repositories_group name
698 :param group_name: Instance of UserGroup, users_group_id,
689 :param group_name: Instance of UserGroup, users_group_id,
699 or user group name
690 or user group name
700 """
691 """
701 repo_group = self._get_repo_group(repo_group)
692 repo_group = self._get_repo_group(repo_group)
702 group_name = self._get_user_group(group_name)
693 group_name = self._get_user_group(group_name)
703
694
704 obj = self.sa.query(UserGroupRepoGroupToPerm)\
695 obj = self.sa.query(UserGroupRepoGroupToPerm)\
705 .filter(UserGroupRepoGroupToPerm.group == repo_group)\
696 .filter(UserGroupRepoGroupToPerm.group == repo_group)\
706 .filter(UserGroupRepoGroupToPerm.users_group == group_name)\
697 .filter(UserGroupRepoGroupToPerm.users_group == group_name)\
707 .scalar()
698 .scalar()
708 if obj:
699 if obj:
709 self.sa.delete(obj)
700 self.sa.delete(obj)
710 log.debug('Revoked perm to %s on %s', repo_group, group_name)
701 log.debug('Revoked perm to %s on %s', repo_group, group_name)
711 action_logger_generic(
702 action_logger_generic(
712 'revoked permission from usergroup: {} on repogroup: {}'.format(
703 'revoked permission from usergroup: {} on repogroup: {}'.format(
713 group_name, repo_group), namespace='security.repogroup')
704 group_name, repo_group), namespace='security.repogroup')
714
705
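A short, hedged sketch of how the four grant/revoke helpers above are typically driven. The permission names follow the 'group.*' scheme used elsewhere in this module; the group, user and user-group names are hypothetical.

# Illustrative only -- assumes an existing repo group, user and user group.
from rhodecode.model.repo_group import RepoGroupModel  # assumed import path
model = RepoGroupModel()

model.grant_user_permission(repo_group='projects/backend', user='jane', perm='group.write')
model.grant_user_group_permission(repo_group='projects/backend', group_name='devs', perm='group.read')

model.revoke_user_permission(repo_group='projects/backend', user='jane')
model.revoke_user_group_permission(repo_group='projects/backend', group_name='devs')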
715 @classmethod
706 @classmethod
716 def update_commit_cache(cls, repo_groups=None):
707 def update_commit_cache(cls, repo_groups=None):
717 if not repo_groups:
708 if not repo_groups:
718 repo_groups = RepoGroup.getAll()
709 repo_groups = RepoGroup.getAll()
719 for repo_group in repo_groups:
710 for repo_group in repo_groups:
720 repo_group.update_commit_cache()
711 repo_group.update_commit_cache()
721
712
722 def get_repo_groups_as_dict(self, repo_group_list=None, admin=False,
713 def get_repo_groups_as_dict(self, repo_group_list=None, admin=False,
723 super_user_actions=False):
714 super_user_actions=False):
724
715
725 from pyramid.threadlocal import get_current_request
716 from pyramid.threadlocal import get_current_request
726 _render = get_current_request().get_partial_renderer(
717 _render = get_current_request().get_partial_renderer(
727 'rhodecode:templates/data_table/_dt_elements.mako')
718 'rhodecode:templates/data_table/_dt_elements.mako')
728 c = _render.get_call_context()
719 c = _render.get_call_context()
729 h = _render.get_helpers()
720 h = _render.get_helpers()
730
721
731 def quick_menu(repo_group_name):
722 def quick_menu(repo_group_name):
732 return _render('quick_repo_group_menu', repo_group_name)
723 return _render('quick_repo_group_menu', repo_group_name)
733
724
734 def repo_group_lnk(repo_group_name):
725 def repo_group_lnk(repo_group_name):
735 return _render('repo_group_name', repo_group_name)
726 return _render('repo_group_name', repo_group_name)
736
727
737 def last_change(last_change):
728 def last_change(last_change):
738 if admin and isinstance(last_change, datetime.datetime) and not last_change.tzinfo:
729 if admin and isinstance(last_change, datetime.datetime) and not last_change.tzinfo:
739 ts = time.time()
730 ts = time.time()
740 utc_offset = (datetime.datetime.fromtimestamp(ts)
731 utc_offset = (datetime.datetime.fromtimestamp(ts)
741 - datetime.datetime.utcfromtimestamp(ts)).total_seconds()
732 - datetime.datetime.utcfromtimestamp(ts)).total_seconds()
742 last_change = last_change + datetime.timedelta(seconds=utc_offset)
733 last_change = last_change + datetime.timedelta(seconds=utc_offset)
743 return _render("last_change", last_change)
734 return _render("last_change", last_change)
744
735
745 def desc(desc, personal):
736 def desc(desc, personal):
746 return _render(
737 return _render(
747 'repo_group_desc', desc, personal, c.visual.stylify_metatags)
738 'repo_group_desc', desc, personal, c.visual.stylify_metatags)
748
739
749 def repo_group_actions(repo_group_id, repo_group_name, gr_count):
740 def repo_group_actions(repo_group_id, repo_group_name, gr_count):
750 return _render(
741 return _render(
751 'repo_group_actions', repo_group_id, repo_group_name, gr_count)
742 'repo_group_actions', repo_group_id, repo_group_name, gr_count)
752
743
753 def repo_group_name(repo_group_name, children_groups):
744 def repo_group_name(repo_group_name, children_groups):
754 return _render("repo_group_name", repo_group_name, children_groups)
745 return _render("repo_group_name", repo_group_name, children_groups)
755
746
756 def user_profile(username):
747 def user_profile(username):
757 return _render('user_profile', username)
748 return _render('user_profile', username)
758
749
759 repo_group_data = []
750 repo_group_data = []
760 for group in repo_group_list:
751 for group in repo_group_list:
761 # NOTE(marcink): because we use only the raw column we need to load it like that
752 # NOTE(marcink): because we use only the raw column we need to load it like that
762 changeset_cache = RepoGroup._load_changeset_cache(
753 changeset_cache = RepoGroup._load_changeset_cache(
763 '', group._changeset_cache)
754 '', group._changeset_cache)
764 last_commit_change = RepoGroup._load_commit_change(changeset_cache)
755 last_commit_change = RepoGroup._load_commit_change(changeset_cache)
765 row = {
756 row = {
766 "menu": quick_menu(group.group_name),
757 "menu": quick_menu(group.group_name),
767 "name": repo_group_lnk(group.group_name),
758 "name": repo_group_lnk(group.group_name),
768 "name_raw": group.group_name,
759 "name_raw": group.group_name,
769
760
770 "last_change": last_change(last_commit_change),
761 "last_change": last_change(last_commit_change),
771
762
772 "last_changeset": "",
763 "last_changeset": "",
773 "last_changeset_raw": "",
764 "last_changeset_raw": "",
774
765
775 "desc": desc(h.escape(group.group_description), group.personal),
766 "desc": desc(h.escape(group.group_description), group.personal),
776 "top_level_repos": 0,
767 "top_level_repos": 0,
777 "owner": user_profile(group.User.username)
768 "owner": user_profile(group.User.username)
778 }
769 }
779 if admin:
770 if admin:
780 repo_count = group.repositories.count()
771 repo_count = group.repositories.count()
781 children_groups = list(map(
772 children_groups = list(map(
782 h.safe_str,
773 h.safe_str,
783 itertools.chain((g.name for g in group.parents),
774 itertools.chain((g.name for g in group.parents),
784 (x.name for x in [group]))))
775 (x.name for x in [group]))))
785 row.update({
776 row.update({
786 "action": repo_group_actions(
777 "action": repo_group_actions(
787 group.group_id, group.group_name, repo_count),
778 group.group_id, group.group_name, repo_count),
788 "top_level_repos": repo_count,
779 "top_level_repos": repo_count,
789 "name": repo_group_name(group.group_name, children_groups),
780 "name": repo_group_name(group.group_name, children_groups),
790
781
791 })
782 })
792 repo_group_data.append(row)
783 repo_group_data.append(row)
793
784
794 return repo_group_data
785 return repo_group_data
795
786
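The last_change() helper above shifts naive UTC timestamps into local wall-clock time before rendering. Below is a standalone sketch of that offset trick using only the standard library; the function name and sample date are made up.

import datetime
import time

def to_local_naive(naive_utc_dt):
    # Same offset trick as last_change() above: compute the current local/UTC
    # difference and shift the naive UTC value into local wall-clock time.
    ts = time.time()
    utc_offset = (datetime.datetime.fromtimestamp(ts)
                  - datetime.datetime.utcfromtimestamp(ts)).total_seconds()
    return naive_utc_dt + datetime.timedelta(seconds=utc_offset)

print(to_local_naive(datetime.datetime(2024, 1, 1, 12, 0)))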
796 def get_repo_groups_data_table(
787 def get_repo_groups_data_table(
797 self, draw, start, limit,
788 self, draw, start, limit,
798 search_q, order_by, order_dir,
789 search_q, order_by, order_dir,
799 auth_user, repo_group_id):
790 auth_user, repo_group_id):
800 from rhodecode.model.scm import RepoGroupList
791 from rhodecode.model.scm import RepoGroupList
801
792
802 _perms = ['group.read', 'group.write', 'group.admin']
793 _perms = ['group.read', 'group.write', 'group.admin']
803 repo_groups = RepoGroup.query() \
794 repo_groups = RepoGroup.query() \
804 .filter(RepoGroup.group_parent_id == repo_group_id) \
795 .filter(RepoGroup.group_parent_id == repo_group_id) \
805 .all()
796 .all()
806 auth_repo_group_list = RepoGroupList(
797 auth_repo_group_list = RepoGroupList(
807 repo_groups, perm_set=_perms,
798 repo_groups, perm_set=_perms,
808 extra_kwargs=dict(user=auth_user))
799 extra_kwargs=dict(user=auth_user))
809
800
810 allowed_ids = [-1]
801 allowed_ids = [-1]
811 for repo_group in auth_repo_group_list:
802 for repo_group in auth_repo_group_list:
812 allowed_ids.append(repo_group.group_id)
803 allowed_ids.append(repo_group.group_id)
813
804
814 repo_groups_data_total_count = RepoGroup.query() \
805 repo_groups_data_total_count = RepoGroup.query() \
815 .filter(RepoGroup.group_parent_id == repo_group_id) \
806 .filter(RepoGroup.group_parent_id == repo_group_id) \
816 .filter(or_(
807 .filter(or_(
817 # generate multiple IN clauses to work around IN-list size limitations
808 # generate multiple IN clauses to work around IN-list size limitations
818 *in_filter_generator(RepoGroup.group_id, allowed_ids))
809 *in_filter_generator(RepoGroup.group_id, allowed_ids))
819 ) \
810 ) \
820 .count()
811 .count()
821
812
822 base_q = Session.query(
813 base_q = Session.query(
823 RepoGroup.group_name,
814 RepoGroup.group_name,
824 RepoGroup.group_name_hash,
815 RepoGroup.group_name_hash,
825 RepoGroup.group_description,
816 RepoGroup.group_description,
826 RepoGroup.group_id,
817 RepoGroup.group_id,
827 RepoGroup.personal,
818 RepoGroup.personal,
828 RepoGroup.updated_on,
819 RepoGroup.updated_on,
829 RepoGroup._changeset_cache,
820 RepoGroup._changeset_cache,
830 User,
821 User,
831 ) \
822 ) \
832 .filter(RepoGroup.group_parent_id == repo_group_id) \
823 .filter(RepoGroup.group_parent_id == repo_group_id) \
833 .filter(or_(
824 .filter(or_(
834 # generate multiple IN clauses to work around IN-list size limitations
825 # generate multiple IN clauses to work around IN-list size limitations
835 *in_filter_generator(RepoGroup.group_id, allowed_ids))
826 *in_filter_generator(RepoGroup.group_id, allowed_ids))
836 ) \
827 ) \
837 .join(User, User.user_id == RepoGroup.user_id) \
828 .join(User, User.user_id == RepoGroup.user_id) \
838 .group_by(RepoGroup, User)
829 .group_by(RepoGroup, User)
839
830
840 repo_groups_data_total_filtered_count = base_q.count()
831 repo_groups_data_total_filtered_count = base_q.count()
841
832
842 sort_defined = False
833 sort_defined = False
843
834
844 if order_by == 'group_name':
835 if order_by == 'group_name':
845 sort_col = func.lower(RepoGroup.group_name)
836 sort_col = func.lower(RepoGroup.group_name)
846 sort_defined = True
837 sort_defined = True
847 elif order_by == 'user_username':
838 elif order_by == 'user_username':
848 sort_col = User.username
839 sort_col = User.username
849 else:
840 else:
850 sort_col = getattr(RepoGroup, order_by, None)
841 sort_col = getattr(RepoGroup, order_by, None)
851
842
852 if sort_defined or sort_col:
843 if sort_defined or sort_col:
853 if order_dir == 'asc':
844 if order_dir == 'asc':
854 sort_col = sort_col.asc()
845 sort_col = sort_col.asc()
855 else:
846 else:
856 sort_col = sort_col.desc()
847 sort_col = sort_col.desc()
857
848
858 base_q = base_q.order_by(sort_col)
849 base_q = base_q.order_by(sort_col)
859 base_q = base_q.offset(start).limit(limit)
850 base_q = base_q.offset(start).limit(limit)
860
851
861 repo_group_list = base_q.all()
852 repo_group_list = base_q.all()
862
853
863 repo_groups_data = RepoGroupModel().get_repo_groups_as_dict(
854 repo_groups_data = RepoGroupModel().get_repo_groups_as_dict(
864 repo_group_list=repo_group_list, admin=False)
855 repo_group_list=repo_group_list, admin=False)
865
856
866 data = ({
857 data = ({
867 'draw': draw,
858 'draw': draw,
868 'data': repo_groups_data,
859 'data': repo_groups_data,
869 'recordsTotal': repo_groups_data_total_count,
860 'recordsTotal': repo_groups_data_total_count,
870 'recordsFiltered': repo_groups_data_total_filtered_count,
861 'recordsFiltered': repo_groups_data_total_filtered_count,
871 })
862 })
872 return data
863 return data
873
864
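Both queries above OR together several IN clauses (via in_filter_generator) instead of issuing one huge IN list. Below is a minimal plain-Python sketch of that chunking idea; the helper name and chunk size are made up and this is not RhodeCode's actual implementation.

from itertools import islice

def chunked_in_filters(ids, chunk_size=900):
    # Hypothetical helper: split a large id list into fixed-size chunks so each
    # chunk can become its own IN (...) clause, OR-ed together, staying under
    # typical database limits on IN-list size / bound parameters.
    it = iter(ids)
    while True:
        chunk = list(islice(it, chunk_size))
        if not chunk:
            break
        yield chunk

print([len(chunk) for chunk in chunked_in_filters(range(1, 1001))])  # [900, 100]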
874 def _get_defaults(self, repo_group_name):
865 def _get_defaults(self, repo_group_name):
875 repo_group = RepoGroup.get_by_group_name(repo_group_name)
866 repo_group = RepoGroup.get_by_group_name(repo_group_name)
876
867
877 if repo_group is None:
868 if repo_group is None:
878 return None
869 return None
879
870
880 defaults = repo_group.get_dict()
871 defaults = repo_group.get_dict()
881 defaults['repo_group_name'] = repo_group.name
872 defaults['repo_group_name'] = repo_group.name
882 defaults['repo_group_description'] = repo_group.group_description
873 defaults['repo_group_description'] = repo_group.group_description
883 defaults['repo_group_enable_locking'] = repo_group.enable_locking
874 defaults['repo_group_enable_locking'] = repo_group.enable_locking
884
875
885 # we use -1, as this is how we mark an empty group in the HTML form
876 # we use -1, as this is how we mark an empty group in the HTML form
886 defaults['repo_group'] = defaults['group_parent_id'] or -1
877 defaults['repo_group'] = defaults['group_parent_id'] or -1
887
878
888 # fill owner
879 # fill owner
889 if repo_group.user:
880 if repo_group.user:
890 defaults.update({'user': repo_group.user.username})
881 defaults.update({'user': repo_group.user.username})
891 else:
882 else:
892 replacement_user = User.get_first_super_admin().username
883 replacement_user = User.get_first_super_admin().username
893 defaults.update({'user': replacement_user})
884 defaults.update({'user': replacement_user})
894
885
895 return defaults
886 return defaults
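For illustration, a hypothetical shape of what _get_defaults() produces for a group with no parent; note the -1 sentinel described in the comment above. Real dicts also carry the remaining RepoGroup columns from get_dict().

# Illustrative values only.
defaults = {
    'repo_group_name': 'backend',
    'repo_group_description': 'Backend repositories',
    'repo_group_enable_locking': False,
    'repo_group': -1,          # no parent group -> -1 sentinel used by the HTML form
    'user': 'admin',           # owner, falling back to the first super-admin if unset
}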
@@ -1,1062 +1,1053 b''
1 # Copyright (C) 2010-2023 RhodeCode GmbH
1 # Copyright (C) 2010-2023 RhodeCode GmbH
2 #
2 #
3 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
4 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
5 # (only), as published by the Free Software Foundation.
5 # (only), as published by the Free Software Foundation.
6 #
6 #
7 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
10 # GNU General Public License for more details.
11 #
11 #
12 # You should have received a copy of the GNU Affero General Public License
12 # You should have received a copy of the GNU Affero General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 #
14 #
15 # This program is dual-licensed. If you wish to learn more about the
15 # This program is dual-licensed. If you wish to learn more about the
16 # RhodeCode Enterprise Edition, including its added features, Support services,
16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18
18
19 """
19 """
20 Scm model for RhodeCode
20 Scm model for RhodeCode
21 """
21 """
22
22
23 import os.path
23 import os.path
24 import traceback
24 import traceback
25 import logging
25 import logging
26 import io
26 import io
27
27
28 from sqlalchemy import func
28 from sqlalchemy import func
29 from zope.cachedescriptors.property import Lazy as LazyProperty
29 from zope.cachedescriptors.property import Lazy as LazyProperty
30
30
31 import rhodecode
31 import rhodecode
32 from rhodecode.lib.str_utils import safe_bytes
32 from rhodecode.lib.str_utils import safe_bytes
33 from rhodecode.lib.vcs import get_backend
33 from rhodecode.lib.vcs import get_backend
34 from rhodecode.lib.vcs.exceptions import RepositoryError, NodeNotChangedError
34 from rhodecode.lib.vcs.exceptions import RepositoryError, NodeNotChangedError
35 from rhodecode.lib.vcs.nodes import FileNode
35 from rhodecode.lib.vcs.nodes import FileNode
36 from rhodecode.lib.vcs.backends.base import EmptyCommit
36 from rhodecode.lib.vcs.backends.base import EmptyCommit
37 from rhodecode.lib import helpers as h, rc_cache
37 from rhodecode.lib import helpers as h, rc_cache
38 from rhodecode.lib.auth import (
38 from rhodecode.lib.auth import (
39 HasRepoPermissionAny, HasRepoGroupPermissionAny,
39 HasRepoPermissionAny, HasRepoGroupPermissionAny,
40 HasUserGroupPermissionAny)
40 HasUserGroupPermissionAny)
41 from rhodecode.lib.exceptions import NonRelativePathError, IMCCommitError
41 from rhodecode.lib.exceptions import NonRelativePathError, IMCCommitError
42 from rhodecode.lib import hooks_utils
42 from rhodecode.lib import hooks_utils
43 from rhodecode.lib.utils import (
43 from rhodecode.lib.utils import (
44 get_filesystem_repos, make_db_config)
44 get_filesystem_repos, make_db_config)
45 from rhodecode.lib.str_utils import safe_str
45 from rhodecode.lib.str_utils import safe_str
46 from rhodecode.lib.system_info import get_system_info
46 from rhodecode.lib.system_info import get_system_info
47 from rhodecode.model import BaseModel
47 from rhodecode.model import BaseModel
48 from rhodecode.model.db import (
48 from rhodecode.model.db import (
49 or_, false, null,
49 or_, false, null,
50 Repository, CacheKey, UserFollowing, UserLog, User, RepoGroup,
50 Repository, CacheKey, UserFollowing, UserLog, User, RepoGroup,
51 PullRequest, FileStore)
51 PullRequest, FileStore)
52 from rhodecode.model.settings import VcsSettingsModel
52 from rhodecode.model.settings import VcsSettingsModel
53 from rhodecode.model.validation_schema.validators import url_validator, InvalidCloneUrl
53 from rhodecode.model.validation_schema.validators import url_validator, InvalidCloneUrl
54
54
55 log = logging.getLogger(__name__)
55 log = logging.getLogger(__name__)
56
56
57
57
58 class UserTemp(object):
58 class UserTemp(object):
59 def __init__(self, user_id):
59 def __init__(self, user_id):
60 self.user_id = user_id
60 self.user_id = user_id
61
61
62 def __repr__(self):
62 def __repr__(self):
63 return "<{}('id:{}')>".format(self.__class__.__name__, self.user_id)
63 return "<{}('id:{}')>".format(self.__class__.__name__, self.user_id)
64
64
65
65
66 class RepoTemp(object):
66 class RepoTemp(object):
67 def __init__(self, repo_id):
67 def __init__(self, repo_id):
68 self.repo_id = repo_id
68 self.repo_id = repo_id
69
69
70 def __repr__(self):
70 def __repr__(self):
71 return "<{}('id:{}')>".format(self.__class__.__name__, self.repo_id)
71 return "<{}('id:{}')>".format(self.__class__.__name__, self.repo_id)
72
72
73
73
74 class SimpleCachedRepoList(object):
74 class SimpleCachedRepoList(object):
75 """
75 """
76 Lighter version of iteration over repos without the scm initialisation,
76 Lighter version of iteration over repos without the scm initialisation,
77 and with cache usage
77 and with cache usage
78 """
78 """
79 def __init__(self, db_repo_list, repos_path, order_by=None, perm_set=None):
79 def __init__(self, db_repo_list, repos_path, order_by=None, perm_set=None):
80 self.db_repo_list = db_repo_list
80 self.db_repo_list = db_repo_list
81 self.repos_path = repos_path
81 self.repos_path = repos_path
82 self.order_by = order_by
82 self.order_by = order_by
83 self.reversed = (order_by or '').startswith('-')
83 self.reversed = (order_by or '').startswith('-')
84 if not perm_set:
84 if not perm_set:
85 perm_set = ['repository.read', 'repository.write',
85 perm_set = ['repository.read', 'repository.write',
86 'repository.admin']
86 'repository.admin']
87 self.perm_set = perm_set
87 self.perm_set = perm_set
88
88
89 def __len__(self):
89 def __len__(self):
90 return len(self.db_repo_list)
90 return len(self.db_repo_list)
91
91
92 def __repr__(self):
92 def __repr__(self):
93 return '<{} ({})>'.format(self.__class__.__name__, self.__len__())
93 return '<{} ({})>'.format(self.__class__.__name__, self.__len__())
94
94
95 def __iter__(self):
95 def __iter__(self):
96 for dbr in self.db_repo_list:
96 for dbr in self.db_repo_list:
97 # check permission at this level
97 # check permission at this level
98 has_perm = HasRepoPermissionAny(*self.perm_set)(
98 has_perm = HasRepoPermissionAny(*self.perm_set)(
99 dbr.repo_name, 'SimpleCachedRepoList check')
99 dbr.repo_name, 'SimpleCachedRepoList check')
100 if not has_perm:
100 if not has_perm:
101 continue
101 continue
102
102
103 tmp_d = {
103 tmp_d = {
104 'name': dbr.repo_name,
104 'name': dbr.repo_name,
105 'dbrepo': dbr.get_dict(),
105 'dbrepo': dbr.get_dict(),
106 'dbrepo_fork': dbr.fork.get_dict() if dbr.fork else {}
106 'dbrepo_fork': dbr.fork.get_dict() if dbr.fork else {}
107 }
107 }
108 yield tmp_d
108 yield tmp_d
109
109
110
110
111 class _PermCheckIterator(object):
111 class _PermCheckIterator(object):
112
112
113 def __init__(
113 def __init__(
114 self, obj_list, obj_attr, perm_set, perm_checker,
114 self, obj_list, obj_attr, perm_set, perm_checker,
115 extra_kwargs=None):
115 extra_kwargs=None):
116 """
116 """
117 Creates iterator from given list of objects, additionally
117 Creates iterator from given list of objects, additionally
118 checking permission for them from perm_set var
118 checking permission for them from perm_set var
119
119
120 :param obj_list: list of db objects
120 :param obj_list: list of db objects
121 :param obj_attr: attribute of object to pass into perm_checker
121 :param obj_attr: attribute of object to pass into perm_checker
122 :param perm_set: list of permissions to check
122 :param perm_set: list of permissions to check
123 :param perm_checker: callable to check permissions against
123 :param perm_checker: callable to check permissions against
124 """
124 """
125 self.obj_list = obj_list
125 self.obj_list = obj_list
126 self.obj_attr = obj_attr
126 self.obj_attr = obj_attr
127 self.perm_set = perm_set
127 self.perm_set = perm_set
128 self.perm_checker = perm_checker(*self.perm_set)
128 self.perm_checker = perm_checker(*self.perm_set)
129 self.extra_kwargs = extra_kwargs or {}
129 self.extra_kwargs = extra_kwargs or {}
130
130
131 def __len__(self):
131 def __len__(self):
132 return len(self.obj_list)
132 return len(self.obj_list)
133
133
134 def __repr__(self):
134 def __repr__(self):
135 return '<{} ({})>'.format(self.__class__.__name__, self.__len__())
135 return '<{} ({})>'.format(self.__class__.__name__, self.__len__())
136
136
137 def __iter__(self):
137 def __iter__(self):
138 for db_obj in self.obj_list:
138 for db_obj in self.obj_list:
139 # check permission at this level
139 # check permission at this level
140 # NOTE(marcink): the __dict__.get() is ~4x faster than getattr()
140 # NOTE(marcink): the __dict__.get() is ~4x faster than getattr()
141 name = db_obj.__dict__.get(self.obj_attr, None)
141 name = db_obj.__dict__.get(self.obj_attr, None)
142 if not self.perm_checker(name, self.__class__.__name__, **self.extra_kwargs):
142 if not self.perm_checker(name, self.__class__.__name__, **self.extra_kwargs):
143 continue
143 continue
144
144
145 yield db_obj
145 yield db_obj
146
146
147
147
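_PermCheckIterator is essentially a lazy filter over DB objects: read one attribute, ask a permission checker, and only yield approved items. A self-contained sketch of the same pattern with hypothetical names follows; a plain set stands in for the real permission checker.

class FilteredIterator:
    # Hypothetical, simplified stand-in for the pattern above: wrap a list of
    # objects and lazily yield only those a permission checker approves.
    def __init__(self, items, attr, checker):
        self.items, self.attr, self.checker = items, attr, checker

    def __iter__(self):
        for item in self.items:
            name = item.__dict__.get(self.attr)   # cheaper than getattr() here
            if self.checker(name):
                yield item

class Obj:
    def __init__(self, name):
        self.name = name

allowed = {'repo-a'}
print([o.name for o in FilteredIterator([Obj('repo-a'), Obj('repo-b')],
                                        'name', allowed.__contains__)])  # ['repo-a']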
148 class RepoList(_PermCheckIterator):
148 class RepoList(_PermCheckIterator):
149
149
150 def __init__(self, db_repo_list, perm_set=None, extra_kwargs=None):
150 def __init__(self, db_repo_list, perm_set=None, extra_kwargs=None):
151 if not perm_set:
151 if not perm_set:
152 perm_set = ['repository.read', 'repository.write', 'repository.admin']
152 perm_set = ['repository.read', 'repository.write', 'repository.admin']
153
153
154 super().__init__(
154 super().__init__(
155 obj_list=db_repo_list,
155 obj_list=db_repo_list,
156 obj_attr='_repo_name', perm_set=perm_set,
156 obj_attr='_repo_name', perm_set=perm_set,
157 perm_checker=HasRepoPermissionAny,
157 perm_checker=HasRepoPermissionAny,
158 extra_kwargs=extra_kwargs)
158 extra_kwargs=extra_kwargs)
159
159
160
160
161 class RepoGroupList(_PermCheckIterator):
161 class RepoGroupList(_PermCheckIterator):
162
162
163 def __init__(self, db_repo_group_list, perm_set=None, extra_kwargs=None):
163 def __init__(self, db_repo_group_list, perm_set=None, extra_kwargs=None):
164 if not perm_set:
164 if not perm_set:
165 perm_set = ['group.read', 'group.write', 'group.admin']
165 perm_set = ['group.read', 'group.write', 'group.admin']
166
166
167 super().__init__(
167 super().__init__(
168 obj_list=db_repo_group_list,
168 obj_list=db_repo_group_list,
169 obj_attr='_group_name', perm_set=perm_set,
169 obj_attr='_group_name', perm_set=perm_set,
170 perm_checker=HasRepoGroupPermissionAny,
170 perm_checker=HasRepoGroupPermissionAny,
171 extra_kwargs=extra_kwargs)
171 extra_kwargs=extra_kwargs)
172
172
173
173
174 class UserGroupList(_PermCheckIterator):
174 class UserGroupList(_PermCheckIterator):
175
175
176 def __init__(self, db_user_group_list, perm_set=None, extra_kwargs=None):
176 def __init__(self, db_user_group_list, perm_set=None, extra_kwargs=None):
177 if not perm_set:
177 if not perm_set:
178 perm_set = ['usergroup.read', 'usergroup.write', 'usergroup.admin']
178 perm_set = ['usergroup.read', 'usergroup.write', 'usergroup.admin']
179
179
180 super().__init__(
180 super().__init__(
181 obj_list=db_user_group_list,
181 obj_list=db_user_group_list,
182 obj_attr='users_group_name', perm_set=perm_set,
182 obj_attr='users_group_name', perm_set=perm_set,
183 perm_checker=HasUserGroupPermissionAny,
183 perm_checker=HasUserGroupPermissionAny,
184 extra_kwargs=extra_kwargs)
184 extra_kwargs=extra_kwargs)
185
185
186
186
187 class ScmModel(BaseModel):
187 class ScmModel(BaseModel):
188 """
188 """
189 Generic Scm Model
189 Generic Scm Model
190 """
190 """
191
191
192 @LazyProperty
193 def repos_path(self):
194 """
195 Gets the repositories root path from database
196 """
197
198 settings_model = VcsSettingsModel(sa=self.sa)
199 return settings_model.get_repos_location()
200
201 def repo_scan(self, repos_path=None):
192 def repo_scan(self, repos_path=None):
202 """
193 """
203 Listing of repositories in given path. This path should not be a
194 Listing of repositories in given path. This path should not be a
204 repository itself. Return a dictionary of repository objects
195 repository itself. Return a dictionary of repository objects
205
196
206 :param repos_path: path to directory containing repositories
197 :param repos_path: path to directory containing repositories
207 """
198 """
208
199
209 if repos_path is None:
200 if repos_path is None:
210 repos_path = self.repos_path
201 repos_path = self.repos_path
211
202
212 log.info('scanning for repositories in %s', repos_path)
203 log.info('scanning for repositories in %s', repos_path)
213
204
214 config = make_db_config()
205 config = make_db_config()
215 config.set('extensions', 'largefiles', '')
206 config.set('extensions', 'largefiles', '')
216 repos = {}
207 repos = {}
217
208
218 for name, path in get_filesystem_repos(repos_path, recursive=True):
209 for name, path in get_filesystem_repos(repos_path, recursive=True):
219 # the name needs to be decomposed and put back together using the /
210 # the name needs to be decomposed and put back together using the /
220 # since this is the internal storage separator for rhodecode
211 # since this is the internal storage separator for rhodecode
221 name = Repository.normalize_repo_name(name)
212 name = Repository.normalize_repo_name(name)
222
213
223 try:
214 try:
224 if name in repos:
215 if name in repos:
225 raise RepositoryError('Duplicate repository name %s '
216 raise RepositoryError('Duplicate repository name %s '
226 'found in %s' % (name, path))
217 'found in %s' % (name, path))
227 elif path[0] in rhodecode.BACKENDS:
218 elif path[0] in rhodecode.BACKENDS:
228 backend = get_backend(path[0])
219 backend = get_backend(path[0])
229 repos[name] = backend(path[1], config=config,
220 repos[name] = backend(path[1], config=config,
230 with_wire={"cache": False})
221 with_wire={"cache": False})
231 except OSError:
222 except OSError:
232 continue
223 continue
233 except RepositoryError:
224 except RepositoryError:
234 log.exception('Failed to create a repo')
225 log.exception('Failed to create a repo')
235 continue
226 continue
236
227
237 log.debug('found %s paths with repositories', len(repos))
228 log.debug('found %s paths with repositories', len(repos))
238 return repos
229 return repos
239
230
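A hedged usage sketch of repo_scan(): given a directory of repositories on disk it returns a mapping of normalized repo name to a VCS backend instance. The path below is hypothetical and a configured RhodeCode environment is assumed.

# Illustrative only -- requires a working RhodeCode setup and real repositories on disk.
from rhodecode.model.scm import ScmModel

found = ScmModel().repo_scan('/srv/repos')     # or repo_scan() to use the configured root
for repo_name, backend in sorted(found.items()):
    print(repo_name, backend.alias)            # e.g. 'project/lib git'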
240 def get_repos(self, all_repos=None, sort_key=None):
231 def get_repos(self, all_repos=None, sort_key=None):
241 """
232 """
242 Get all repositories from the db and for each repo create its
233 Get all repositories from the db and for each repo create its
243 backend instance and fill that backend with information from the database
234 backend instance and fill that backend with information from the database
244
235
245 :param all_repos: list of repository names as strings
236 :param all_repos: list of repository names as strings
246 give specific repositories list, good for filtering
237 give specific repositories list, good for filtering
247
238
248 :param sort_key: initial sorting of repositories
239 :param sort_key: initial sorting of repositories
249 """
240 """
250 if all_repos is None:
241 if all_repos is None:
251 all_repos = self.sa.query(Repository)\
242 all_repos = self.sa.query(Repository)\
252 .filter(Repository.group_id == null())\
243 .filter(Repository.group_id == null())\
253 .order_by(func.lower(Repository.repo_name)).all()
244 .order_by(func.lower(Repository.repo_name)).all()
254 repo_iter = SimpleCachedRepoList(
245 repo_iter = SimpleCachedRepoList(
255 all_repos, repos_path=self.repos_path, order_by=sort_key)
246 all_repos, repos_path=self.repos_path, order_by=sort_key)
256 return repo_iter
247 return repo_iter
257
248
258 @staticmethod
249 @staticmethod
259 def get_parent_commits(parent_commit, scm_instance):
250 def get_parent_commits(parent_commit, scm_instance):
260 if not parent_commit:
251 if not parent_commit:
261 parent_commit = EmptyCommit(alias=scm_instance.alias)
252 parent_commit = EmptyCommit(alias=scm_instance.alias)
262
253
263 if isinstance(parent_commit, EmptyCommit):
254 if isinstance(parent_commit, EmptyCommit):
264 # EmptyCommit means we're editing empty repository
255 # EmptyCommit means we're editing empty repository
265 parents = None
256 parents = None
266 else:
257 else:
267 parents = [parent_commit]
258 parents = [parent_commit]
268 return parent_commit, parents
259 return parent_commit, parents
269
260
270 def initialize_inmemory_vars(self, user, repo, message, author):
261 def initialize_inmemory_vars(self, user, repo, message, author):
271 """
262 """
272 Initialize node specific objects for further usage
263 Initialize node specific objects for further usage
273 """
264 """
274 user = self._get_user(user)
265 user = self._get_user(user)
275 scm_instance = repo.scm_instance(cache=False)
266 scm_instance = repo.scm_instance(cache=False)
276 message = safe_str(message)
267 message = safe_str(message)
277 commiter = user.full_contact
268 commiter = user.full_contact
278 author = safe_str(author) if author else commiter
269 author = safe_str(author) if author else commiter
279 imc = scm_instance.in_memory_commit
270 imc = scm_instance.in_memory_commit
280
271
281 return user, scm_instance, message, commiter, author, imc
272 return user, scm_instance, message, commiter, author, imc
282
273
283 def get_repo_groups(self, all_groups=None):
274 def get_repo_groups(self, all_groups=None):
284 if all_groups is None:
275 if all_groups is None:
285 all_groups = RepoGroup.query()\
276 all_groups = RepoGroup.query()\
286 .filter(RepoGroup.group_parent_id == null()).all()
277 .filter(RepoGroup.group_parent_id == null()).all()
287 return [x for x in RepoGroupList(all_groups)]
278 return [x for x in RepoGroupList(all_groups)]
288
279
289 def mark_for_invalidation(self, repo_name, delete=False):
280 def mark_for_invalidation(self, repo_name, delete=False):
290 """
281 """
291 Mark caches of this repo invalid in the database. `delete` flag
282 Mark caches of this repo invalid in the database. `delete` flag
292 removes the cache entries
283 removes the cache entries
293
284
294 :param repo_name: the repo_name for which caches should be marked
285 :param repo_name: the repo_name for which caches should be marked
295 invalid, or deleted
286 invalid, or deleted
296 :param delete: delete the entry keys instead of setting bool
287 :param delete: delete the entry keys instead of setting bool
297 flag on them, and also purge caches used by the dogpile
288 flag on them, and also purge caches used by the dogpile
298 """
289 """
299 repo = Repository.get_by_repo_name(repo_name)
290 repo = Repository.get_by_repo_name(repo_name)
300
291
301 if repo:
292 if repo:
302 repo_namespace_key = CacheKey.REPO_INVALIDATION_NAMESPACE.format(repo_id=repo.repo_id)
293 repo_namespace_key = CacheKey.REPO_INVALIDATION_NAMESPACE.format(repo_id=repo.repo_id)
303 CacheKey.set_invalidate(repo_namespace_key, delete=delete)
294 CacheKey.set_invalidate(repo_namespace_key, delete=delete)
304
295
305 repo_id = repo.repo_id
296 repo_id = repo.repo_id
306 config = repo._config
297 config = repo._config
307 config.set('extensions', 'largefiles', '')
298 config.set('extensions', 'largefiles', '')
308 repo.update_commit_cache(config=config, cs_cache=None)
299 repo.update_commit_cache(config=config, cs_cache=None)
309 if delete:
300 if delete:
310 cache_namespace_uid = f'cache_repo.{repo_id}'
301 cache_namespace_uid = f'cache_repo.{repo_id}'
311 rc_cache.clear_cache_namespace('cache_repo', cache_namespace_uid, method=rc_cache.CLEAR_INVALIDATE)
302 rc_cache.clear_cache_namespace('cache_repo', cache_namespace_uid, method=rc_cache.CLEAR_INVALIDATE)
312
303
313 def toggle_following_repo(self, follow_repo_id, user_id):
304 def toggle_following_repo(self, follow_repo_id, user_id):
314
305
315 f = self.sa.query(UserFollowing)\
306 f = self.sa.query(UserFollowing)\
316 .filter(UserFollowing.follows_repo_id == follow_repo_id)\
307 .filter(UserFollowing.follows_repo_id == follow_repo_id)\
317 .filter(UserFollowing.user_id == user_id).scalar()
308 .filter(UserFollowing.user_id == user_id).scalar()
318
309
319 if f is not None:
310 if f is not None:
320 try:
311 try:
321 self.sa.delete(f)
312 self.sa.delete(f)
322 return
313 return
323 except Exception:
314 except Exception:
324 log.error(traceback.format_exc())
315 log.error(traceback.format_exc())
325 raise
316 raise
326
317
327 try:
318 try:
328 f = UserFollowing()
319 f = UserFollowing()
329 f.user_id = user_id
320 f.user_id = user_id
330 f.follows_repo_id = follow_repo_id
321 f.follows_repo_id = follow_repo_id
331 self.sa.add(f)
322 self.sa.add(f)
332 except Exception:
323 except Exception:
333 log.error(traceback.format_exc())
324 log.error(traceback.format_exc())
334 raise
325 raise
335
326
336 def toggle_following_user(self, follow_user_id, user_id):
327 def toggle_following_user(self, follow_user_id, user_id):
337 f = self.sa.query(UserFollowing)\
328 f = self.sa.query(UserFollowing)\
338 .filter(UserFollowing.follows_user_id == follow_user_id)\
329 .filter(UserFollowing.follows_user_id == follow_user_id)\
339 .filter(UserFollowing.user_id == user_id).scalar()
330 .filter(UserFollowing.user_id == user_id).scalar()
340
331
341 if f is not None:
332 if f is not None:
342 try:
333 try:
343 self.sa.delete(f)
334 self.sa.delete(f)
344 return
335 return
345 except Exception:
336 except Exception:
346 log.error(traceback.format_exc())
337 log.error(traceback.format_exc())
347 raise
338 raise
348
339
349 try:
340 try:
350 f = UserFollowing()
341 f = UserFollowing()
351 f.user_id = user_id
342 f.user_id = user_id
352 f.follows_user_id = follow_user_id
343 f.follows_user_id = follow_user_id
353 self.sa.add(f)
344 self.sa.add(f)
354 except Exception:
345 except Exception:
355 log.error(traceback.format_exc())
346 log.error(traceback.format_exc())
356 raise
347 raise
357
348
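Both toggle_following_* methods implement the same delete-if-present-else-create toggle. A tiny standalone sketch of that control flow over an in-memory set (names are hypothetical, a set stands in for the UserFollowing table):

def toggle(following: set, key) -> bool:
    # Remove key if present, otherwise add it; return True when now following.
    if key in following:
        following.remove(key)      # mirrors: row found -> delete it and return
        return False
    following.add(key)             # mirrors: no row -> create a new UserFollowing
    return True

follows = set()
print(toggle(follows, ('user:2', 'repo:7')))   # True  -> started following
print(toggle(follows, ('user:2', 'repo:7')))   # False -> unfollowed again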
358 def is_following_repo(self, repo_name, user_id, cache=False):
349 def is_following_repo(self, repo_name, user_id, cache=False):
359 r = self.sa.query(Repository)\
350 r = self.sa.query(Repository)\
360 .filter(Repository.repo_name == repo_name).scalar()
351 .filter(Repository.repo_name == repo_name).scalar()
361
352
362 f = self.sa.query(UserFollowing)\
353 f = self.sa.query(UserFollowing)\
363 .filter(UserFollowing.follows_repository == r)\
354 .filter(UserFollowing.follows_repository == r)\
364 .filter(UserFollowing.user_id == user_id).scalar()
355 .filter(UserFollowing.user_id == user_id).scalar()
365
356
366 return f is not None
357 return f is not None
367
358
368 def is_following_user(self, username, user_id, cache=False):
359 def is_following_user(self, username, user_id, cache=False):
369 u = User.get_by_username(username)
360 u = User.get_by_username(username)
370
361
371 f = self.sa.query(UserFollowing)\
362 f = self.sa.query(UserFollowing)\
372 .filter(UserFollowing.follows_user == u)\
363 .filter(UserFollowing.follows_user == u)\
373 .filter(UserFollowing.user_id == user_id).scalar()
364 .filter(UserFollowing.user_id == user_id).scalar()
374
365
375 return f is not None
366 return f is not None
376
367
377 def get_followers(self, repo):
368 def get_followers(self, repo):
378 repo = self._get_repo(repo)
369 repo = self._get_repo(repo)
379
370
380 return self.sa.query(UserFollowing)\
371 return self.sa.query(UserFollowing)\
381 .filter(UserFollowing.follows_repository == repo).count()
372 .filter(UserFollowing.follows_repository == repo).count()
382
373
383 def get_forks(self, repo):
374 def get_forks(self, repo):
384 repo = self._get_repo(repo)
375 repo = self._get_repo(repo)
385 return self.sa.query(Repository)\
376 return self.sa.query(Repository)\
386 .filter(Repository.fork == repo).count()
377 .filter(Repository.fork == repo).count()
387
378
388 def get_pull_requests(self, repo):
379 def get_pull_requests(self, repo):
389 repo = self._get_repo(repo)
380 repo = self._get_repo(repo)
390 return self.sa.query(PullRequest)\
381 return self.sa.query(PullRequest)\
391 .filter(PullRequest.target_repo == repo)\
382 .filter(PullRequest.target_repo == repo)\
392 .filter(PullRequest.status != PullRequest.STATUS_CLOSED).count()
383 .filter(PullRequest.status != PullRequest.STATUS_CLOSED).count()
393
384
394 def get_artifacts(self, repo):
385 def get_artifacts(self, repo):
395 repo = self._get_repo(repo)
386 repo = self._get_repo(repo)
396 return self.sa.query(FileStore)\
387 return self.sa.query(FileStore)\
397 .filter(FileStore.repo == repo)\
388 .filter(FileStore.repo == repo)\
398 .filter(or_(FileStore.hidden == null(), FileStore.hidden == false())).count()
389 .filter(or_(FileStore.hidden == null(), FileStore.hidden == false())).count()
399
390
400 def mark_as_fork(self, repo, fork, user):
391 def mark_as_fork(self, repo, fork, user):
401 repo = self._get_repo(repo)
392 repo = self._get_repo(repo)
402 fork = self._get_repo(fork)
393 fork = self._get_repo(fork)
403 if fork and repo.repo_id == fork.repo_id:
394 if fork and repo.repo_id == fork.repo_id:
404 raise Exception("Cannot set repository as fork of itself")
395 raise Exception("Cannot set repository as fork of itself")
405
396
406 if fork and repo.repo_type != fork.repo_type:
397 if fork and repo.repo_type != fork.repo_type:
407 raise RepositoryError(
398 raise RepositoryError(
408 "Cannot set repository as fork of repository with other type")
399 "Cannot set repository as fork of repository with other type")
409
400
410 repo.fork = fork
401 repo.fork = fork
411 self.sa.add(repo)
402 self.sa.add(repo)
412 return repo
403 return repo
413
404
414 def pull_changes(self, repo, username, remote_uri=None, validate_uri=True, **kwargs):
405 def pull_changes(self, repo, username, remote_uri=None, validate_uri=True, **kwargs):
415 dbrepo = self._get_repo(repo)
406 dbrepo = self._get_repo(repo)
416 remote_uri = remote_uri or dbrepo.clone_uri
407 remote_uri = remote_uri or dbrepo.clone_uri
417 if not remote_uri:
408 if not remote_uri:
418 raise Exception("This repository doesn't have a clone uri")
409 raise Exception("This repository doesn't have a clone uri")
419
410
420 repo = dbrepo.scm_instance(cache=False)
411 repo = dbrepo.scm_instance(cache=False)
421 repo.config.clear_section('hooks')
412 repo.config.clear_section('hooks')
422
413
423 try:
414 try:
424 # NOTE(marcink): add extra validation so we skip invalid urls
415 # NOTE(marcink): add extra validation so we skip invalid urls
425 # this is because these tasks can be executed via the scheduler without
416 # this is because these tasks can be executed via the scheduler without
426 # proper validation of remote_uri
417 # proper validation of remote_uri
427 if validate_uri:
418 if validate_uri:
428 config = make_db_config(clear_session=False)
419 config = make_db_config(clear_session=False)
429 url_validator(remote_uri, dbrepo.repo_type, config)
420 url_validator(remote_uri, dbrepo.repo_type, config)
430 except InvalidCloneUrl:
421 except InvalidCloneUrl:
431 raise
422 raise
432
423
433 repo_name = dbrepo.repo_name
424 repo_name = dbrepo.repo_name
434 try:
425 try:
435 # TODO: we need to make sure those operations call proper hooks !
426 # TODO: we need to make sure those operations call proper hooks !
436 repo.fetch(remote_uri, **kwargs)
427 repo.fetch(remote_uri, **kwargs)
437
428
438 self.mark_for_invalidation(repo_name)
429 self.mark_for_invalidation(repo_name)
439 except Exception:
430 except Exception:
440 log.error(traceback.format_exc())
431 log.error(traceback.format_exc())
441 raise
432 raise
442
433
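A hedged usage sketch of pull_changes() above; the repository name, user and remote URI are made up, and a configured environment with a reachable remote is assumed.

# Illustrative only -- performs a real fetch when run against a live setup.
from rhodecode.model.scm import ScmModel

ScmModel().pull_changes(
    'projects/backend',                     # repo (name, id or Repository instance)
    'admin',                                # username recorded for the operation
    remote_uri='https://example.com/upstream.git',
    validate_uri=True,                      # run url_validator before fetching
)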
443 def push_changes(self, repo, username, remote_uri=None, validate_uri=True, **kwargs):
434 def push_changes(self, repo, username, remote_uri=None, validate_uri=True, **kwargs):
444 dbrepo = self._get_repo(repo)
435 dbrepo = self._get_repo(repo)
445 remote_uri = remote_uri or dbrepo.push_uri
436 remote_uri = remote_uri or dbrepo.push_uri
446 if not remote_uri:
437 if not remote_uri:
447 raise Exception("This repository doesn't have a push uri")
438 raise Exception("This repository doesn't have a push uri")
448
439
449 repo = dbrepo.scm_instance(cache=False)
440 repo = dbrepo.scm_instance(cache=False)
450 repo.config.clear_section('hooks')
441 repo.config.clear_section('hooks')
451
442
452 try:
443 try:
453 # NOTE(marcink): add extra validation so we skip invalid urls
444 # NOTE(marcink): add extra validation so we skip invalid urls
454 # this is because these tasks can be executed via the scheduler without
445 # this is because these tasks can be executed via the scheduler without
455 # proper validation of remote_uri
446 # proper validation of remote_uri
456 if validate_uri:
447 if validate_uri:
457 config = make_db_config(clear_session=False)
448 config = make_db_config(clear_session=False)
458 url_validator(remote_uri, dbrepo.repo_type, config)
449 url_validator(remote_uri, dbrepo.repo_type, config)
459 except InvalidCloneUrl:
450 except InvalidCloneUrl:
460 raise
451 raise
461
452
462 try:
453 try:
463 repo.push(remote_uri, **kwargs)
454 repo.push(remote_uri, **kwargs)
464 except Exception:
455 except Exception:
465 log.error(traceback.format_exc())
456 log.error(traceback.format_exc())
466 raise
457 raise
467
458
468 def commit_change(self, repo, repo_name, commit, user, author, message,
459 def commit_change(self, repo, repo_name, commit, user, author, message,
469 content: bytes, f_path: bytes, branch: str = None):
460 content: bytes, f_path: bytes, branch: str = None):
470 """
461 """
471 Commits changes
462 Commits changes
472 """
463 """
473 user = self._get_user(user)
464 user = self._get_user(user)
474
465
475 # message and author need to be unicode
466 # message and author need to be unicode
476 # the proper backend should then translate that into the required type
467 # the proper backend should then translate that into the required type
477 message = safe_str(message)
468 message = safe_str(message)
478 author = safe_str(author)
469 author = safe_str(author)
479 imc = repo.in_memory_commit
470 imc = repo.in_memory_commit
480 imc.change(FileNode(f_path, content, mode=commit.get_file_mode(f_path)))
471 imc.change(FileNode(f_path, content, mode=commit.get_file_mode(f_path)))
481 try:
472 try:
482 # TODO: handle pre-push action !
473 # TODO: handle pre-push action !
483 tip = imc.commit(
474 tip = imc.commit(
484 message=message, author=author, parents=[commit],
475 message=message, author=author, parents=[commit],
485 branch=branch or commit.branch)
476 branch=branch or commit.branch)
486 except Exception as e:
477 except Exception as e:
487 log.error(traceback.format_exc())
478 log.error(traceback.format_exc())
488 raise IMCCommitError(str(e))
479 raise IMCCommitError(str(e))
489 finally:
480 finally:
490 # always clear caches; even if the commit fails we want a fresh object
481 # always clear caches; even if the commit fails we want a fresh object
491 self.mark_for_invalidation(repo_name)
482 self.mark_for_invalidation(repo_name)
492
483
493 # We trigger the post-push action
484 # We trigger the post-push action
494 hooks_utils.trigger_post_push_hook(
485 hooks_utils.trigger_post_push_hook(
495 username=user.username, action='push_local', hook_type='post_push',
486 username=user.username, action='push_local', hook_type='post_push',
496 repo_name=repo_name, repo_type=repo.alias, commit_ids=[tip.raw_id])
487 repo_name=repo_name, repo_type=repo.alias, commit_ids=[tip.raw_id])
497 return tip
488 return tip
498
489
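# Illustrative sketch: committing a single in-memory file change. The vcs
# repository instance, user, author and file names below are assumptions:
#
#     scm = dbrepo.scm_instance()
#     tip = model.commit_change(
#         scm, 'my-repo', scm.get_commit(), user, 'Jane <jane@example.com>',
#         'update readme', content=b'new text', f_path=b'README.rst')
#
# The returned `tip` is the new commit; caches are invalidated and the
# post_push hook is triggered afterwards.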
499 def _sanitize_path(self, f_path: bytes):
490 def _sanitize_path(self, f_path: bytes):
500 if f_path.startswith(b'/') or f_path.startswith(b'./') or b'../' in f_path:
491 if f_path.startswith(b'/') or f_path.startswith(b'./') or b'../' in f_path:
501 raise NonRelativePathError(b'%b is not a relative path' % f_path)
492 raise NonRelativePathError(b'%b is not a relative path' % f_path)
502 if f_path:
493 if f_path:
503 f_path = os.path.normpath(f_path)
494 f_path = os.path.normpath(f_path)
504 return f_path
495 return f_path
505
496
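# Illustrative sketch of the sanitizing rules above (byte paths and the
# `model` instance name are assumptions):
#
#     model._sanitize_path(b'docs/index.rst')    # -> b'docs/index.rst'
#     model._sanitize_path(b'docs/./index.rst')  # normalized via os.path.normpath
#     model._sanitize_path(b'/etc/passwd')       # raises NonRelativePathError
#     model._sanitize_path(b'../secret')         # raises NonRelativePathError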
506 def get_dirnode_metadata(self, request, commit, dir_node):
497 def get_dirnode_metadata(self, request, commit, dir_node):
507 if not dir_node.is_dir():
498 if not dir_node.is_dir():
508 return []
499 return []
509
500
510 data = []
501 data = []
511 for node in dir_node:
502 for node in dir_node:
512 if not node.is_file():
503 if not node.is_file():
513 # we skip anything that is not a file node
504 # we skip anything that is not a file node
514 continue
505 continue
515
506
516 last_commit = node.last_commit
507 last_commit = node.last_commit
517 last_commit_date = last_commit.date
508 last_commit_date = last_commit.date
518 data.append({
509 data.append({
519 'name': node.name,
510 'name': node.name,
520 'size': h.format_byte_size_binary(node.size),
511 'size': h.format_byte_size_binary(node.size),
521 'modified_at': h.format_date(last_commit_date),
512 'modified_at': h.format_date(last_commit_date),
522 'modified_ts': last_commit_date.isoformat(),
513 'modified_ts': last_commit_date.isoformat(),
523 'revision': last_commit.revision,
514 'revision': last_commit.revision,
524 'short_id': last_commit.short_id,
515 'short_id': last_commit.short_id,
525 'message': h.escape(last_commit.message),
516 'message': h.escape(last_commit.message),
526 'author': h.escape(last_commit.author),
517 'author': h.escape(last_commit.author),
527 'user_profile': h.gravatar_with_user(
518 'user_profile': h.gravatar_with_user(
528 request, last_commit.author),
519 request, last_commit.author),
529 })
520 })
530
521
531 return data
522 return data
532
523
533 def get_nodes(self, repo_name, commit_id, root_path='/', flat=True,
524 def get_nodes(self, repo_name, commit_id, root_path='/', flat=True,
534 extended_info=False, content=False, max_file_bytes=None):
525 extended_info=False, content=False, max_file_bytes=None):
535 """
526 """
536 Recursively walk the root dir and return a set of all paths in that dir,
527 Recursively walk the root dir and return a set of all paths in that dir,
537 based on the repository walk function.
528 based on the repository walk function.
538
529
539 :param repo_name: name of repository
530 :param repo_name: name of repository
540 :param commit_id: commit id for which to list nodes
531 :param commit_id: commit id for which to list nodes
541 :param root_path: root path to list
532 :param root_path: root path to list
542 :param flat: return nodes as a flat list of paths; if False, return dicts with metadata
533 :param flat: return nodes as a flat list of paths; if False, return dicts with metadata
543 :param extended_info: show additional info such as md5, binary, size etc
534 :param extended_info: show additional info such as md5, binary, size etc
544 :param content: add nodes content to the return data
535 :param content: add nodes content to the return data
545 :param max_file_bytes: will not return file contents over this limit
536 :param max_file_bytes: will not return file contents over this limit
546
537
547 """
538 """
548 _files = list()
539 _files = list()
549 _dirs = list()
540 _dirs = list()
550
541
551 try:
542 try:
552 _repo = self._get_repo(repo_name)
543 _repo = self._get_repo(repo_name)
553 commit = _repo.scm_instance().get_commit(commit_id=commit_id)
544 commit = _repo.scm_instance().get_commit(commit_id=commit_id)
554 root_path = root_path.lstrip('/')
545 root_path = root_path.lstrip('/')
555
546
556 # get RootNode, inject pre-load options before walking
547 # get RootNode, inject pre-load options before walking
557 top_node = commit.get_node(root_path)
548 top_node = commit.get_node(root_path)
558 extended_info_pre_load = []
549 extended_info_pre_load = []
559 if extended_info:
550 if extended_info:
560 extended_info_pre_load += ['md5']
551 extended_info_pre_load += ['md5']
561 top_node.default_pre_load = ['is_binary', 'size'] + extended_info_pre_load
552 top_node.default_pre_load = ['is_binary', 'size'] + extended_info_pre_load
562
553
563 for __, dirs, files in commit.walk(top_node):
554 for __, dirs, files in commit.walk(top_node):
564
555
565 for f in files:
556 for f in files:
566 _content = None
557 _content = None
567 _data = f_name = f.str_path
558 _data = f_name = f.str_path
568
559
569 if not flat:
560 if not flat:
570 _data = {
561 _data = {
571 "name": h.escape(f_name),
562 "name": h.escape(f_name),
572 "type": "file",
563 "type": "file",
573 }
564 }
574 if extended_info:
565 if extended_info:
575 _data.update({
566 _data.update({
576 "md5": f.md5,
567 "md5": f.md5,
577 "binary": f.is_binary,
568 "binary": f.is_binary,
578 "size": f.size,
569 "size": f.size,
579 "extension": f.extension,
570 "extension": f.extension,
580 "mimetype": f.mimetype,
571 "mimetype": f.mimetype,
581 "lines": f.lines()[0]
572 "lines": f.lines()[0]
582 })
573 })
583
574
584 if content:
575 if content:
585 over_size_limit = (max_file_bytes is not None
576 over_size_limit = (max_file_bytes is not None
586 and f.size > max_file_bytes)
577 and f.size > max_file_bytes)
587 full_content = None
578 full_content = None
588 if not f.is_binary and not over_size_limit:
579 if not f.is_binary and not over_size_limit:
589 full_content = f.str_content
580 full_content = f.str_content
590
581
591 _data.update({
582 _data.update({
592 "content": full_content,
583 "content": full_content,
593 })
584 })
594 _files.append(_data)
585 _files.append(_data)
595
586
596 for d in dirs:
587 for d in dirs:
597 _data = d_name = d.str_path
588 _data = d_name = d.str_path
598 if not flat:
589 if not flat:
599 _data = {
590 _data = {
600 "name": h.escape(d_name),
591 "name": h.escape(d_name),
601 "type": "dir",
592 "type": "dir",
602 }
593 }
603 if extended_info:
594 if extended_info:
604 _data.update({
595 _data.update({
605 "md5": "",
596 "md5": "",
606 "binary": False,
597 "binary": False,
607 "size": 0,
598 "size": 0,
608 "extension": "",
599 "extension": "",
609 })
600 })
610 if content:
601 if content:
611 _data.update({
602 _data.update({
612 "content": None
603 "content": None
613 })
604 })
614 _dirs.append(_data)
605 _dirs.append(_data)
615 except RepositoryError:
606 except RepositoryError:
616 log.exception("Exception in get_nodes")
607 log.exception("Exception in get_nodes")
617 raise
608 raise
618
609
619 return _dirs, _files
610 return _dirs, _files
620
611
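# Illustrative sketch: listing a directory tree with metadata but without
# file contents. The repository name, commit id and root path are
# assumptions:
#
#     dirs, files = model.get_nodes(
#         'my-repo', commit_id='tip', root_path='docs',
#         flat=False, extended_info=True, content=False)
#
# With flat=True only plain paths are returned; max_file_bytes only matters
# when content=True.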
621 def get_quick_filter_nodes(self, repo_name, commit_id, root_path='/'):
612 def get_quick_filter_nodes(self, repo_name, commit_id, root_path='/'):
622 """
613 """
623 Generate files for quick filter in files view
614 Generate files for quick filter in files view
624 """
615 """
625
616
626 _files = list()
617 _files = list()
627 _dirs = list()
618 _dirs = list()
628 try:
619 try:
629 _repo = self._get_repo(repo_name)
620 _repo = self._get_repo(repo_name)
630 commit = _repo.scm_instance().get_commit(commit_id=commit_id)
621 commit = _repo.scm_instance().get_commit(commit_id=commit_id)
631 root_path = root_path.lstrip('/')
622 root_path = root_path.lstrip('/')
632
623
633 top_node = commit.get_node(root_path)
624 top_node = commit.get_node(root_path)
634 top_node.default_pre_load = []
625 top_node.default_pre_load = []
635
626
636 for __, dirs, files in commit.walk(top_node):
627 for __, dirs, files in commit.walk(top_node):
637 for f in files:
628 for f in files:
638
629
639 _data = {
630 _data = {
640 "name": h.escape(f.str_path),
631 "name": h.escape(f.str_path),
641 "type": "file",
632 "type": "file",
642 }
633 }
643
634
644 _files.append(_data)
635 _files.append(_data)
645
636
646 for d in dirs:
637 for d in dirs:
647
638
648 _data = {
639 _data = {
649 "name": h.escape(d.str_path),
640 "name": h.escape(d.str_path),
650 "type": "dir",
641 "type": "dir",
651 }
642 }
652
643
653 _dirs.append(_data)
644 _dirs.append(_data)
654 except RepositoryError:
645 except RepositoryError:
655 log.exception("Exception in get_quick_filter_nodes")
646 log.exception("Exception in get_quick_filter_nodes")
656 raise
647 raise
657
648
658 return _dirs, _files
649 return _dirs, _files
659
650
660 def get_node(self, repo_name, commit_id, file_path,
651 def get_node(self, repo_name, commit_id, file_path,
661 extended_info=False, content=False, max_file_bytes=None, cache=True):
652 extended_info=False, content=False, max_file_bytes=None, cache=True):
662 """
653 """
663 Retrieve a single node from a commit.
654 Retrieve a single node from a commit.
664 """
655 """
665
656
666 try:
657 try:
667
658
668 _repo = self._get_repo(repo_name)
659 _repo = self._get_repo(repo_name)
669 commit = _repo.scm_instance().get_commit(commit_id=commit_id)
660 commit = _repo.scm_instance().get_commit(commit_id=commit_id)
670
661
671 file_node = commit.get_node(file_path)
662 file_node = commit.get_node(file_path)
672 if file_node.is_dir():
663 if file_node.is_dir():
673 raise RepositoryError('The given path is a directory')
664 raise RepositoryError('The given path is a directory')
674
665
675 _content = None
666 _content = None
676 f_name = file_node.str_path
667 f_name = file_node.str_path
677
668
678 file_data = {
669 file_data = {
679 "name": h.escape(f_name),
670 "name": h.escape(f_name),
680 "type": "file",
671 "type": "file",
681 }
672 }
682
673
683 if extended_info:
674 if extended_info:
684 file_data.update({
675 file_data.update({
685 "extension": file_node.extension,
676 "extension": file_node.extension,
686 "mimetype": file_node.mimetype,
677 "mimetype": file_node.mimetype,
687 })
678 })
688
679
689 if cache:
680 if cache:
690 md5 = file_node.md5
681 md5 = file_node.md5
691 is_binary = file_node.is_binary
682 is_binary = file_node.is_binary
692 size = file_node.size
683 size = file_node.size
693 else:
684 else:
694 is_binary, md5, size, _content = file_node.metadata_uncached()
685 is_binary, md5, size, _content = file_node.metadata_uncached()
695
686
696 file_data.update({
687 file_data.update({
697 "md5": md5,
688 "md5": md5,
698 "binary": is_binary,
689 "binary": is_binary,
699 "size": size,
690 "size": size,
700 })
691 })
701
692
702 if content and cache:
693 if content and cache:
703 # get content + cache
694 # get content + cache
704 size = file_node.size
695 size = file_node.size
705 over_size_limit = (max_file_bytes is not None and size > max_file_bytes)
696 over_size_limit = (max_file_bytes is not None and size > max_file_bytes)
706 full_content = None
697 full_content = None
707 all_lines = 0
698 all_lines = 0
708 if not file_node.is_binary and not over_size_limit:
699 if not file_node.is_binary and not over_size_limit:
709 full_content = safe_str(file_node.content)
700 full_content = safe_str(file_node.content)
710 all_lines, empty_lines = file_node.count_lines(full_content)
701 all_lines, empty_lines = file_node.count_lines(full_content)
711
702
712 file_data.update({
703 file_data.update({
713 "content": full_content,
704 "content": full_content,
714 "lines": all_lines
705 "lines": all_lines
715 })
706 })
716 elif content:
707 elif content:
717 # get content *without* cache
708 # get content *without* cache
718 if _content is None:
709 if _content is None:
719 is_binary, md5, size, _content = file_node.metadata_uncached()
710 is_binary, md5, size, _content = file_node.metadata_uncached()
720
711
721 over_size_limit = (max_file_bytes is not None and size > max_file_bytes)
712 over_size_limit = (max_file_bytes is not None and size > max_file_bytes)
722 full_content = None
713 full_content = None
723 all_lines = 0
714 all_lines = 0
724 if not is_binary and not over_size_limit:
715 if not is_binary and not over_size_limit:
725 full_content = safe_str(_content)
716 full_content = safe_str(_content)
726 all_lines, empty_lines = file_node.count_lines(full_content)
717 all_lines, empty_lines = file_node.count_lines(full_content)
727
718
728 file_data.update({
719 file_data.update({
729 "content": full_content,
720 "content": full_content,
730 "lines": all_lines
721 "lines": all_lines
731 })
722 })
732
723
733 except RepositoryError:
724 except RepositoryError:
734 log.exception("Exception in get_node")
725 log.exception("Exception in get_node")
735 raise
726 raise
736
727
737 return file_data
728 return file_data
738
729
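# Illustrative sketch: fetching one file with its content, capped at 1 MB.
# Repository, commit and path names are assumptions:
#
#     file_data = model.get_node(
#         'my-repo', commit_id='tip', file_path='README.rst',
#         extended_info=True, content=True, max_file_bytes=1024 * 1024)
#
# For binary or over-sized files `content` comes back as None while the
# md5/size metadata is still filled in.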
739 def get_fts_data(self, repo_name, commit_id, root_path='/'):
730 def get_fts_data(self, repo_name, commit_id, root_path='/'):
740 """
731 """
741 Fetch node tree for usage in full text search
732 Fetch node tree for usage in full text search
742 """
733 """
743
734
744 tree_info = list()
735 tree_info = list()
745
736
746 try:
737 try:
747 _repo = self._get_repo(repo_name)
738 _repo = self._get_repo(repo_name)
748 commit = _repo.scm_instance().get_commit(commit_id=commit_id)
739 commit = _repo.scm_instance().get_commit(commit_id=commit_id)
749 root_path = root_path.lstrip('/')
740 root_path = root_path.lstrip('/')
750 top_node = commit.get_node(root_path)
741 top_node = commit.get_node(root_path)
751 top_node.default_pre_load = []
742 top_node.default_pre_load = []
752
743
753 for __, dirs, files in commit.walk(top_node):
744 for __, dirs, files in commit.walk(top_node):
754
745
755 for f in files:
746 for f in files:
756 is_binary, md5, size, _content = f.metadata_uncached()
747 is_binary, md5, size, _content = f.metadata_uncached()
757 _data = {
748 _data = {
758 "name": f.str_path,
749 "name": f.str_path,
759 "md5": md5,
750 "md5": md5,
760 "extension": f.extension,
751 "extension": f.extension,
761 "binary": is_binary,
752 "binary": is_binary,
762 "size": size
753 "size": size
763 }
754 }
764
755
765 tree_info.append(_data)
756 tree_info.append(_data)
766
757
767 except RepositoryError:
758 except RepositoryError:
768 log.exception("Exception in get_nodes")
759 log.exception("Exception in get_nodes")
769 raise
760 raise
770
761
771 return tree_info
762 return tree_info
772
763
773 def create_nodes(self, user, repo, message, nodes, parent_commit=None,
764 def create_nodes(self, user, repo, message, nodes, parent_commit=None,
774 author=None, trigger_push_hook=True):
765 author=None, trigger_push_hook=True):
775 """
766 """
776 Commits the given nodes into repo
767 Commits the given nodes into repo
777
768
778 :param user: RhodeCode User object or user_id, the committer
769 :param user: RhodeCode User object or user_id, the committer
779 :param repo: RhodeCode Repository object
770 :param repo: RhodeCode Repository object
780 :param message: commit message
771 :param message: commit message
781 :param nodes: mapping {filename:{'content':content},...}
772 :param nodes: mapping {filename:{'content':content},...}
782 :param parent_commit: parent commit; can be empty, in which case this is
773 :param parent_commit: parent commit; can be empty, in which case this is
783 the initial commit
774 the initial commit
784 :param author: author of the commit; can be different from the committer,
775 :param author: author of the commit; can be different from the committer,
785 but only for git
776 but only for git
786 :param trigger_push_hook: trigger push hooks
777 :param trigger_push_hook: trigger push hooks
787
778
788 :returns: new committed commit
779 :returns: new committed commit
789 """
780 """
790 user, scm_instance, message, commiter, author, imc = self.initialize_inmemory_vars(
781 user, scm_instance, message, commiter, author, imc = self.initialize_inmemory_vars(
791 user, repo, message, author)
782 user, repo, message, author)
792
783
793 parent_commit, parents = self.get_parent_commits(parent_commit, scm_instance)
784 parent_commit, parents = self.get_parent_commits(parent_commit, scm_instance)
794
785
795 upload_file_types = (io.BytesIO, io.BufferedRandom)
786 upload_file_types = (io.BytesIO, io.BufferedRandom)
796 processed_nodes = []
787 processed_nodes = []
797 for filename, content_dict in nodes.items():
788 for filename, content_dict in nodes.items():
798 if not isinstance(filename, bytes):
789 if not isinstance(filename, bytes):
799 raise ValueError(f'filename key in nodes needs to be bytes, or {upload_file_types}')
790 raise ValueError(f'filename key in nodes needs to be bytes, or {upload_file_types}')
800 content = content_dict['content']
791 content = content_dict['content']
801 if not isinstance(content, upload_file_types + (bytes,)):
792 if not isinstance(content, upload_file_types + (bytes,)):
802 raise ValueError('content key value in nodes needs to be bytes')
793 raise ValueError('content key value in nodes needs to be bytes')
803
794
804 for f_path in nodes:
795 for f_path in nodes:
805 f_path = self._sanitize_path(f_path)
796 f_path = self._sanitize_path(f_path)
806 content = nodes[f_path]['content']
797 content = nodes[f_path]['content']
807
798
808 # decoding here ensures that we have properly encoded values;
799 # decoding here ensures that we have properly encoded values;
809 # in any other case this will raise an exception and deny the commit
800 # in any other case this will raise an exception and deny the commit
810
801
811 if isinstance(content, bytes):
802 if isinstance(content, bytes):
812 pass
803 pass
813 elif isinstance(content, upload_file_types):
804 elif isinstance(content, upload_file_types):
814 content = content.read()
805 content = content.read()
815 else:
806 else:
816 raise Exception(f'Content is of unrecognized type {type(content)}, expected {upload_file_types}')
807 raise Exception(f'Content is of unrecognized type {type(content)}, expected {upload_file_types}')
817 processed_nodes.append((f_path, content))
808 processed_nodes.append((f_path, content))
818
809
819 # add multiple nodes
810 # add multiple nodes
820 for path, content in processed_nodes:
811 for path, content in processed_nodes:
821 imc.add(FileNode(path, content=content))
812 imc.add(FileNode(path, content=content))
822
813
823 # TODO: handle pre push scenario
814 # TODO: handle pre push scenario
824 tip = imc.commit(message=message,
815 tip = imc.commit(message=message,
825 author=author,
816 author=author,
826 parents=parents,
817 parents=parents,
827 branch=parent_commit.branch)
818 branch=parent_commit.branch)
828
819
829 self.mark_for_invalidation(repo.repo_name)
820 self.mark_for_invalidation(repo.repo_name)
830 if trigger_push_hook:
821 if trigger_push_hook:
831 hooks_utils.trigger_post_push_hook(
822 hooks_utils.trigger_post_push_hook(
832 username=user.username, action='push_local',
823 username=user.username, action='push_local',
833 repo_name=repo.repo_name, repo_type=scm_instance.alias,
824 repo_name=repo.repo_name, repo_type=scm_instance.alias,
834 hook_type='post_push',
825 hook_type='post_push',
835 commit_ids=[tip.raw_id])
826 commit_ids=[tip.raw_id])
836 return tip
827 return tip
837
828
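# Illustrative sketch of the `nodes` mapping accepted above: filename keys
# are bytes, content is bytes or a file-like object. File names and the
# commit message are assumptions:
#
#     nodes = {
#         b'docs/index.rst': {'content': b'index'},
#         b'setup.py': {'content': io.BytesIO(b'#!/usr/bin/env python')},
#     }
#     tip = model.create_nodes(user, repo, 'add initial files', nodes)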
838 def update_nodes(self, user, repo, message, nodes, parent_commit=None,
829 def update_nodes(self, user, repo, message, nodes, parent_commit=None,
839 author=None, trigger_push_hook=True):
830 author=None, trigger_push_hook=True):
840 user, scm_instance, message, commiter, author, imc = self.initialize_inmemory_vars(
831 user, scm_instance, message, commiter, author, imc = self.initialize_inmemory_vars(
841 user, repo, message, author)
832 user, repo, message, author)
842
833
843 parent_commit, parents = self.get_parent_commits(parent_commit, scm_instance)
834 parent_commit, parents = self.get_parent_commits(parent_commit, scm_instance)
844
835
845 # add multiple nodes
836 # add multiple nodes
846 for _filename, data in nodes.items():
837 for _filename, data in nodes.items():
847 # new filename, possibly renamed from the old one; also sanitize
838 # new filename, possibly renamed from the old one; also sanitize
848 # the path against any tricks with relative paths like ../../ etc.
839 # the path against any tricks with relative paths like ../../ etc.
849 filename = self._sanitize_path(data['filename'])
840 filename = self._sanitize_path(data['filename'])
850 old_filename = self._sanitize_path(_filename)
841 old_filename = self._sanitize_path(_filename)
851 content = data['content']
842 content = data['content']
852 file_mode = data.get('mode')
843 file_mode = data.get('mode')
853 filenode = FileNode(old_filename, content=content, mode=file_mode)
844 filenode = FileNode(old_filename, content=content, mode=file_mode)
854 op = data['op']
845 op = data['op']
855 if op == 'add':
846 if op == 'add':
856 imc.add(filenode)
847 imc.add(filenode)
857 elif op == 'del':
848 elif op == 'del':
858 imc.remove(filenode)
849 imc.remove(filenode)
859 elif op == 'mod':
850 elif op == 'mod':
860 if filename != old_filename:
851 if filename != old_filename:
861 # TODO: handle renames more efficiently, needs vcs lib changes
852 # TODO: handle renames more efficiently, needs vcs lib changes
862 imc.remove(filenode)
853 imc.remove(filenode)
863 imc.add(FileNode(filename, content=content, mode=file_mode))
854 imc.add(FileNode(filename, content=content, mode=file_mode))
864 else:
855 else:
865 imc.change(filenode)
856 imc.change(filenode)
866
857
867 try:
858 try:
868 # TODO: handle pre push scenario commit changes
859 # TODO: handle pre push scenario commit changes
869 tip = imc.commit(message=message,
860 tip = imc.commit(message=message,
870 author=author,
861 author=author,
871 parents=parents,
862 parents=parents,
872 branch=parent_commit.branch)
863 branch=parent_commit.branch)
873 except NodeNotChangedError:
864 except NodeNotChangedError:
874 raise
865 raise
875 except Exception as e:
866 except Exception as e:
876 log.exception("Unexpected exception during call to imc.commit")
867 log.exception("Unexpected exception during call to imc.commit")
877 raise IMCCommitError(str(e))
868 raise IMCCommitError(str(e))
878 finally:
869 finally:
879 # always clear caches; even if the commit fails we want a fresh object
870 # always clear caches; even if the commit fails we want a fresh object
880 self.mark_for_invalidation(repo.repo_name)
871 self.mark_for_invalidation(repo.repo_name)
881
872
882 if trigger_push_hook:
873 if trigger_push_hook:
883 hooks_utils.trigger_post_push_hook(
874 hooks_utils.trigger_post_push_hook(
884 username=user.username, action='push_local', hook_type='post_push',
875 username=user.username, action='push_local', hook_type='post_push',
885 repo_name=repo.repo_name, repo_type=scm_instance.alias,
876 repo_name=repo.repo_name, repo_type=scm_instance.alias,
886 commit_ids=[tip.raw_id])
877 commit_ids=[tip.raw_id])
887
878
888 return tip
879 return tip
889
880
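# Illustrative sketch of the per-file operations handled above: keys are the
# old filenames, 'op' selects add/mod/del, and a changed 'filename' performs
# a rename. File names and contents are assumptions:
#
#     nodes = {
#         b'README.rst': {'filename': b'README.md', 'content': b'# readme', 'op': 'mod'},
#         b'old.cfg': {'filename': b'old.cfg', 'content': b'', 'op': 'del'},
#     }
#     tip = model.update_nodes(user, repo, 'rename readme, drop old.cfg', nodes)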
890 def update_binary_node(self, user, repo, message, node, parent_commit=None, author=None):
881 def update_binary_node(self, user, repo, message, node, parent_commit=None, author=None):
891 user, scm_instance, message, commiter, author, imc = self.initialize_inmemory_vars(
882 user, scm_instance, message, commiter, author, imc = self.initialize_inmemory_vars(
892 user, repo, message, author)
883 user, repo, message, author)
893
884
894 parent_commit, parents = self.get_parent_commits(parent_commit, scm_instance)
885 parent_commit, parents = self.get_parent_commits(parent_commit, scm_instance)
895
886
896 file_path = node.get('file_path')
887 file_path = node.get('file_path')
897 if isinstance(raw_content := node.get('content'), (io.BytesIO, io.BufferedRandom)):
888 if isinstance(raw_content := node.get('content'), (io.BytesIO, io.BufferedRandom)):
898 content = raw_content.read()
889 content = raw_content.read()
899 else:
890 else:
900 raise Exception("Wrong content was provided")
891 raise Exception("Wrong content was provided")
901 file_node = FileNode(file_path, content=content)
892 file_node = FileNode(file_path, content=content)
902 imc.change(file_node)
893 imc.change(file_node)
903
894
904 try:
895 try:
905 tip = imc.commit(message=message,
896 tip = imc.commit(message=message,
906 author=author,
897 author=author,
907 parents=parents,
898 parents=parents,
908 branch=parent_commit.branch)
899 branch=parent_commit.branch)
909 except NodeNotChangedError:
900 except NodeNotChangedError:
910 raise
901 raise
911 except Exception as e:
902 except Exception as e:
912 log.exception("Unexpected exception during call to imc.commit")
903 log.exception("Unexpected exception during call to imc.commit")
913 raise IMCCommitError(str(e))
904 raise IMCCommitError(str(e))
914 finally:
905 finally:
915 self.mark_for_invalidation(repo.repo_name)
906 self.mark_for_invalidation(repo.repo_name)
916
907
917 hooks_utils.trigger_post_push_hook(
908 hooks_utils.trigger_post_push_hook(
918 username=user.username, action='push_local', hook_type='post_push',
909 username=user.username, action='push_local', hook_type='post_push',
919 repo_name=repo.repo_name, repo_type=scm_instance.alias,
910 repo_name=repo.repo_name, repo_type=scm_instance.alias,
920 commit_ids=[tip.raw_id])
911 commit_ids=[tip.raw_id])
921 return tip
912 return tip
922
913
923 def delete_nodes(self, user, repo, message, nodes, parent_commit=None,
914 def delete_nodes(self, user, repo, message, nodes, parent_commit=None,
924 author=None, trigger_push_hook=True):
915 author=None, trigger_push_hook=True):
925 """
916 """
926 Deletes the given nodes from `repo`
917 Deletes the given nodes from `repo`
927
918
928 :param user: RhodeCode User object or user_id, the committer
919 :param user: RhodeCode User object or user_id, the committer
929 :param repo: RhodeCode Repository object
920 :param repo: RhodeCode Repository object
930 :param message: commit message
921 :param message: commit message
931 :param nodes: mapping {filename:{'content':content},...}
922 :param nodes: mapping {filename:{'content':content},...}
932 :param parent_commit: parent commit; can be empty, in which case this is
923 :param parent_commit: parent commit; can be empty, in which case this is
933 the initial commit
924 the initial commit
934 :param author: author of the commit; can be different from the committer,
925 :param author: author of the commit; can be different from the committer,
935 but only for git
926 but only for git
936 :param trigger_push_hook: trigger push hooks
927 :param trigger_push_hook: trigger push hooks
937
928
938 :returns: new commit after deletion
929 :returns: new commit after deletion
939 """
930 """
940
931
941 user, scm_instance, message, commiter, author, imc = self.initialize_inmemory_vars(
932 user, scm_instance, message, commiter, author, imc = self.initialize_inmemory_vars(
942 user, repo, message, author)
933 user, repo, message, author)
943
934
944 processed_nodes = []
935 processed_nodes = []
945 for f_path in nodes:
936 for f_path in nodes:
946 f_path = self._sanitize_path(f_path)
937 f_path = self._sanitize_path(f_path)
947 # content can be empty, but for compatibility the same dict
938 # content can be empty, but for compatibility the same dict
948 # structure as in add_nodes is allowed
939 # structure as in add_nodes is allowed
949 content = nodes[f_path].get('content')
940 content = nodes[f_path].get('content')
950 processed_nodes.append((safe_bytes(f_path), content))
941 processed_nodes.append((safe_bytes(f_path), content))
951
942
952 parent_commit, parents = self.get_parent_commits(parent_commit, scm_instance)
943 parent_commit, parents = self.get_parent_commits(parent_commit, scm_instance)
953
944
954 # add multiple nodes
945 # add multiple nodes
955 for path, content in processed_nodes:
946 for path, content in processed_nodes:
956 imc.remove(FileNode(path, content=content))
947 imc.remove(FileNode(path, content=content))
957
948
958 # TODO: handle pre push scenario
949 # TODO: handle pre push scenario
959 tip = imc.commit(message=message,
950 tip = imc.commit(message=message,
960 author=author,
951 author=author,
961 parents=parents,
952 parents=parents,
962 branch=parent_commit.branch)
953 branch=parent_commit.branch)
963
954
964 self.mark_for_invalidation(repo.repo_name)
955 self.mark_for_invalidation(repo.repo_name)
965 if trigger_push_hook:
956 if trigger_push_hook:
966 hooks_utils.trigger_post_push_hook(
957 hooks_utils.trigger_post_push_hook(
967 username=user.username, action='push_local', hook_type='post_push',
958 username=user.username, action='push_local', hook_type='post_push',
968 repo_name=repo.repo_name, repo_type=scm_instance.alias,
959 repo_name=repo.repo_name, repo_type=scm_instance.alias,
969 commit_ids=[tip.raw_id])
960 commit_ids=[tip.raw_id])
970 return tip
961 return tip
971
962
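# Illustrative sketch: removing two files in a single commit. For
# compatibility with create_nodes each entry may carry a 'content' key, but
# it can be omitted. File names and the message are assumptions:
#
#     nodes = {b'docs/old.rst': {}, b'legacy/helper.py': {}}
#     tip = model.delete_nodes(user, repo, 'drop obsolete files', nodes)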
972 def strip(self, repo, commit_id, branch):
963 def strip(self, repo, commit_id, branch):
973 scm_instance = repo.scm_instance(cache=False)
964 scm_instance = repo.scm_instance(cache=False)
974 scm_instance.config.clear_section('hooks')
965 scm_instance.config.clear_section('hooks')
975 scm_instance.strip(commit_id, branch)
966 scm_instance.strip(commit_id, branch)
976 self.mark_for_invalidation(repo.repo_name)
967 self.mark_for_invalidation(repo.repo_name)
977
968
978 def get_unread_journal(self):
969 def get_unread_journal(self):
979 return self.sa.query(UserLog).count()
970 return self.sa.query(UserLog).count()
980
971
981 @classmethod
972 @classmethod
982 def backend_landing_ref(cls, repo_type):
973 def backend_landing_ref(cls, repo_type):
983 """
974 """
984 Return a default landing ref based on a repository type.
975 Return a default landing ref based on a repository type.
985 """
976 """
986
977
987 landing_ref = {
978 landing_ref = {
988 'hg': ('branch:default', 'default'),
979 'hg': ('branch:default', 'default'),
989 'git': ('branch:master', 'master'),
980 'git': ('branch:master', 'master'),
990 'svn': ('rev:tip', 'latest tip'),
981 'svn': ('rev:tip', 'latest tip'),
991 'default': ('rev:tip', 'latest tip'),
982 'default': ('rev:tip', 'latest tip'),
992 }
983 }
993
984
994 return landing_ref.get(repo_type) or landing_ref['default']
985 return landing_ref.get(repo_type) or landing_ref['default']
995
986
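# Illustrative sketch of the lookup above (the class name used here is an
# assumption for illustration):
#
#     ScmModel.backend_landing_ref('git')      # -> ('branch:master', 'master')
#     ScmModel.backend_landing_ref('hg')       # -> ('branch:default', 'default')
#     ScmModel.backend_landing_ref('unknown')  # -> ('rev:tip', 'latest tip')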
996 def get_repo_landing_revs(self, translator, repo=None):
987 def get_repo_landing_revs(self, translator, repo=None):
997 """
988 """
998 Generates select options with tags, branches and bookmarks (the latter hg only),
989 Generates select options with tags, branches and bookmarks (the latter hg only),
999 grouped by type
990 grouped by type
1000
991
1001 :param repo:
992 :param repo:
1002 """
993 """
1003 from rhodecode.lib.vcs.backends.git import GitRepository
994 from rhodecode.lib.vcs.backends.git import GitRepository
1004
995
1005 _ = translator
996 _ = translator
1006 repo = self._get_repo(repo)
997 repo = self._get_repo(repo)
1007
998
1008 if repo:
999 if repo:
1009 repo_type = repo.repo_type
1000 repo_type = repo.repo_type
1010 else:
1001 else:
1011 repo_type = 'default'
1002 repo_type = 'default'
1012
1003
1013 default_landing_ref, landing_ref_lbl = self.backend_landing_ref(repo_type)
1004 default_landing_ref, landing_ref_lbl = self.backend_landing_ref(repo_type)
1014
1005
1015 default_ref_options = [
1006 default_ref_options = [
1016 [default_landing_ref, landing_ref_lbl]
1007 [default_landing_ref, landing_ref_lbl]
1017 ]
1008 ]
1018 default_choices = [
1009 default_choices = [
1019 default_landing_ref
1010 default_landing_ref
1020 ]
1011 ]
1021
1012
1022 if not repo:
1013 if not repo:
1023 # presented at NEW repo creation
1014 # presented at NEW repo creation
1024 return default_choices, default_ref_options
1015 return default_choices, default_ref_options
1025
1016
1026 repo = repo.scm_instance()
1017 repo = repo.scm_instance()
1027
1018
1028 ref_options = [(default_landing_ref, landing_ref_lbl)]
1019 ref_options = [(default_landing_ref, landing_ref_lbl)]
1029 choices = [default_landing_ref]
1020 choices = [default_landing_ref]
1030
1021
1031 # branches
1022 # branches
1032 branch_group = [(f'branch:{safe_str(b)}', safe_str(b)) for b in repo.branches]
1023 branch_group = [(f'branch:{safe_str(b)}', safe_str(b)) for b in repo.branches]
1033 if not branch_group:
1024 if not branch_group:
1034 # new repo, or a repo without any branches yet
1025 # new repo, or a repo without any branches yet
1035 branch_group = default_ref_options
1026 branch_group = default_ref_options
1036
1027
1037 branches_group = (branch_group, _("Branches"))
1028 branches_group = (branch_group, _("Branches"))
1038 ref_options.append(branches_group)
1029 ref_options.append(branches_group)
1039 choices.extend([x[0] for x in branches_group[0]])
1030 choices.extend([x[0] for x in branches_group[0]])
1040
1031
1041 # bookmarks for HG
1032 # bookmarks for HG
1042 if repo.alias == 'hg':
1033 if repo.alias == 'hg':
1043 bookmarks_group = (
1034 bookmarks_group = (
1044 [(f'book:{safe_str(b)}', safe_str(b))
1035 [(f'book:{safe_str(b)}', safe_str(b))
1045 for b in repo.bookmarks],
1036 for b in repo.bookmarks],
1046 _("Bookmarks"))
1037 _("Bookmarks"))
1047 ref_options.append(bookmarks_group)
1038 ref_options.append(bookmarks_group)
1048 choices.extend([x[0] for x in bookmarks_group[0]])
1039 choices.extend([x[0] for x in bookmarks_group[0]])
1049
1040
1050 # tags
1041 # tags
1051 tags_group = (
1042 tags_group = (
1052 [(f'tag:{safe_str(t)}', safe_str(t))
1043 [(f'tag:{safe_str(t)}', safe_str(t))
1053 for t in repo.tags],
1044 for t in repo.tags],
1054 _("Tags"))
1045 _("Tags"))
1055 ref_options.append(tags_group)
1046 ref_options.append(tags_group)
1056 choices.extend([x[0] for x in tags_group[0]])
1047 choices.extend([x[0] for x in tags_group[0]])
1057
1048
1058 return choices, ref_options
1049 return choices, ref_options
1059
1050
1060 def get_server_info(self, environ=None):
1051 def get_server_info(self, environ=None):
1061 server_info = get_system_info(environ)
1052 server_info = get_system_info(environ)
1062 return server_info
1053 return server_info
@@ -1,909 +1,902 b''
1 # Copyright (C) 2010-2023 RhodeCode GmbH
1 # Copyright (C) 2010-2023 RhodeCode GmbH
2 #
2 #
3 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
4 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
5 # (only), as published by the Free Software Foundation.
5 # (only), as published by the Free Software Foundation.
6 #
6 #
7 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
10 # GNU General Public License for more details.
11 #
11 #
12 # You should have received a copy of the GNU Affero General Public License
12 # You should have received a copy of the GNU Affero General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 #
14 #
15 # This program is dual-licensed. If you wish to learn more about the
15 # This program is dual-licensed. If you wish to learn more about the
16 # RhodeCode Enterprise Edition, including its added features, Support services,
16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18
18
19 import os
19 import os
20 import re
20 import re
21 import logging
21 import logging
22 import time
22 import time
23 import functools
23 import functools
24 from collections import namedtuple
24 from collections import namedtuple
25
25
26 from pyramid.threadlocal import get_current_request
26 from pyramid.threadlocal import get_current_request
27
27
28 from rhodecode.lib import rc_cache
28 from rhodecode.lib import rc_cache
29 from rhodecode.lib.hash_utils import sha1_safe
29 from rhodecode.lib.hash_utils import sha1_safe
30 from rhodecode.lib.html_filters import sanitize_html
30 from rhodecode.lib.html_filters import sanitize_html
31 from rhodecode.lib.utils2 import (
31 from rhodecode.lib.utils2 import (
32 Optional, AttributeDict, safe_str, remove_prefix, str2bool)
32 Optional, AttributeDict, safe_str, remove_prefix, str2bool)
33 from rhodecode.lib.vcs.backends import base
33 from rhodecode.lib.vcs.backends import base
34 from rhodecode.lib.statsd_client import StatsdClient
34 from rhodecode.lib.statsd_client import StatsdClient
35 from rhodecode.model import BaseModel
35 from rhodecode.model import BaseModel
36 from rhodecode.model.db import (
36 from rhodecode.model.db import (
37 RepoRhodeCodeUi, RepoRhodeCodeSetting, RhodeCodeUi, RhodeCodeSetting)
37 RepoRhodeCodeUi, RepoRhodeCodeSetting, RhodeCodeUi, RhodeCodeSetting)
38 from rhodecode.model.meta import Session
38 from rhodecode.model.meta import Session
39
39
40
40
41 log = logging.getLogger(__name__)
41 log = logging.getLogger(__name__)
42
42
43
43
44 UiSetting = namedtuple(
44 UiSetting = namedtuple(
45 'UiSetting', ['section', 'key', 'value', 'active'])
45 'UiSetting', ['section', 'key', 'value', 'active'])
46
46
47 SOCIAL_PLUGINS_LIST = ['github', 'bitbucket', 'twitter', 'google']
47 SOCIAL_PLUGINS_LIST = ['github', 'bitbucket', 'twitter', 'google']
48
48
49
49
50 class SettingNotFound(Exception):
50 class SettingNotFound(Exception):
51 def __init__(self, setting_id):
51 def __init__(self, setting_id):
52 msg = f'Setting `{setting_id}` is not found'
52 msg = f'Setting `{setting_id}` is not found'
53 super().__init__(msg)
53 super().__init__(msg)
54
54
55
55
56 class SettingsModel(BaseModel):
56 class SettingsModel(BaseModel):
57 BUILTIN_HOOKS = (
57 BUILTIN_HOOKS = (
58 RhodeCodeUi.HOOK_REPO_SIZE, RhodeCodeUi.HOOK_PUSH,
58 RhodeCodeUi.HOOK_REPO_SIZE, RhodeCodeUi.HOOK_PUSH,
59 RhodeCodeUi.HOOK_PRE_PUSH, RhodeCodeUi.HOOK_PRETX_PUSH,
59 RhodeCodeUi.HOOK_PRE_PUSH, RhodeCodeUi.HOOK_PRETX_PUSH,
60 RhodeCodeUi.HOOK_PULL, RhodeCodeUi.HOOK_PRE_PULL,
60 RhodeCodeUi.HOOK_PULL, RhodeCodeUi.HOOK_PRE_PULL,
61 RhodeCodeUi.HOOK_PUSH_KEY,)
61 RhodeCodeUi.HOOK_PUSH_KEY,)
62 HOOKS_SECTION = 'hooks'
62 HOOKS_SECTION = 'hooks'
63
63
64 def __init__(self, sa=None, repo=None):
64 def __init__(self, sa=None, repo=None):
65 self.repo = repo
65 self.repo = repo
66 self.UiDbModel = RepoRhodeCodeUi if repo else RhodeCodeUi
66 self.UiDbModel = RepoRhodeCodeUi if repo else RhodeCodeUi
67 self.SettingsDbModel = (
67 self.SettingsDbModel = (
68 RepoRhodeCodeSetting if repo else RhodeCodeSetting)
68 RepoRhodeCodeSetting if repo else RhodeCodeSetting)
69 super().__init__(sa)
69 super().__init__(sa)
70
70
71 def get_keyname(self, key_name, prefix='rhodecode_'):
71 def get_keyname(self, key_name, prefix='rhodecode_'):
72 return f'{prefix}{key_name}'
72 return f'{prefix}{key_name}'
73
73
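# Illustrative sketch of the key prefixing above (the setting names are
# assumptions):
#
#     SettingsModel().get_keyname('use_gravatar')            # -> 'rhodecode_use_gravatar'
#     SettingsModel().get_keyname('pr_merge', prefix='ui_')  # -> 'ui_pr_merge'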
74 def get_ui_by_key(self, key):
74 def get_ui_by_key(self, key):
75 q = self.UiDbModel.query()
75 q = self.UiDbModel.query()
76 q = q.filter(self.UiDbModel.ui_key == key)
76 q = q.filter(self.UiDbModel.ui_key == key)
77 q = self._filter_by_repo(RepoRhodeCodeUi, q)
77 q = self._filter_by_repo(RepoRhodeCodeUi, q)
78 return q.scalar()
78 return q.scalar()
79
79
80 def get_ui_by_section(self, section):
80 def get_ui_by_section(self, section):
81 q = self.UiDbModel.query()
81 q = self.UiDbModel.query()
82 q = q.filter(self.UiDbModel.ui_section == section)
82 q = q.filter(self.UiDbModel.ui_section == section)
83 q = self._filter_by_repo(RepoRhodeCodeUi, q)
83 q = self._filter_by_repo(RepoRhodeCodeUi, q)
84 return q.all()
84 return q.all()
85
85
86 def get_ui_by_section_and_key(self, section, key):
86 def get_ui_by_section_and_key(self, section, key):
87 q = self.UiDbModel.query()
87 q = self.UiDbModel.query()
88 q = q.filter(self.UiDbModel.ui_section == section)
88 q = q.filter(self.UiDbModel.ui_section == section)
89 q = q.filter(self.UiDbModel.ui_key == key)
89 q = q.filter(self.UiDbModel.ui_key == key)
90 q = self._filter_by_repo(RepoRhodeCodeUi, q)
90 q = self._filter_by_repo(RepoRhodeCodeUi, q)
91 return q.scalar()
91 return q.scalar()
92
92
93 def get_ui(self, section=None, key=None):
93 def get_ui(self, section=None, key=None):
94 q = self.UiDbModel.query()
94 q = self.UiDbModel.query()
95 q = self._filter_by_repo(RepoRhodeCodeUi, q)
95 q = self._filter_by_repo(RepoRhodeCodeUi, q)
96
96
97 if section:
97 if section:
98 q = q.filter(self.UiDbModel.ui_section == section)
98 q = q.filter(self.UiDbModel.ui_section == section)
99 if key:
99 if key:
100 q = q.filter(self.UiDbModel.ui_key == key)
100 q = q.filter(self.UiDbModel.ui_key == key)
101
101
102 # TODO: mikhail: add caching
102 # TODO: mikhail: add caching
103 result = [
103 result = [
104 UiSetting(
104 UiSetting(
105 section=safe_str(r.ui_section), key=safe_str(r.ui_key),
105 section=safe_str(r.ui_section), key=safe_str(r.ui_key),
106 value=safe_str(r.ui_value), active=r.ui_active
106 value=safe_str(r.ui_value), active=r.ui_active
107 )
107 )
108 for r in q.all()
108 for r in q.all()
109 ]
109 ]
110 return result
110 return result
111
111
112 def get_builtin_hooks(self):
112 def get_builtin_hooks(self):
113 q = self.UiDbModel.query()
113 q = self.UiDbModel.query()
114 q = q.filter(self.UiDbModel.ui_key.in_(self.BUILTIN_HOOKS))
114 q = q.filter(self.UiDbModel.ui_key.in_(self.BUILTIN_HOOKS))
115 return self._get_hooks(q)
115 return self._get_hooks(q)
116
116
117 def get_custom_hooks(self):
117 def get_custom_hooks(self):
118 q = self.UiDbModel.query()
118 q = self.UiDbModel.query()
119 q = q.filter(~self.UiDbModel.ui_key.in_(self.BUILTIN_HOOKS))
119 q = q.filter(~self.UiDbModel.ui_key.in_(self.BUILTIN_HOOKS))
120 return self._get_hooks(q)
120 return self._get_hooks(q)
121
121
122 def create_ui_section_value(self, section, val, key=None, active=True):
122 def create_ui_section_value(self, section, val, key=None, active=True):
123 new_ui = self.UiDbModel()
123 new_ui = self.UiDbModel()
124 new_ui.ui_section = section
124 new_ui.ui_section = section
125 new_ui.ui_value = val
125 new_ui.ui_value = val
126 new_ui.ui_active = active
126 new_ui.ui_active = active
127
127
128 repository_id = ''
128 repository_id = ''
129 if self.repo:
129 if self.repo:
130 repo = self._get_repo(self.repo)
130 repo = self._get_repo(self.repo)
131 repository_id = repo.repo_id
131 repository_id = repo.repo_id
132 new_ui.repository_id = repository_id
132 new_ui.repository_id = repository_id
133
133
134 if not key:
134 if not key:
135 # keys are unique so they need appended info
135 # keys are unique so they need appended info
136 if self.repo:
136 if self.repo:
137 key = sha1_safe(f'{section}{val}{repository_id}')
137 key = sha1_safe(f'{section}{val}{repository_id}')
138 else:
138 else:
139 key = sha1_safe(f'{section}{val}')
139 key = sha1_safe(f'{section}{val}')
140
140
141 new_ui.ui_key = key
141 new_ui.ui_key = key
142
142
143 Session().add(new_ui)
143 Session().add(new_ui)
144 return new_ui
144 return new_ui
145
145
146 def create_or_update_hook(self, key, value):
146 def create_or_update_hook(self, key, value):
147 ui = (
147 ui = (
148 self.get_ui_by_section_and_key(self.HOOKS_SECTION, key) or
148 self.get_ui_by_section_and_key(self.HOOKS_SECTION, key) or
149 self.UiDbModel())
149 self.UiDbModel())
150 ui.ui_section = self.HOOKS_SECTION
150 ui.ui_section = self.HOOKS_SECTION
151 ui.ui_active = True
151 ui.ui_active = True
152 ui.ui_key = key
152 ui.ui_key = key
153 ui.ui_value = value
153 ui.ui_value = value
154
154
155 if self.repo:
155 if self.repo:
156 repo = self._get_repo(self.repo)
156 repo = self._get_repo(self.repo)
157 repository_id = repo.repo_id
157 repository_id = repo.repo_id
158 ui.repository_id = repository_id
158 ui.repository_id = repository_id
159
159
160 Session().add(ui)
160 Session().add(ui)
161 return ui
161 return ui
162
162
163 def delete_ui(self, id_):
163 def delete_ui(self, id_):
164 ui = self.UiDbModel.get(id_)
164 ui = self.UiDbModel.get(id_)
165 if not ui:
165 if not ui:
166 raise SettingNotFound(id_)
166 raise SettingNotFound(id_)
167 Session().delete(ui)
167 Session().delete(ui)
168
168
169 def get_setting_by_name(self, name):
169 def get_setting_by_name(self, name):
170 q = self._get_settings_query()
170 q = self._get_settings_query()
171 q = q.filter(self.SettingsDbModel.app_settings_name == name)
171 q = q.filter(self.SettingsDbModel.app_settings_name == name)
172 return q.scalar()
172 return q.scalar()
173
173
174 def create_or_update_setting(
174 def create_or_update_setting(
175 self, name, val: Optional | str = Optional(''), type_: Optional | str = Optional('unicode')):
175 self, name, val: Optional | str = Optional(''), type_: Optional | str = Optional('unicode')):
176 """
176 """
177 Creates or updates a RhodeCode setting. If an update is triggered, it will
177 Creates or updates a RhodeCode setting. If an update is triggered, it will
178 only update parameters that are explicitly set; any Optional instance will
178 only update parameters that are explicitly set; any Optional instance will
179 be skipped
179 be skipped
180
180
181 :param name:
181 :param name:
182 :param val:
182 :param val:
183 :param type_:
183 :param type_:
184 :return:
184 :return:
185 """
185 """
186
186
187 res = self.get_setting_by_name(name)
187 res = self.get_setting_by_name(name)
188 repo = self._get_repo(self.repo) if self.repo else None
188 repo = self._get_repo(self.repo) if self.repo else None
189
189
190 if not res:
190 if not res:
191 val = Optional.extract(val)
191 val = Optional.extract(val)
192 type_ = Optional.extract(type_)
192 type_ = Optional.extract(type_)
193
193
194 args = (
194 args = (
195 (repo.repo_id, name, val, type_)
195 (repo.repo_id, name, val, type_)
196 if repo else (name, val, type_))
196 if repo else (name, val, type_))
197 res = self.SettingsDbModel(*args)
197 res = self.SettingsDbModel(*args)
198
198
199 else:
199 else:
200 if self.repo:
200 if self.repo:
201 res.repository_id = repo.repo_id
201 res.repository_id = repo.repo_id
202
202
203 res.app_settings_name = name
203 res.app_settings_name = name
204 if not isinstance(type_, Optional):
204 if not isinstance(type_, Optional):
205 # update if set
205 # update if set
206 res.app_settings_type = type_
206 res.app_settings_type = type_
207 if not isinstance(val, Optional):
207 if not isinstance(val, Optional):
208 # update if set
208 # update if set
209 res.app_settings_value = val
209 res.app_settings_value = val
210
210
211 Session().add(res)
211 Session().add(res)
212 return res
212 return res
213
213
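# Illustrative sketch of the Optional semantics described above; the setting
# name and values are assumptions:
#
#     model = SettingsModel()
#     # creates the setting with an explicit value and type
#     model.create_or_update_setting('title', 'My RhodeCode', 'unicode')
#     # updates only the value; the type is left as an Optional default and
#     # therefore stays untouched
#     model.create_or_update_setting('title', 'Renamed instance')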
214 def get_cache_region(self):
214 def get_cache_region(self):
215 repo = self._get_repo(self.repo) if self.repo else None
215 repo = self._get_repo(self.repo) if self.repo else None
216 cache_key = f"repo.v1.{repo.repo_id}" if repo else "repo.v1.ALL"
216 cache_key = f"repo.v1.{repo.repo_id}" if repo else "repo.v1.ALL"
217 cache_namespace_uid = f'cache_settings.{cache_key}'
217 cache_namespace_uid = f'cache_settings.{cache_key}'
218 region = rc_cache.get_or_create_region('cache_general', cache_namespace_uid)
218 region = rc_cache.get_or_create_region('cache_general', cache_namespace_uid)
219 return region, cache_namespace_uid
219 return region, cache_namespace_uid
220
220
221 def invalidate_settings_cache(self, hard=False):
221 def invalidate_settings_cache(self, hard=False):
222 region, namespace_key = self.get_cache_region()
222 region, namespace_key = self.get_cache_region()
223 log.debug('Invalidating cache [%s] region %s for cache_key: %s',
223 log.debug('Invalidating cache [%s] region %s for cache_key: %s',
224 'invalidate_settings_cache', region, namespace_key)
224 'invalidate_settings_cache', region, namespace_key)
225
225
226 # we use hard cleanup if invalidation is sent
226 # we use hard cleanup if invalidation is sent
227 rc_cache.clear_cache_namespace(region, namespace_key, method=rc_cache.CLEAR_DELETE)
227 rc_cache.clear_cache_namespace(region, namespace_key, method=rc_cache.CLEAR_DELETE)
228
228
229 def get_cache_call_method(self, cache=True):
229 def get_cache_call_method(self, cache=True):
230 region, cache_key = self.get_cache_region()
230 region, cache_key = self.get_cache_region()
231
231
232 @region.conditional_cache_on_arguments(condition=cache)
232 @region.conditional_cache_on_arguments(condition=cache)
233 def _get_all_settings(name, key):
233 def _get_all_settings(name, key):
234 q = self._get_settings_query()
234 q = self._get_settings_query()
235 if not q:
235 if not q:
236 raise Exception('Could not get application settings!')
236 raise Exception('Could not get application settings!')
237
237
238 settings = {
238 settings = {
239 self.get_keyname(res.app_settings_name): res.app_settings_value
239 self.get_keyname(res.app_settings_name): res.app_settings_value
240 for res in q
240 for res in q
241 }
241 }
242 return settings
242 return settings
243 return _get_all_settings
243 return _get_all_settings
244
244
245 def get_all_settings(self, cache=False, from_request=True):
245 def get_all_settings(self, cache=False, from_request=True):
246 # defines if we use GLOBAL, or PER_REPO
246 # defines if we use GLOBAL, or PER_REPO
247 repo = self._get_repo(self.repo) if self.repo else None
247 repo = self._get_repo(self.repo) if self.repo else None
248
248
249 # initially try the request context; this is the fastest
249 # initially try the request context; this is the fastest
250 # we only fetch global config, NOT for repo-specific
250 # we only fetch global config, NOT for repo-specific
251 if from_request and not repo:
251 if from_request and not repo:
252 request = get_current_request()
252 request = get_current_request()
253
253
254 if request and hasattr(request, 'call_context') and hasattr(request.call_context, 'rc_config'):
254 if request and hasattr(request, 'call_context') and hasattr(request.call_context, 'rc_config'):
255 rc_config = request.call_context.rc_config
255 rc_config = request.call_context.rc_config
256 if rc_config:
256 if rc_config:
257 return rc_config
257 return rc_config
258
258
259 _region, cache_key = self.get_cache_region()
259 _region, cache_key = self.get_cache_region()
260 _get_all_settings = self.get_cache_call_method(cache=cache)
260 _get_all_settings = self.get_cache_call_method(cache=cache)
261
261
262 start = time.time()
262 start = time.time()
263 result = _get_all_settings('rhodecode_settings', cache_key)
263 result = _get_all_settings('rhodecode_settings', cache_key)
264 compute_time = time.time() - start
264 compute_time = time.time() - start
265 log.debug('cached method:%s took %.4fs', _get_all_settings.__name__, compute_time)
265 log.debug('cached method:%s took %.4fs', _get_all_settings.__name__, compute_time)
266
266
267 statsd = StatsdClient.statsd
267 statsd = StatsdClient.statsd
268 if statsd:
268 if statsd:
269 elapsed_time_ms = round(1000.0 * compute_time) # use ms only
269 elapsed_time_ms = round(1000.0 * compute_time) # use ms only
270 statsd.timing("rhodecode_settings_timing.histogram", elapsed_time_ms,
270 statsd.timing("rhodecode_settings_timing.histogram", elapsed_time_ms,
271 use_decimals=False)
271 use_decimals=False)
272
272
273 log.debug('Fetching app settings for key: %s took: %.4fs: cache: %s', cache_key, compute_time, cache)
273 log.debug('Fetching app settings for key: %s took: %.4fs: cache: %s', cache_key, compute_time, cache)
274
274
275 return result
275 return result
276
276
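# Illustrative sketch: reading the merged settings dict, first from the
# request context when available, otherwise through the cache region. The
# key name is an assumption:
#
#     settings = SettingsModel().get_all_settings(cache=True)
#     title = settings.get('rhodecode_title')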
277 def get_auth_settings(self):
277 def get_auth_settings(self):
278 q = self._get_settings_query()
278 q = self._get_settings_query()
279 q = q.filter(
279 q = q.filter(
280 self.SettingsDbModel.app_settings_name.startswith('auth_'))
280 self.SettingsDbModel.app_settings_name.startswith('auth_'))
281 rows = q.all()
281 rows = q.all()
282 auth_settings = {
282 auth_settings = {
283 row.app_settings_name: row.app_settings_value for row in rows}
283 row.app_settings_name: row.app_settings_value for row in rows}
284 return auth_settings
284 return auth_settings
285
285
286 def get_auth_plugins(self):
286 def get_auth_plugins(self):
287 auth_plugins = self.get_setting_by_name("auth_plugins")
287 auth_plugins = self.get_setting_by_name("auth_plugins")
288 return auth_plugins.app_settings_value
288 return auth_plugins.app_settings_value
289
289
290 def get_default_repo_settings(self, strip_prefix=False):
290 def get_default_repo_settings(self, strip_prefix=False):
291 q = self._get_settings_query()
291 q = self._get_settings_query()
292 q = q.filter(
292 q = q.filter(
293 self.SettingsDbModel.app_settings_name.startswith('default_'))
293 self.SettingsDbModel.app_settings_name.startswith('default_'))
294 rows = q.all()
294 rows = q.all()
295
295
296 result = {}
296 result = {}
297 for row in rows:
297 for row in rows:
298 key = row.app_settings_name
298 key = row.app_settings_name
299 if strip_prefix:
299 if strip_prefix:
300 key = remove_prefix(key, prefix='default_')
300 key = remove_prefix(key, prefix='default_')
301 result.update({key: row.app_settings_value})
301 result.update({key: row.app_settings_value})
302 return result
302 return result
303
303
304 def get_repo(self):
304 def get_repo(self):
305 repo = self._get_repo(self.repo)
305 repo = self._get_repo(self.repo)
306 if not repo:
306 if not repo:
307 raise Exception(
307 raise Exception(
308 f'Repository `{self.repo}` cannot be found inside the database')
308 f'Repository `{self.repo}` cannot be found inside the database')
309 return repo
309 return repo
310
310
311 def _filter_by_repo(self, model, query):
311 def _filter_by_repo(self, model, query):
312 if self.repo:
312 if self.repo:
313 repo = self.get_repo()
313 repo = self.get_repo()
314 query = query.filter(model.repository_id == repo.repo_id)
314 query = query.filter(model.repository_id == repo.repo_id)
315 return query
315 return query
316
316
317 def _get_hooks(self, query):
317 def _get_hooks(self, query):
318 query = query.filter(self.UiDbModel.ui_section == self.HOOKS_SECTION)
318 query = query.filter(self.UiDbModel.ui_section == self.HOOKS_SECTION)
319 query = self._filter_by_repo(RepoRhodeCodeUi, query)
319 query = self._filter_by_repo(RepoRhodeCodeUi, query)
320 return query.all()
320 return query.all()
321
321
322 def _get_settings_query(self):
322 def _get_settings_query(self):
323 q = self.SettingsDbModel.query()
323 q = self.SettingsDbModel.query()
324 return self._filter_by_repo(RepoRhodeCodeSetting, q)
324 return self._filter_by_repo(RepoRhodeCodeSetting, q)
325
325
326 def list_enabled_social_plugins(self, settings):
326 def list_enabled_social_plugins(self, settings):
327 enabled = []
327 enabled = []
328 for plug in SOCIAL_PLUGINS_LIST:
328 for plug in SOCIAL_PLUGINS_LIST:
329 if str2bool(settings.get(f'rhodecode_auth_{plug}_enabled')):
329 if str2bool(settings.get(f'rhodecode_auth_{plug}_enabled')):
330 enabled.append(plug)
330 enabled.append(plug)
331 return enabled
331 return enabled
332
332
333
333
334 def assert_repo_settings(func):
334 def assert_repo_settings(func):
335 @functools.wraps(func)
335 @functools.wraps(func)
336 def _wrapper(self, *args, **kwargs):
336 def _wrapper(self, *args, **kwargs):
337 if not self.repo_settings:
337 if not self.repo_settings:
338 raise Exception('Repository is not specified')
338 raise Exception('Repository is not specified')
339 return func(self, *args, **kwargs)
339 return func(self, *args, **kwargs)
340 return _wrapper
340 return _wrapper
341
341
342
342
343 class IssueTrackerSettingsModel(object):
343 class IssueTrackerSettingsModel(object):
344 INHERIT_SETTINGS = 'inherit_issue_tracker_settings'
344 INHERIT_SETTINGS = 'inherit_issue_tracker_settings'
345 SETTINGS_PREFIX = 'issuetracker_'
345 SETTINGS_PREFIX = 'issuetracker_'
346
346
347 def __init__(self, sa=None, repo=None):
347 def __init__(self, sa=None, repo=None):
348 self.global_settings = SettingsModel(sa=sa)
348 self.global_settings = SettingsModel(sa=sa)
349 self.repo_settings = SettingsModel(sa=sa, repo=repo) if repo else None
349 self.repo_settings = SettingsModel(sa=sa, repo=repo) if repo else None
350
350
351 @property
351 @property
352 def inherit_global_settings(self):
352 def inherit_global_settings(self):
353 if not self.repo_settings:
353 if not self.repo_settings:
354 return True
354 return True
355 setting = self.repo_settings.get_setting_by_name(self.INHERIT_SETTINGS)
355 setting = self.repo_settings.get_setting_by_name(self.INHERIT_SETTINGS)
356 return setting.app_settings_value if setting else True
356 return setting.app_settings_value if setting else True
357
357
358 @inherit_global_settings.setter
358 @inherit_global_settings.setter
359 def inherit_global_settings(self, value):
359 def inherit_global_settings(self, value):
360 if self.repo_settings:
360 if self.repo_settings:
361 settings = self.repo_settings.create_or_update_setting(
361 settings = self.repo_settings.create_or_update_setting(
362 self.INHERIT_SETTINGS, value, type_='bool')
362 self.INHERIT_SETTINGS, value, type_='bool')
363 Session().add(settings)
363 Session().add(settings)
364
364
365 def _get_keyname(self, key, uid, prefix='rhodecode_'):
365 def _get_keyname(self, key, uid, prefix='rhodecode_'):
366 return f'{prefix}{self.SETTINGS_PREFIX}{key}_{uid}'
366 return f'{prefix}{self.SETTINGS_PREFIX}{key}_{uid}'
367
367
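# A minimal sketch (hypothetical uid '1', shown only for illustration) of the
# setting-key scheme produced by _get_keyname above.
SETTINGS_PREFIX = 'issuetracker_'

def get_keyname(key, uid, prefix='rhodecode_'):
    # mirrors IssueTrackerSettingsModel._get_keyname
    return f'{prefix}{SETTINGS_PREFIX}{key}_{uid}'

assert get_keyname('pat', '1') == 'rhodecode_issuetracker_pat_1'
assert get_keyname('url', '1', prefix='') == 'issuetracker_url_1'  # form used by delete_entries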
368 def _make_dict_for_settings(self, qs):
368 def _make_dict_for_settings(self, qs):
369 prefix_match = self._get_keyname('pat', '',)
369 prefix_match = self._get_keyname('pat', '',)
370
370
371 issuetracker_entries = {}
371 issuetracker_entries = {}
372 # create keys
372 # create keys
373 for k, v in qs.items():
373 for k, v in qs.items():
374 if k.startswith(prefix_match):
374 if k.startswith(prefix_match):
375 uid = k[len(prefix_match):]
375 uid = k[len(prefix_match):]
376 issuetracker_entries[uid] = None
376 issuetracker_entries[uid] = None
377
377
378 def url_cleaner(input_str):
378 def url_cleaner(input_str):
379 input_str = input_str.replace('"', '').replace("'", '')
379 input_str = input_str.replace('"', '').replace("'", '')
380 input_str = sanitize_html(input_str, strip=True)
380 input_str = sanitize_html(input_str, strip=True)
381 return input_str
381 return input_str
382
382
383 # populate
383 # populate
384 for uid in issuetracker_entries:
384 for uid in issuetracker_entries:
385 url_data = qs.get(self._get_keyname('url', uid))
385 url_data = qs.get(self._get_keyname('url', uid))
386
386
387 pat = qs.get(self._get_keyname('pat', uid))
387 pat = qs.get(self._get_keyname('pat', uid))
388 try:
388 try:
389 pat_compiled = re.compile(r'%s' % pat)
389 pat_compiled = re.compile(r'%s' % pat)
390 except re.error:
390 except re.error:
391 pat_compiled = None
391 pat_compiled = None
392
392
393 issuetracker_entries[uid] = AttributeDict({
393 issuetracker_entries[uid] = AttributeDict({
394 'pat': pat,
394 'pat': pat,
395 'pat_compiled': pat_compiled,
395 'pat_compiled': pat_compiled,
396 'url': url_cleaner(
396 'url': url_cleaner(
397 qs.get(self._get_keyname('url', uid)) or ''),
397 qs.get(self._get_keyname('url', uid)) or ''),
398 'pref': sanitize_html(
398 'pref': sanitize_html(
399 qs.get(self._get_keyname('pref', uid)) or ''),
399 qs.get(self._get_keyname('pref', uid)) or ''),
400 'desc': qs.get(
400 'desc': qs.get(
401 self._get_keyname('desc', uid)),
401 self._get_keyname('desc', uid)),
402 })
402 })
403
403
404 return issuetracker_entries
404 return issuetracker_entries
405
405
406 def get_global_settings(self, cache=False):
406 def get_global_settings(self, cache=False):
407 """
407 """
408 Returns list of global issue tracker settings
408 Returns list of global issue tracker settings
409 """
409 """
410 defaults = self.global_settings.get_all_settings(cache=cache)
410 defaults = self.global_settings.get_all_settings(cache=cache)
411 settings = self._make_dict_for_settings(defaults)
411 settings = self._make_dict_for_settings(defaults)
412 return settings
412 return settings
413
413
414 def get_repo_settings(self, cache=False):
414 def get_repo_settings(self, cache=False):
415 """
415 """
416 Returns list of issue tracker settings per repository
416 Returns list of issue tracker settings per repository
417 """
417 """
418 if not self.repo_settings:
418 if not self.repo_settings:
419 raise Exception('Repository is not specified')
419 raise Exception('Repository is not specified')
420 all_settings = self.repo_settings.get_all_settings(cache=cache)
420 all_settings = self.repo_settings.get_all_settings(cache=cache)
421 settings = self._make_dict_for_settings(all_settings)
421 settings = self._make_dict_for_settings(all_settings)
422 return settings
422 return settings
423
423
424 def get_settings(self, cache=False):
424 def get_settings(self, cache=False):
425 if self.inherit_global_settings:
425 if self.inherit_global_settings:
426 return self.get_global_settings(cache=cache)
426 return self.get_global_settings(cache=cache)
427 else:
427 else:
428 return self.get_repo_settings(cache=cache)
428 return self.get_repo_settings(cache=cache)
429
429
430 def delete_entries(self, uid):
430 def delete_entries(self, uid):
431 if self.repo_settings:
431 if self.repo_settings:
432 all_patterns = self.get_repo_settings()
432 all_patterns = self.get_repo_settings()
433 settings_model = self.repo_settings
433 settings_model = self.repo_settings
434 else:
434 else:
435 all_patterns = self.get_global_settings()
435 all_patterns = self.get_global_settings()
436 settings_model = self.global_settings
436 settings_model = self.global_settings
437 entries = all_patterns.get(uid, [])
437 entries = all_patterns.get(uid, [])
438
438
439 for del_key in entries:
439 for del_key in entries:
440 setting_name = self._get_keyname(del_key, uid, prefix='')
440 setting_name = self._get_keyname(del_key, uid, prefix='')
441 entry = settings_model.get_setting_by_name(setting_name)
441 entry = settings_model.get_setting_by_name(setting_name)
442 if entry:
442 if entry:
443 Session().delete(entry)
443 Session().delete(entry)
444
444
445 Session().commit()
445 Session().commit()
446
446
447 def create_or_update_setting(
447 def create_or_update_setting(
448 self, name, val=Optional(''), type_=Optional('unicode')):
448 self, name, val=Optional(''), type_=Optional('unicode')):
449 if self.repo_settings:
449 if self.repo_settings:
450 setting = self.repo_settings.create_or_update_setting(
450 setting = self.repo_settings.create_or_update_setting(
451 name, val, type_)
451 name, val, type_)
452 else:
452 else:
453 setting = self.global_settings.create_or_update_setting(
453 setting = self.global_settings.create_or_update_setting(
454 name, val, type_)
454 name, val, type_)
455 return setting
455 return setting
456
456
457
457
458 class VcsSettingsModel(object):
458 class VcsSettingsModel(object):
459
459
460 INHERIT_SETTINGS = 'inherit_vcs_settings'
460 INHERIT_SETTINGS = 'inherit_vcs_settings'
461 GENERAL_SETTINGS = (
461 GENERAL_SETTINGS = (
462 'use_outdated_comments',
462 'use_outdated_comments',
463 'pr_merge_enabled',
463 'pr_merge_enabled',
464 'hg_use_rebase_for_merging',
464 'hg_use_rebase_for_merging',
465 'hg_close_branch_before_merging',
465 'hg_close_branch_before_merging',
466 'git_use_rebase_for_merging',
466 'git_use_rebase_for_merging',
467 'git_close_branch_before_merging',
467 'git_close_branch_before_merging',
468 'diff_cache',
468 'diff_cache',
469 )
469 )
470
470
471 HOOKS_SETTINGS = (
471 HOOKS_SETTINGS = (
472 ('hooks', 'changegroup.repo_size'),
472 ('hooks', 'changegroup.repo_size'),
473 ('hooks', 'changegroup.push_logger'),
473 ('hooks', 'changegroup.push_logger'),
474 ('hooks', 'outgoing.pull_logger'),
474 ('hooks', 'outgoing.pull_logger'),
475 )
475 )
476 HG_SETTINGS = (
476 HG_SETTINGS = (
477 ('extensions', 'largefiles'),
477 ('extensions', 'largefiles'),
478 ('phases', 'publish'),
478 ('phases', 'publish'),
479 ('extensions', 'evolve'),
479 ('extensions', 'evolve'),
480 ('extensions', 'topic'),
480 ('extensions', 'topic'),
481 ('experimental', 'evolution'),
481 ('experimental', 'evolution'),
482 ('experimental', 'evolution.exchange'),
482 ('experimental', 'evolution.exchange'),
483 )
483 )
484 GIT_SETTINGS = (
484 GIT_SETTINGS = (
485 ('vcs_git_lfs', 'enabled'),
485 ('vcs_git_lfs', 'enabled'),
486 )
486 )
487 GLOBAL_HG_SETTINGS = (
487 GLOBAL_HG_SETTINGS = (
488 ('extensions', 'largefiles'),
488 ('extensions', 'largefiles'),
489 ('largefiles', 'usercache'),
489 ('largefiles', 'usercache'),
490 ('phases', 'publish'),
490 ('phases', 'publish'),
491 ('extensions', 'evolve'),
491 ('extensions', 'evolve'),
492 ('extensions', 'topic'),
492 ('extensions', 'topic'),
493 ('experimental', 'evolution'),
493 ('experimental', 'evolution'),
494 ('experimental', 'evolution.exchange'),
494 ('experimental', 'evolution.exchange'),
495 )
495 )
496
496
497 GLOBAL_GIT_SETTINGS = (
497 GLOBAL_GIT_SETTINGS = (
498 ('vcs_git_lfs', 'enabled'),
498 ('vcs_git_lfs', 'enabled'),
499 ('vcs_git_lfs', 'store_location')
499 ('vcs_git_lfs', 'store_location')
500 )
500 )
501
501
502 SVN_BRANCH_SECTION = 'vcs_svn_branch'
502 SVN_BRANCH_SECTION = 'vcs_svn_branch'
503 SVN_TAG_SECTION = 'vcs_svn_tag'
503 SVN_TAG_SECTION = 'vcs_svn_tag'
504 SSL_SETTING = ('web', 'push_ssl')
504 SSL_SETTING = ('web', 'push_ssl')
505 PATH_SETTING = ('paths', '/')
505 PATH_SETTING = ('paths', '/')
506
506
507 def __init__(self, sa=None, repo=None):
507 def __init__(self, sa=None, repo=None):
508 self.global_settings = SettingsModel(sa=sa)
508 self.global_settings = SettingsModel(sa=sa)
509 self.repo_settings = SettingsModel(sa=sa, repo=repo) if repo else None
509 self.repo_settings = SettingsModel(sa=sa, repo=repo) if repo else None
510 self._ui_settings = (
510 self._ui_settings = (
511 self.HG_SETTINGS + self.GIT_SETTINGS + self.HOOKS_SETTINGS)
511 self.HG_SETTINGS + self.GIT_SETTINGS + self.HOOKS_SETTINGS)
512 self._svn_sections = (self.SVN_BRANCH_SECTION, self.SVN_TAG_SECTION)
512 self._svn_sections = (self.SVN_BRANCH_SECTION, self.SVN_TAG_SECTION)
513
513
514 @property
514 @property
515 @assert_repo_settings
515 @assert_repo_settings
516 def inherit_global_settings(self):
516 def inherit_global_settings(self):
517 setting = self.repo_settings.get_setting_by_name(self.INHERIT_SETTINGS)
517 setting = self.repo_settings.get_setting_by_name(self.INHERIT_SETTINGS)
518 return setting.app_settings_value if setting else True
518 return setting.app_settings_value if setting else True
519
519
520 @inherit_global_settings.setter
520 @inherit_global_settings.setter
521 @assert_repo_settings
521 @assert_repo_settings
522 def inherit_global_settings(self, value):
522 def inherit_global_settings(self, value):
523 self.repo_settings.create_or_update_setting(
523 self.repo_settings.create_or_update_setting(
524 self.INHERIT_SETTINGS, value, type_='bool')
524 self.INHERIT_SETTINGS, value, type_='bool')
525
525
526 def get_keyname(self, key_name, prefix='rhodecode_'):
526 def get_keyname(self, key_name, prefix='rhodecode_'):
527 return f'{prefix}{key_name}'
527 return f'{prefix}{key_name}'
528
528
529 def get_global_svn_branch_patterns(self):
529 def get_global_svn_branch_patterns(self):
530 return self.global_settings.get_ui_by_section(self.SVN_BRANCH_SECTION)
530 return self.global_settings.get_ui_by_section(self.SVN_BRANCH_SECTION)
531
531
532 @assert_repo_settings
532 @assert_repo_settings
533 def get_repo_svn_branch_patterns(self):
533 def get_repo_svn_branch_patterns(self):
534 return self.repo_settings.get_ui_by_section(self.SVN_BRANCH_SECTION)
534 return self.repo_settings.get_ui_by_section(self.SVN_BRANCH_SECTION)
535
535
536 def get_global_svn_tag_patterns(self):
536 def get_global_svn_tag_patterns(self):
537 return self.global_settings.get_ui_by_section(self.SVN_TAG_SECTION)
537 return self.global_settings.get_ui_by_section(self.SVN_TAG_SECTION)
538
538
539 @assert_repo_settings
539 @assert_repo_settings
540 def get_repo_svn_tag_patterns(self):
540 def get_repo_svn_tag_patterns(self):
541 return self.repo_settings.get_ui_by_section(self.SVN_TAG_SECTION)
541 return self.repo_settings.get_ui_by_section(self.SVN_TAG_SECTION)
542
542
543 def get_global_settings(self):
543 def get_global_settings(self):
544 return self._collect_all_settings(global_=True)
544 return self._collect_all_settings(global_=True)
545
545
546 @assert_repo_settings
546 @assert_repo_settings
547 def get_repo_settings(self):
547 def get_repo_settings(self):
548 return self._collect_all_settings(global_=False)
548 return self._collect_all_settings(global_=False)
549
549
550 @assert_repo_settings
550 @assert_repo_settings
551 def get_repo_settings_inherited(self):
551 def get_repo_settings_inherited(self):
552 global_settings = self.get_global_settings()
552 global_settings = self.get_global_settings()
553 global_settings.update(self.get_repo_settings())
553 global_settings.update(self.get_repo_settings())
554 return global_settings
554 return global_settings
555
555
556 @assert_repo_settings
556 @assert_repo_settings
557 def create_or_update_repo_settings(
557 def create_or_update_repo_settings(
558 self, data, inherit_global_settings=False):
558 self, data, inherit_global_settings=False):
559 from rhodecode.model.scm import ScmModel
559 from rhodecode.model.scm import ScmModel
560
560
561 self.inherit_global_settings = inherit_global_settings
561 self.inherit_global_settings = inherit_global_settings
562
562
563 repo = self.repo_settings.get_repo()
563 repo = self.repo_settings.get_repo()
564 if not inherit_global_settings:
564 if not inherit_global_settings:
565 if repo.repo_type == 'svn':
565 if repo.repo_type == 'svn':
566 self.create_repo_svn_settings(data)
566 self.create_repo_svn_settings(data)
567 else:
567 else:
568 self.create_or_update_repo_hook_settings(data)
568 self.create_or_update_repo_hook_settings(data)
569 self.create_or_update_repo_pr_settings(data)
569 self.create_or_update_repo_pr_settings(data)
570
570
571 if repo.repo_type == 'hg':
571 if repo.repo_type == 'hg':
572 self.create_or_update_repo_hg_settings(data)
572 self.create_or_update_repo_hg_settings(data)
573
573
574 if repo.repo_type == 'git':
574 if repo.repo_type == 'git':
575 self.create_or_update_repo_git_settings(data)
575 self.create_or_update_repo_git_settings(data)
576
576
577 ScmModel().mark_for_invalidation(repo.repo_name, delete=True)
577 ScmModel().mark_for_invalidation(repo.repo_name, delete=True)
578
578
579 @assert_repo_settings
579 @assert_repo_settings
580 def create_or_update_repo_hook_settings(self, data):
580 def create_or_update_repo_hook_settings(self, data):
581 for section, key in self.HOOKS_SETTINGS:
581 for section, key in self.HOOKS_SETTINGS:
582 data_key = self._get_form_ui_key(section, key)
582 data_key = self._get_form_ui_key(section, key)
583 if data_key not in data:
583 if data_key not in data:
584 raise ValueError(
584 raise ValueError(
585 f'The given data does not contain {data_key} key')
585 f'The given data does not contain {data_key} key')
586
586
587 active = data.get(data_key)
587 active = data.get(data_key)
588 repo_setting = self.repo_settings.get_ui_by_section_and_key(
588 repo_setting = self.repo_settings.get_ui_by_section_and_key(
589 section, key)
589 section, key)
590 if not repo_setting:
590 if not repo_setting:
591 global_setting = self.global_settings.\
591 global_setting = self.global_settings.\
592 get_ui_by_section_and_key(section, key)
592 get_ui_by_section_and_key(section, key)
593 self.repo_settings.create_ui_section_value(
593 self.repo_settings.create_ui_section_value(
594 section, global_setting.ui_value, key=key, active=active)
594 section, global_setting.ui_value, key=key, active=active)
595 else:
595 else:
596 repo_setting.ui_active = active
596 repo_setting.ui_active = active
597 Session().add(repo_setting)
597 Session().add(repo_setting)
598
598
599 def update_global_hook_settings(self, data):
599 def update_global_hook_settings(self, data):
600 for section, key in self.HOOKS_SETTINGS:
600 for section, key in self.HOOKS_SETTINGS:
601 data_key = self._get_form_ui_key(section, key)
601 data_key = self._get_form_ui_key(section, key)
602 if data_key not in data:
602 if data_key not in data:
603 raise ValueError(
603 raise ValueError(
604 f'The given data does not contain {data_key} key')
604 f'The given data does not contain {data_key} key')
605 active = data.get(data_key)
605 active = data.get(data_key)
606 repo_setting = self.global_settings.get_ui_by_section_and_key(
606 repo_setting = self.global_settings.get_ui_by_section_and_key(
607 section, key)
607 section, key)
608 repo_setting.ui_active = active
608 repo_setting.ui_active = active
609 Session().add(repo_setting)
609 Session().add(repo_setting)
610
610
611 @assert_repo_settings
611 @assert_repo_settings
612 def create_or_update_repo_pr_settings(self, data):
612 def create_or_update_repo_pr_settings(self, data):
613 return self._create_or_update_general_settings(
613 return self._create_or_update_general_settings(
614 self.repo_settings, data)
614 self.repo_settings, data)
615
615
616 def create_or_update_global_pr_settings(self, data):
616 def create_or_update_global_pr_settings(self, data):
617 return self._create_or_update_general_settings(
617 return self._create_or_update_general_settings(
618 self.global_settings, data)
618 self.global_settings, data)
619
619
620 @assert_repo_settings
620 @assert_repo_settings
621 def create_repo_svn_settings(self, data):
621 def create_repo_svn_settings(self, data):
622 return self._create_svn_settings(self.repo_settings, data)
622 return self._create_svn_settings(self.repo_settings, data)
623
623
624 def _set_evolution(self, settings, is_enabled):
624 def _set_evolution(self, settings, is_enabled):
625 if is_enabled:
625 if is_enabled:
626 # if evolve is active set evolution=all
626 # if evolve is active set evolution=all
627
627
628 self._create_or_update_ui(
628 self._create_or_update_ui(
629 settings, *('experimental', 'evolution'), value='all',
629 settings, *('experimental', 'evolution'), value='all',
630 active=True)
630 active=True)
631 self._create_or_update_ui(
631 self._create_or_update_ui(
632 settings, *('experimental', 'evolution.exchange'), value='yes',
632 settings, *('experimental', 'evolution.exchange'), value='yes',
633 active=True)
633 active=True)
634 # if evolve is active, enable server-side topic support
634 # if evolve is active, enable server-side topic support
635 self._create_or_update_ui(
635 self._create_or_update_ui(
636 settings, *('extensions', 'topic'), value='',
636 settings, *('extensions', 'topic'), value='',
637 active=True)
637 active=True)
638
638
639 else:
639 else:
640 self._create_or_update_ui(
640 self._create_or_update_ui(
641 settings, *('experimental', 'evolution'), value='',
641 settings, *('experimental', 'evolution'), value='',
642 active=False)
642 active=False)
643 self._create_or_update_ui(
643 self._create_or_update_ui(
644 settings, *('experimental', 'evolution.exchange'), value='no',
644 settings, *('experimental', 'evolution.exchange'), value='no',
645 active=False)
645 active=False)
646 self._create_or_update_ui(
646 self._create_or_update_ui(
647 settings, *('extensions', 'topic'), value='',
647 settings, *('extensions', 'topic'), value='',
648 active=False)
648 active=False)
649
649
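# A rough sketch, derived from _set_evolution above, of the (section, key) ->
# (value, active) UI entries it writes; shown only for illustration.
evolution_ui_when_enabled = {
    ('experimental', 'evolution'): ('all', True),
    ('experimental', 'evolution.exchange'): ('yes', True),
    ('extensions', 'topic'): ('', True),
}
evolution_ui_when_disabled = {
    ('experimental', 'evolution'): ('', False),
    ('experimental', 'evolution.exchange'): ('no', False),
    ('extensions', 'topic'): ('', False),
}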
650 @assert_repo_settings
650 @assert_repo_settings
651 def create_or_update_repo_hg_settings(self, data):
651 def create_or_update_repo_hg_settings(self, data):
652 largefiles, phases, evolve = \
652 largefiles, phases, evolve = \
653 self.HG_SETTINGS[:3]
653 self.HG_SETTINGS[:3]
654 largefiles_key, phases_key, evolve_key = \
654 largefiles_key, phases_key, evolve_key = \
655 self._get_settings_keys(self.HG_SETTINGS[:3], data)
655 self._get_settings_keys(self.HG_SETTINGS[:3], data)
656
656
657 self._create_or_update_ui(
657 self._create_or_update_ui(
658 self.repo_settings, *largefiles, value='',
658 self.repo_settings, *largefiles, value='',
659 active=data[largefiles_key])
659 active=data[largefiles_key])
660 self._create_or_update_ui(
660 self._create_or_update_ui(
661 self.repo_settings, *evolve, value='',
661 self.repo_settings, *evolve, value='',
662 active=data[evolve_key])
662 active=data[evolve_key])
663 self._set_evolution(self.repo_settings, is_enabled=data[evolve_key])
663 self._set_evolution(self.repo_settings, is_enabled=data[evolve_key])
664
664
665 self._create_or_update_ui(
665 self._create_or_update_ui(
666 self.repo_settings, *phases, value=safe_str(data[phases_key]))
666 self.repo_settings, *phases, value=safe_str(data[phases_key]))
667
667
668 def create_or_update_global_hg_settings(self, data):
668 def create_or_update_global_hg_settings(self, data):
669 opts_len = 4
669 opts_len = 4
670 largefiles, largefiles_store, phases, evolve \
670 largefiles, largefiles_store, phases, evolve \
671 = self.GLOBAL_HG_SETTINGS[:opts_len]
671 = self.GLOBAL_HG_SETTINGS[:opts_len]
672 largefiles_key, largefiles_store_key, phases_key, evolve_key \
672 largefiles_key, largefiles_store_key, phases_key, evolve_key \
673 = self._get_settings_keys(self.GLOBAL_HG_SETTINGS[:opts_len], data)
673 = self._get_settings_keys(self.GLOBAL_HG_SETTINGS[:opts_len], data)
674
674
675 self._create_or_update_ui(
675 self._create_or_update_ui(
676 self.global_settings, *largefiles, value='',
676 self.global_settings, *largefiles, value='',
677 active=data[largefiles_key])
677 active=data[largefiles_key])
678 self._create_or_update_ui(
678 self._create_or_update_ui(
679 self.global_settings, *largefiles_store, value=data[largefiles_store_key])
679 self.global_settings, *largefiles_store, value=data[largefiles_store_key])
680 self._create_or_update_ui(
680 self._create_or_update_ui(
681 self.global_settings, *phases, value=safe_str(data[phases_key]))
681 self.global_settings, *phases, value=safe_str(data[phases_key]))
682 self._create_or_update_ui(
682 self._create_or_update_ui(
683 self.global_settings, *evolve, value='',
683 self.global_settings, *evolve, value='',
684 active=data[evolve_key])
684 active=data[evolve_key])
685 self._set_evolution(self.global_settings, is_enabled=data[evolve_key])
685 self._set_evolution(self.global_settings, is_enabled=data[evolve_key])
686
686
687 def create_or_update_repo_git_settings(self, data):
687 def create_or_update_repo_git_settings(self, data):
688 # NOTE(marcink): the trailing comma makes single-element unpacking work properly
688 # NOTE(marcink): the trailing comma makes single-element unpacking work properly
689 lfs_enabled, \
689 lfs_enabled, \
690 = self.GIT_SETTINGS
690 = self.GIT_SETTINGS
691
691
692 lfs_enabled_key, \
692 lfs_enabled_key, \
693 = self._get_settings_keys(self.GIT_SETTINGS, data)
693 = self._get_settings_keys(self.GIT_SETTINGS, data)
694
694
695 self._create_or_update_ui(
695 self._create_or_update_ui(
696 self.repo_settings, *lfs_enabled, value=data[lfs_enabled_key],
696 self.repo_settings, *lfs_enabled, value=data[lfs_enabled_key],
697 active=data[lfs_enabled_key])
697 active=data[lfs_enabled_key])
698
698
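# Minimal sketch of why the trailing comma noted above matters: GIT_SETTINGS
# holds a single (section, key) pair, and the comma performs one-element
# tuple unpacking.
single_setting = (('vcs_git_lfs', 'enabled'),)
lfs_enabled, = single_setting
assert lfs_enabled == ('vcs_git_lfs', 'enabled')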
699 def create_or_update_global_git_settings(self, data):
699 def create_or_update_global_git_settings(self, data):
700 lfs_enabled, lfs_store_location \
700 lfs_enabled, lfs_store_location \
701 = self.GLOBAL_GIT_SETTINGS
701 = self.GLOBAL_GIT_SETTINGS
702 lfs_enabled_key, lfs_store_location_key \
702 lfs_enabled_key, lfs_store_location_key \
703 = self._get_settings_keys(self.GLOBAL_GIT_SETTINGS, data)
703 = self._get_settings_keys(self.GLOBAL_GIT_SETTINGS, data)
704
704
705 self._create_or_update_ui(
705 self._create_or_update_ui(
706 self.global_settings, *lfs_enabled, value=data[lfs_enabled_key],
706 self.global_settings, *lfs_enabled, value=data[lfs_enabled_key],
707 active=data[lfs_enabled_key])
707 active=data[lfs_enabled_key])
708 self._create_or_update_ui(
708 self._create_or_update_ui(
709 self.global_settings, *lfs_store_location,
709 self.global_settings, *lfs_store_location,
710 value=data[lfs_store_location_key])
710 value=data[lfs_store_location_key])
711
711
712 def create_or_update_global_svn_settings(self, data):
712 def create_or_update_global_svn_settings(self, data):
713 # branch/tags patterns
713 # branch/tags patterns
714 self._create_svn_settings(self.global_settings, data)
714 self._create_svn_settings(self.global_settings, data)
715
715
716 def update_global_ssl_setting(self, value):
716 def update_global_ssl_setting(self, value):
717 self._create_or_update_ui(
717 self._create_or_update_ui(
718 self.global_settings, *self.SSL_SETTING, value=value)
718 self.global_settings, *self.SSL_SETTING, value=value)
719
719
720 def update_global_path_setting(self, value):
721 self._create_or_update_ui(
722 self.global_settings, *self.PATH_SETTING, value=value)
723
724 @assert_repo_settings
720 @assert_repo_settings
725 def delete_repo_svn_pattern(self, id_):
721 def delete_repo_svn_pattern(self, id_):
726 ui = self.repo_settings.UiDbModel.get(id_)
722 ui = self.repo_settings.UiDbModel.get(id_)
727 if ui and ui.repository.repo_name == self.repo_settings.repo:
723 if ui and ui.repository.repo_name == self.repo_settings.repo:
728 # only delete if it's the same repo as initialized settings
724 # only delete if it's the same repo as initialized settings
729 self.repo_settings.delete_ui(id_)
725 self.repo_settings.delete_ui(id_)
730 else:
726 else:
731 # raise an error as if this option could not be found
727 # raise an error as if this option could not be found
732 self.repo_settings.delete_ui(-1)
728 self.repo_settings.delete_ui(-1)
733
729
734 def delete_global_svn_pattern(self, id_):
730 def delete_global_svn_pattern(self, id_):
735 self.global_settings.delete_ui(id_)
731 self.global_settings.delete_ui(id_)
736
732
737 @assert_repo_settings
733 @assert_repo_settings
738 def get_repo_ui_settings(self, section=None, key=None):
734 def get_repo_ui_settings(self, section=None, key=None):
739 global_uis = self.global_settings.get_ui(section, key)
735 global_uis = self.global_settings.get_ui(section, key)
740 repo_uis = self.repo_settings.get_ui(section, key)
736 repo_uis = self.repo_settings.get_ui(section, key)
741
737
742 filtered_repo_uis = self._filter_ui_settings(repo_uis)
738 filtered_repo_uis = self._filter_ui_settings(repo_uis)
743 filtered_repo_uis_keys = [
739 filtered_repo_uis_keys = [
744 (s.section, s.key) for s in filtered_repo_uis]
740 (s.section, s.key) for s in filtered_repo_uis]
745
741
746 def _is_global_ui_filtered(ui):
742 def _is_global_ui_filtered(ui):
747 return (
743 return (
748 (ui.section, ui.key) in filtered_repo_uis_keys
744 (ui.section, ui.key) in filtered_repo_uis_keys
749 or ui.section in self._svn_sections)
745 or ui.section in self._svn_sections)
750
746
751 filtered_global_uis = [
747 filtered_global_uis = [
752 ui for ui in global_uis if not _is_global_ui_filtered(ui)]
748 ui for ui in global_uis if not _is_global_ui_filtered(ui)]
753
749
754 return filtered_global_uis + filtered_repo_uis
750 return filtered_global_uis + filtered_repo_uis
755
751
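# Reading of get_repo_ui_settings above, stated for clarity (derived from the
# code, not an addition to it): a repository-level UI entry with the same
# (section, key) shadows the global one, and global entries from the SVN
# branch/tag sections are always dropped so only the repo's own patterns apply.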
756 def get_global_ui_settings(self, section=None, key=None):
752 def get_global_ui_settings(self, section=None, key=None):
757 return self.global_settings.get_ui(section, key)
753 return self.global_settings.get_ui(section, key)
758
754
759 def get_ui_settings_as_config_obj(self, section=None, key=None):
755 def get_ui_settings_as_config_obj(self, section=None, key=None):
760 config = base.Config()
756 config = base.Config()
761
757
762 ui_settings = self.get_ui_settings(section=section, key=key)
758 ui_settings = self.get_ui_settings(section=section, key=key)
763
759
764 for entry in ui_settings:
760 for entry in ui_settings:
765 config.set(entry.section, entry.key, entry.value)
761 config.set(entry.section, entry.key, entry.value)
766
762
767 return config
763 return config
768
764
769 def get_ui_settings(self, section=None, key=None):
765 def get_ui_settings(self, section=None, key=None):
770 if not self.repo_settings or self.inherit_global_settings:
766 if not self.repo_settings or self.inherit_global_settings:
771 return self.get_global_ui_settings(section, key)
767 return self.get_global_ui_settings(section, key)
772 else:
768 else:
773 return self.get_repo_ui_settings(section, key)
769 return self.get_repo_ui_settings(section, key)
774
770
775 def get_svn_patterns(self, section=None):
771 def get_svn_patterns(self, section=None):
776 if not self.repo_settings:
772 if not self.repo_settings:
777 return self.get_global_ui_settings(section)
773 return self.get_global_ui_settings(section)
778 else:
774 else:
779 return self.get_repo_ui_settings(section)
775 return self.get_repo_ui_settings(section)
780
776
781 @assert_repo_settings
777 @assert_repo_settings
782 def get_repo_general_settings(self):
778 def get_repo_general_settings(self):
783 global_settings = self.global_settings.get_all_settings()
779 global_settings = self.global_settings.get_all_settings()
784 repo_settings = self.repo_settings.get_all_settings()
780 repo_settings = self.repo_settings.get_all_settings()
785 filtered_repo_settings = self._filter_general_settings(repo_settings)
781 filtered_repo_settings = self._filter_general_settings(repo_settings)
786 global_settings.update(filtered_repo_settings)
782 global_settings.update(filtered_repo_settings)
787 return global_settings
783 return global_settings
788
784
789 def get_global_general_settings(self):
785 def get_global_general_settings(self):
790 return self.global_settings.get_all_settings()
786 return self.global_settings.get_all_settings()
791
787
792 def get_general_settings(self):
788 def get_general_settings(self):
793 if not self.repo_settings or self.inherit_global_settings:
789 if not self.repo_settings or self.inherit_global_settings:
794 return self.get_global_general_settings()
790 return self.get_global_general_settings()
795 else:
791 else:
796 return self.get_repo_general_settings()
792 return self.get_repo_general_settings()
797
793
798 def get_repos_location(self):
799 return self.global_settings.get_ui_by_key('/').ui_value
800
801 def _filter_ui_settings(self, settings):
794 def _filter_ui_settings(self, settings):
802 filtered_settings = [
795 filtered_settings = [
803 s for s in settings if self._should_keep_setting(s)]
796 s for s in settings if self._should_keep_setting(s)]
804 return filtered_settings
797 return filtered_settings
805
798
806 def _should_keep_setting(self, setting):
799 def _should_keep_setting(self, setting):
807 keep = (
800 keep = (
808 (setting.section, setting.key) in self._ui_settings or
801 (setting.section, setting.key) in self._ui_settings or
809 setting.section in self._svn_sections)
802 setting.section in self._svn_sections)
810 return keep
803 return keep
811
804
812 def _filter_general_settings(self, settings):
805 def _filter_general_settings(self, settings):
813 keys = [self.get_keyname(key) for key in self.GENERAL_SETTINGS]
806 keys = [self.get_keyname(key) for key in self.GENERAL_SETTINGS]
814 return {
807 return {
815 k: settings[k]
808 k: settings[k]
816 for k in settings if k in keys}
809 for k in settings if k in keys}
817
810
818 def _collect_all_settings(self, global_=False):
811 def _collect_all_settings(self, global_=False):
819 settings = self.global_settings if global_ else self.repo_settings
812 settings = self.global_settings if global_ else self.repo_settings
820 result = {}
813 result = {}
821
814
822 for section, key in self._ui_settings:
815 for section, key in self._ui_settings:
823 ui = settings.get_ui_by_section_and_key(section, key)
816 ui = settings.get_ui_by_section_and_key(section, key)
824 result_key = self._get_form_ui_key(section, key)
817 result_key = self._get_form_ui_key(section, key)
825
818
826 if ui:
819 if ui:
827 if section in ('hooks', 'extensions'):
820 if section in ('hooks', 'extensions'):
828 result[result_key] = ui.ui_active
821 result[result_key] = ui.ui_active
829 elif result_key in ['vcs_git_lfs_enabled']:
822 elif result_key in ['vcs_git_lfs_enabled']:
830 result[result_key] = ui.ui_active
823 result[result_key] = ui.ui_active
831 else:
824 else:
832 result[result_key] = ui.ui_value
825 result[result_key] = ui.ui_value
833
826
834 for name in self.GENERAL_SETTINGS:
827 for name in self.GENERAL_SETTINGS:
835 setting = settings.get_setting_by_name(name)
828 setting = settings.get_setting_by_name(name)
836 if setting:
829 if setting:
837 result_key = self.get_keyname(name)
830 result_key = self.get_keyname(name)
838 result[result_key] = setting.app_settings_value
831 result[result_key] = setting.app_settings_value
839
832
840 return result
833 return result
841
834
842 def _get_form_ui_key(self, section, key):
835 def _get_form_ui_key(self, section, key):
843 return '{section}_{key}'.format(
836 return '{section}_{key}'.format(
844 section=section, key=key.replace('.', '_'))
837 section=section, key=key.replace('.', '_'))
845
838
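# Small runnable sketch of the form-key mapping used by this class: dots in the
# ui key become underscores. The pairs below are taken from the settings tuples above.
def form_ui_key(section, key):
    return '{section}_{key}'.format(section=section, key=key.replace('.', '_'))

assert form_ui_key('phases', 'publish') == 'phases_publish'
assert form_ui_key('experimental', 'evolution.exchange') == 'experimental_evolution_exchange'
assert form_ui_key('vcs_git_lfs', 'enabled') == 'vcs_git_lfs_enabled'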
846 def _create_or_update_ui(
839 def _create_or_update_ui(
847 self, settings, section, key, value=None, active=None):
840 self, settings, section, key, value=None, active=None):
848 ui = settings.get_ui_by_section_and_key(section, key)
841 ui = settings.get_ui_by_section_and_key(section, key)
849 if not ui:
842 if not ui:
850 active = True if active is None else active
843 active = True if active is None else active
851 settings.create_ui_section_value(
844 settings.create_ui_section_value(
852 section, value, key=key, active=active)
845 section, value, key=key, active=active)
853 else:
846 else:
854 if active is not None:
847 if active is not None:
855 ui.ui_active = active
848 ui.ui_active = active
856 if value is not None:
849 if value is not None:
857 ui.ui_value = value
850 ui.ui_value = value
858 Session().add(ui)
851 Session().add(ui)
859
852
860 def _create_svn_settings(self, settings, data):
853 def _create_svn_settings(self, settings, data):
861 svn_settings = {
854 svn_settings = {
862 'new_svn_branch': self.SVN_BRANCH_SECTION,
855 'new_svn_branch': self.SVN_BRANCH_SECTION,
863 'new_svn_tag': self.SVN_TAG_SECTION
856 'new_svn_tag': self.SVN_TAG_SECTION
864 }
857 }
865 for key in svn_settings:
858 for key in svn_settings:
866 if data.get(key):
859 if data.get(key):
867 settings.create_ui_section_value(svn_settings[key], data[key])
860 settings.create_ui_section_value(svn_settings[key], data[key])
868
861
869 def _create_or_update_general_settings(self, settings, data):
862 def _create_or_update_general_settings(self, settings, data):
870 for name in self.GENERAL_SETTINGS:
863 for name in self.GENERAL_SETTINGS:
871 data_key = self.get_keyname(name)
864 data_key = self.get_keyname(name)
872 if data_key not in data:
865 if data_key not in data:
873 raise ValueError(
866 raise ValueError(
874 f'The given data does not contain {data_key} key')
867 f'The given data does not contain {data_key} key')
875 setting = settings.create_or_update_setting(
868 setting = settings.create_or_update_setting(
876 name, data[data_key], 'bool')
869 name, data[data_key], 'bool')
877 Session().add(setting)
870 Session().add(setting)
878
871
879 def _get_settings_keys(self, settings, data):
872 def _get_settings_keys(self, settings, data):
880 data_keys = [self._get_form_ui_key(*s) for s in settings]
873 data_keys = [self._get_form_ui_key(*s) for s in settings]
881 for data_key in data_keys:
874 for data_key in data_keys:
882 if data_key not in data:
875 if data_key not in data:
883 raise ValueError(
876 raise ValueError(
884 f'The given data does not contain {data_key} key')
877 f'The given data does not contain {data_key} key')
885 return data_keys
878 return data_keys
886
879
887 def create_largeobjects_dirs_if_needed(self, repo_store_path):
880 def create_largeobjects_dirs_if_needed(self, repo_store_path):
888 """
881 """
889 Creates the largefiles and Git LFS store directories under the given
882 Creates the largefiles and Git LFS store directories under the given
890 repository store path if they do not already exist.
883 repository store path if they do not already exist.
891 """
884 """
892
885
893 from rhodecode.lib.vcs.backends.hg import largefiles_store
886 from rhodecode.lib.vcs.backends.hg import largefiles_store
894 from rhodecode.lib.vcs.backends.git import lfs_store
887 from rhodecode.lib.vcs.backends.git import lfs_store
895
888
896 paths = [
889 paths = [
897 largefiles_store(repo_store_path),
890 largefiles_store(repo_store_path),
898 lfs_store(repo_store_path)]
891 lfs_store(repo_store_path)]
899
892
900 for path in paths:
893 for path in paths:
901 if os.path.isdir(path):
894 if os.path.isdir(path):
902 continue
895 continue
903 if os.path.isfile(path):
896 if os.path.isfile(path):
904 continue
897 continue
905 # neither a file nor a dir; try to create it
898 # neither a file nor a dir; try to create it
906 try:
899 try:
907 os.makedirs(path)
900 os.makedirs(path)
908 except Exception:
901 except Exception:
909 log.warning('Failed to create largefiles dir:%s', path)
902 log.warning('Failed to create largefiles dir:%s', path)
@@ -1,398 +1,399 b''
1 # Copyright (C) 2010-2023 RhodeCode GmbH
1 # Copyright (C) 2010-2023 RhodeCode GmbH
2 #
2 #
3 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
4 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
5 # (only), as published by the Free Software Foundation.
5 # (only), as published by the Free Software Foundation.
6 #
6 #
7 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
10 # GNU General Public License for more details.
11 #
11 #
12 # You should have received a copy of the GNU Affero General Public License
12 # You should have received a copy of the GNU Affero General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 #
14 #
15 # This program is dual-licensed. If you wish to learn more about the
15 # This program is dual-licensed. If you wish to learn more about the
16 # RhodeCode Enterprise Edition, including its added features, Support services,
16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 import io
18 import io
19 import shlex
19 import shlex
20
20
21 import math
21 import math
22 import re
22 import re
23 import os
23 import os
24 import datetime
24 import datetime
25 import logging
25 import logging
26 import queue
26 import queue
27 import subprocess
27 import subprocess
28
28
29
29
30 from dateutil.parser import parse
30 from dateutil.parser import parse
31 from pyramid.interfaces import IRoutesMapper
31 from pyramid.interfaces import IRoutesMapper
32 from pyramid.settings import asbool
32 from pyramid.settings import asbool
33 from pyramid.path import AssetResolver
33 from pyramid.path import AssetResolver
34 from threading import Thread
34 from threading import Thread
35
35
36 from rhodecode.config.jsroutes import generate_jsroutes_content
36 from rhodecode.config.jsroutes import generate_jsroutes_content
37 from rhodecode.lib.base import get_auth_user
37 from rhodecode.lib.base import get_auth_user
38 from rhodecode.lib.celerylib.loader import set_celery_conf
38 from rhodecode.lib.celerylib.loader import set_celery_conf
39
39
40 import rhodecode
40 import rhodecode
41
41
42
42
43 log = logging.getLogger(__name__)
43 log = logging.getLogger(__name__)
44
44
45
45
46 def add_renderer_globals(event):
46 def add_renderer_globals(event):
47 from rhodecode.lib import helpers
47 from rhodecode.lib import helpers
48
48
49 # TODO: When executed in pyramid view context the request is not available
49 # TODO: When executed in pyramid view context the request is not available
50 # in the event. Find a better solution to get the request.
50 # in the event. Find a better solution to get the request.
51 from pyramid.threadlocal import get_current_request
51 from pyramid.threadlocal import get_current_request
52 request = event['request'] or get_current_request()
52 request = event['request'] or get_current_request()
53
53
54 # Add Pyramid translation as '_' to context
54 # Add Pyramid translation as '_' to context
55 event['_'] = request.translate
55 event['_'] = request.translate
56 event['_ungettext'] = request.plularize
56 event['_ungettext'] = request.plularize
57 event['h'] = helpers
57 event['h'] = helpers
58
58
59
59
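# Hypothetical wiring sketch (the actual registration lives in RhodeCode's
# Pyramid configuration and may differ): renderer globals are typically added
# by subscribing the function above to pyramid.events.BeforeRender.
from pyramid.config import Configurator
from pyramid.events import BeforeRender

config = Configurator()
config.add_subscriber(add_renderer_globals, BeforeRender)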
60 def set_user_lang(event):
60 def set_user_lang(event):
61 request = event.request
61 request = event.request
62 cur_user = getattr(request, 'user', None)
62 cur_user = getattr(request, 'user', None)
63
63
64 if cur_user:
64 if cur_user:
65 user_lang = cur_user.get_instance().user_data.get('language')
65 user_lang = cur_user.get_instance().user_data.get('language')
66 if user_lang:
66 if user_lang:
67 log.debug('lang: setting current user:%s language to: %s', cur_user, user_lang)
67 log.debug('lang: setting current user:%s language to: %s', cur_user, user_lang)
68 event.request._LOCALE_ = user_lang
68 event.request._LOCALE_ = user_lang
69
69
70
70
71 def update_celery_conf(event):
71 def update_celery_conf(event):
72 log.debug('Setting celery config from new request')
72 log.debug('Setting celery config from new request')
73 set_celery_conf(request=event.request, registry=event.request.registry)
73 set_celery_conf(request=event.request, registry=event.request.registry)
74
74
75
75
76 def add_request_user_context(event):
76 def add_request_user_context(event):
77 """
77 """
78 Adds auth user into request context
78 Adds auth user into request context
79 """
79 """
80
80
81 request = event.request
81 request = event.request
82 # access req_id as soon as possible
82 # access req_id as soon as possible
83 req_id = request.req_id
83 req_id = request.req_id
84
84
85 if hasattr(request, 'vcs_call'):
85 if hasattr(request, 'vcs_call'):
86 # skip vcs calls
86 # skip vcs calls
87 return
87 return
88
88
89 if hasattr(request, 'rpc_method'):
89 if hasattr(request, 'rpc_method'):
90 # skip api calls
90 # skip api calls
91 return
91 return
92
92
93 auth_user, auth_token = get_auth_user(request)
93 auth_user, auth_token = get_auth_user(request)
94 request.user = auth_user
94 request.user = auth_user
95 request.user_auth_token = auth_token
95 request.user_auth_token = auth_token
96 request.environ['rc_auth_user'] = auth_user
96 request.environ['rc_auth_user'] = auth_user
97 request.environ['rc_auth_user_id'] = str(auth_user.user_id)
97 request.environ['rc_auth_user_id'] = str(auth_user.user_id)
98 request.environ['rc_req_id'] = req_id
98 request.environ['rc_req_id'] = req_id
99
99
100
100
101 def reset_log_bucket(event):
101 def reset_log_bucket(event):
102 """
102 """
103 reset the log bucket on new request
103 reset the log bucket on new request
104 """
104 """
105 request = event.request
105 request = event.request
106 request.req_id_records_init()
106 request.req_id_records_init()
107
107
108
108
109 def scan_repositories_if_enabled(event):
109 def scan_repositories_if_enabled(event):
110 """
110 """
111 This is subscribed to the `pyramid.events.ApplicationCreated` event. It
111 This is subscribed to the `pyramid.events.ApplicationCreated` event. It
112 does a repository scan if enabled in the settings.
112 does a repository scan if enabled in the settings.
113 """
113 """
114 settings = event.app.registry.settings
114 settings = event.app.registry.settings
115 vcs_server_enabled = settings['vcs.server.enable']
115 vcs_server_enabled = settings['vcs.server.enable']
116 import_on_startup = settings['startup.import_repos']
116 import_on_startup = settings['startup.import_repos']
117 if vcs_server_enabled and import_on_startup:
117 if vcs_server_enabled and import_on_startup:
118 from rhodecode.model.scm import ScmModel
118 from rhodecode.model.scm import ScmModel
119 from rhodecode.lib.utils import repo2db_mapper, get_rhodecode_base_path
119 from rhodecode.lib.utils import repo2db_mapper
120 repositories = ScmModel().repo_scan(get_rhodecode_base_path())
120 scm = ScmModel()
121 repositories = scm.repo_scan(scm.repos_path)
121 repo2db_mapper(repositories, remove_obsolete=False)
122 repo2db_mapper(repositories, remove_obsolete=False)
122
123
123
124
124 def write_metadata_if_needed(event):
125 def write_metadata_if_needed(event):
125 """
126 """
126 Writes upgrade metadata
127 Writes upgrade metadata
127 """
128 """
128 import rhodecode
129 import rhodecode
129 from rhodecode.lib import system_info
130 from rhodecode.lib import system_info
130 from rhodecode.lib import ext_json
131 from rhodecode.lib import ext_json
131
132
132 fname = '.rcmetadata.json'
133 fname = '.rcmetadata.json'
133 ini_loc = os.path.dirname(rhodecode.CONFIG.get('__file__'))
134 ini_loc = os.path.dirname(rhodecode.CONFIG.get('__file__'))
134 metadata_destination = os.path.join(ini_loc, fname)
135 metadata_destination = os.path.join(ini_loc, fname)
135
136
136 def get_update_age():
137 def get_update_age():
137 now = datetime.datetime.utcnow()
138 now = datetime.datetime.utcnow()
138
139
139 with open(metadata_destination, 'rb') as f:
140 with open(metadata_destination, 'rb') as f:
140 data = ext_json.json.loads(f.read())
141 data = ext_json.json.loads(f.read())
141 if 'created_on' in data:
142 if 'created_on' in data:
142 update_date = parse(data['created_on'])
143 update_date = parse(data['created_on'])
143 diff = now - update_date
144 diff = now - update_date
144 return diff.total_seconds() / 60.0
145 return diff.total_seconds() / 60.0
145
146
146 return 0
147 return 0
147
148
148 def write():
149 def write():
149 configuration = system_info.SysInfo(
150 configuration = system_info.SysInfo(
150 system_info.rhodecode_config)()['value']
151 system_info.rhodecode_config)()['value']
151 license_token = configuration['config']['license_token']
152 license_token = configuration['config']['license_token']
152
153
153 setup = dict(
154 setup = dict(
154 workers=configuration['config']['server:main'].get(
155 workers=configuration['config']['server:main'].get(
155 'workers', '?'),
156 'workers', '?'),
156 worker_type=configuration['config']['server:main'].get(
157 worker_type=configuration['config']['server:main'].get(
157 'worker_class', 'sync'),
158 'worker_class', 'sync'),
158 )
159 )
159 dbinfo = system_info.SysInfo(system_info.database_info)()['value']
160 dbinfo = system_info.SysInfo(system_info.database_info)()['value']
160 del dbinfo['url']
161 del dbinfo['url']
161
162
162 metadata = dict(
163 metadata = dict(
163 desc='upgrade metadata info',
164 desc='upgrade metadata info',
164 license_token=license_token,
165 license_token=license_token,
165 created_on=datetime.datetime.utcnow().isoformat(),
166 created_on=datetime.datetime.utcnow().isoformat(),
166 usage=system_info.SysInfo(system_info.usage_info)()['value'],
167 usage=system_info.SysInfo(system_info.usage_info)()['value'],
167 platform=system_info.SysInfo(system_info.platform_type)()['value'],
168 platform=system_info.SysInfo(system_info.platform_type)()['value'],
168 database=dbinfo,
169 database=dbinfo,
169 cpu=system_info.SysInfo(system_info.cpu)()['value'],
170 cpu=system_info.SysInfo(system_info.cpu)()['value'],
170 memory=system_info.SysInfo(system_info.memory)()['value'],
171 memory=system_info.SysInfo(system_info.memory)()['value'],
171 setup=setup
172 setup=setup
172 )
173 )
173
174
174 with open(metadata_destination, 'wb') as f:
175 with open(metadata_destination, 'wb') as f:
175 f.write(ext_json.json.dumps(metadata))
176 f.write(ext_json.json.dumps(metadata))
176
177
177 settings = event.app.registry.settings
178 settings = event.app.registry.settings
178 if settings.get('metadata.skip'):
179 if settings.get('metadata.skip'):
179 return
180 return
180
181
181 # only write this every 24h; worker restarts caused unwanted delays
182 # only write this every 24h; worker restarts caused unwanted delays
182 try:
183 try:
183 age_in_min = get_update_age()
184 age_in_min = get_update_age()
184 except Exception:
185 except Exception:
185 age_in_min = 0
186 age_in_min = 0
186
187
187 if age_in_min and age_in_min < 60 * 24:
188 if age_in_min and age_in_min < 60 * 24:
188 return
189 return
189
190
190 try:
191 try:
191 write()
192 write()
192 except Exception:
193 except Exception:
193 pass
194 pass
194
195
195
196
196 def write_usage_data(event):
197 def write_usage_data(event):
197 import rhodecode
198 import rhodecode
198 from rhodecode.lib import system_info
199 from rhodecode.lib import system_info
199 from rhodecode.lib import ext_json
200 from rhodecode.lib import ext_json
200
201
201 settings = event.app.registry.settings
202 settings = event.app.registry.settings
202 instance_tag = settings.get('metadata.write_usage_tag')
203 instance_tag = settings.get('metadata.write_usage_tag')
203 if not settings.get('metadata.write_usage'):
204 if not settings.get('metadata.write_usage'):
204 return
205 return
205
206
206 def get_update_age(dest_file):
207 def get_update_age(dest_file):
207 now = datetime.datetime.utcnow()
208 now = datetime.datetime.utcnow()
208
209
209 with open(dest_file, 'rb') as f:
210 with open(dest_file, 'rb') as f:
210 data = ext_json.json.loads(f.read())
211 data = ext_json.json.loads(f.read())
211 if 'created_on' in data:
212 if 'created_on' in data:
212 update_date = parse(data['created_on'])
213 update_date = parse(data['created_on'])
213 diff = now - update_date
214 diff = now - update_date
214 return math.ceil(diff.total_seconds() / 60.0)
215 return math.ceil(diff.total_seconds() / 60.0)
215
216
216 return 0
217 return 0
217
218
218 utc_date = datetime.datetime.utcnow()
219 utc_date = datetime.datetime.utcnow()
219 hour_quarter = int(math.ceil((utc_date.hour + utc_date.minute/60.0) / 6.))
220 hour_quarter = int(math.ceil((utc_date.hour + utc_date.minute/60.0) / 6.))
220 fname = '.rc_usage_{date.year}{date.month:02d}{date.day:02d}_{hour}.json'.format(
221 fname = '.rc_usage_{date.year}{date.month:02d}{date.day:02d}_{hour}.json'.format(
221 date=utc_date, hour=hour_quarter)
222 date=utc_date, hour=hour_quarter)
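# Illustration with a hypothetical timestamp: at 2023-11-07 14:30 UTC,
# hour_quarter = ceil((14 + 30/60.0) / 6.0) = 3, so the file name becomes
# '.rc_usage_20231107_3.json', i.e. at most one usage file per 6-hour bucket.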
222 ini_loc = os.path.dirname(rhodecode.CONFIG.get('__file__'))
223 ini_loc = os.path.dirname(rhodecode.CONFIG.get('__file__'))
223
224
224 usage_dir = os.path.join(ini_loc, '.rcusage')
225 usage_dir = os.path.join(ini_loc, '.rcusage')
225 if not os.path.isdir(usage_dir):
226 if not os.path.isdir(usage_dir):
226 os.makedirs(usage_dir)
227 os.makedirs(usage_dir)
227 usage_metadata_destination = os.path.join(usage_dir, fname)
228 usage_metadata_destination = os.path.join(usage_dir, fname)
228
229
229 try:
230 try:
230 age_in_min = get_update_age(usage_metadata_destination)
231 age_in_min = get_update_age(usage_metadata_destination)
231 except Exception:
232 except Exception:
232 age_in_min = 0
233 age_in_min = 0
233
234
234 # write at most once every 6 hours
235 # write at most once every 6 hours
235 if age_in_min and age_in_min < 60 * 6:
236 if age_in_min and age_in_min < 60 * 6:
236 log.debug('Usage file created %s minutes ago, skipping (threshold: %s minutes)...',
237 log.debug('Usage file created %s minutes ago, skipping (threshold: %s minutes)...',
237 age_in_min, 60 * 6)
238 age_in_min, 60 * 6)
238 return
239 return
239
240
240 def write(dest_file):
241 def write(dest_file):
241 configuration = system_info.SysInfo(system_info.rhodecode_config)()['value']
242 configuration = system_info.SysInfo(system_info.rhodecode_config)()['value']
242 license_token = configuration['config']['license_token']
243 license_token = configuration['config']['license_token']
243
244
244 metadata = dict(
245 metadata = dict(
245 desc='Usage data',
246 desc='Usage data',
246 instance_tag=instance_tag,
247 instance_tag=instance_tag,
247 license_token=license_token,
248 license_token=license_token,
248 created_on=datetime.datetime.utcnow().isoformat(),
249 created_on=datetime.datetime.utcnow().isoformat(),
249 usage=system_info.SysInfo(system_info.usage_info)()['value'],
250 usage=system_info.SysInfo(system_info.usage_info)()['value'],
250 )
251 )
251
252
252 with open(dest_file, 'wb') as f:
253 with open(dest_file, 'wb') as f:
253 f.write(ext_json.formatted_json(metadata))
254 f.write(ext_json.formatted_json(metadata))
254
255
255 try:
256 try:
256 log.debug('Writing usage file at: %s', usage_metadata_destination)
257 log.debug('Writing usage file at: %s', usage_metadata_destination)
257 write(usage_metadata_destination)
258 write(usage_metadata_destination)
258 except Exception:
259 except Exception:
259 pass
260 pass
260
261
261
262
262 def write_js_routes_if_enabled(event):
263 def write_js_routes_if_enabled(event):
263 registry = event.app.registry
264 registry = event.app.registry
264
265
265 mapper = registry.queryUtility(IRoutesMapper)
266 mapper = registry.queryUtility(IRoutesMapper)
266 _argument_prog = re.compile(r'\{(.*?)\}|:\((.*)\)')
267 _argument_prog = re.compile(r'\{(.*?)\}|:\((.*)\)')
267
268
268 def _extract_route_information(route):
269 def _extract_route_information(route):
269 """
270 """
270 Convert a route into tuple(name, path, args), eg:
271 Convert a route into tuple(name, path, args), eg:
271 ('show_user', '/profile/%(username)s', ['username'])
272 ('show_user', '/profile/%(username)s', ['username'])
272 """
273 """
273
274
274 route_path = route.pattern
275 route_path = route.pattern
275 pattern = route.pattern
276 pattern = route.pattern
276
277
277 def replace(matchobj):
278 def replace(matchobj):
278 if matchobj.group(1):
279 if matchobj.group(1):
279 return "%%(%s)s" % matchobj.group(1).split(':')[0]
280 return "%%(%s)s" % matchobj.group(1).split(':')[0]
280 else:
281 else:
281 return "%%(%s)s" % matchobj.group(2)
282 return "%%(%s)s" % matchobj.group(2)
282
283
283 route_path = _argument_prog.sub(replace, route_path)
284 route_path = _argument_prog.sub(replace, route_path)
284
285
285 if not route_path.startswith('/'):
286 if not route_path.startswith('/'):
286 route_path = f'/{route_path}'
287 route_path = f'/{route_path}'
287
288
288 return (
289 return (
289 route.name,
290 route.name,
290 route_path,
291 route_path,
291 [(arg[0].split(':')[0] if arg[0] != '' else arg[1])
292 [(arg[0].split(':')[0] if arg[0] != '' else arg[1])
292 for arg in _argument_prog.findall(pattern)]
293 for arg in _argument_prog.findall(pattern)]
293 )
294 )
294
295
295 def get_routes():
296 def get_routes():
296 # pyramid routes
297 # pyramid routes
297 for route in mapper.get_routes():
298 for route in mapper.get_routes():
298 if not route.name.startswith('__'):
299 if not route.name.startswith('__'):
299 yield _extract_route_information(route)
300 yield _extract_route_information(route)
300
301
301 if asbool(registry.settings.get('generate_js_files', 'false')):
302 if asbool(registry.settings.get('generate_js_files', 'false')):
302 static_path = AssetResolver().resolve('rhodecode:public').abspath()
303 static_path = AssetResolver().resolve('rhodecode:public').abspath()
303 jsroutes = get_routes()
304 jsroutes = get_routes()
304 jsroutes_file_content = generate_jsroutes_content(jsroutes)
305 jsroutes_file_content = generate_jsroutes_content(jsroutes)
305 jsroutes_file_path = os.path.join(
306 jsroutes_file_path = os.path.join(
306 static_path, 'js', 'rhodecode', 'routes.js')
307 static_path, 'js', 'rhodecode', 'routes.js')
307
308
308 try:
309 try:
309 with open(jsroutes_file_path, 'w', encoding='utf-8') as f:
310 with open(jsroutes_file_path, 'w', encoding='utf-8') as f:
310 f.write(jsroutes_file_content)
311 f.write(jsroutes_file_content)
311 log.debug('generated JS files in %s', jsroutes_file_path)
312 log.debug('generated JS files in %s', jsroutes_file_path)
312 except Exception:
313 except Exception:
313 log.exception('Failed to write routes.js into %s', jsroutes_file_path)
314 log.exception('Failed to write routes.js into %s', jsroutes_file_path)
314
315
315
316
316 class Subscriber(object):
317 class Subscriber(object):
317 """
318 """
318 Base class for subscribers to the pyramid event system.
319 Base class for subscribers to the pyramid event system.
319 """
320 """
320 def __call__(self, event):
321 def __call__(self, event):
321 self.run(event)
322 self.run(event)
322
323
323 def run(self, event):
324 def run(self, event):
324 raise NotImplementedError('Subclass has to implement this.')
325 raise NotImplementedError('Subclass has to implement this.')
325
326
326
327
327 class AsyncSubscriber(Subscriber):
328 class AsyncSubscriber(Subscriber):
328 """
329 """
329 Subscriber that handles the execution of events in a separate thread so as
330 Subscriber that handles the execution of events in a separate thread so as
330 not to block the execution of the code which triggers the event. It puts
331 not to block the execution of the code which triggers the event. It puts
331 the received events into a queue from which the worker thread takes them
332 the received events into a queue from which the worker thread takes them
332 in order.
333 in order.
333 """
334 """
334 def __init__(self):
335 def __init__(self):
335 self._stop = False
336 self._stop = False
336 self._eventq = queue.Queue()
337 self._eventq = queue.Queue()
337 self._worker = self.create_worker()
338 self._worker = self.create_worker()
338 self._worker.start()
339 self._worker.start()
339
340
340 def __call__(self, event):
341 def __call__(self, event):
341 self._eventq.put(event)
342 self._eventq.put(event)
342
343
343 def create_worker(self):
344 def create_worker(self):
344 worker = Thread(target=self.do_work)
345 worker = Thread(target=self.do_work)
345 worker.daemon = True
346 worker.daemon = True
346 return worker
347 return worker
347
348
348 def stop_worker(self):
349 def stop_worker(self):
349 self._stop = True
350 self._stop = True
350 self._eventq.put(None)
351 self._eventq.put(None)
351 self._worker.join()
352 self._worker.join()
352
353
353 def do_work(self):
354 def do_work(self):
354 while not self._stop:
355 while not self._stop:
355 event = self._eventq.get()
356 event = self._eventq.get()
356 if event is not None:
357 if event is not None:
357 self.run(event)
358 self.run(event)
358
359
359
360
360 class AsyncSubprocessSubscriber(AsyncSubscriber):
361 class AsyncSubprocessSubscriber(AsyncSubscriber):
361 """
362 """
362 Subscriber that uses the subprocess module to execute a command if an
363 Subscriber that uses the subprocess module to execute a command if an
363 event is received. Events are handled asynchronously::
364 event is received. Events are handled asynchronously::
364
365
365 subscriber = AsyncSubprocessSubscriber('ls -la', timeout=10)
366 subscriber = AsyncSubprocessSubscriber('ls -la', timeout=10)
366 subscriber(dummyEvent) # running __call__(event)
367 subscriber(dummyEvent) # running __call__(event)
367
368
368 """
369 """
369
370
370 def __init__(self, cmd, timeout=None):
371 def __init__(self, cmd, timeout=None):
371 if not isinstance(cmd, (list, tuple)):
372 if not isinstance(cmd, (list, tuple)):
372 cmd = shlex.split(cmd)
373 cmd = shlex.split(cmd)
373 super().__init__()
374 super().__init__()
374 self._cmd = cmd
375 self._cmd = cmd
375 self._timeout = timeout
376 self._timeout = timeout
376
377
377 def run(self, event):
378 def run(self, event):
378 cmd = self._cmd
379 cmd = self._cmd
379 timeout = self._timeout
380 timeout = self._timeout
380 log.debug('Executing command %s.', cmd)
381 log.debug('Executing command %s.', cmd)
381
382
382 try:
383 try:
383 output = subprocess.check_output(
384 output = subprocess.check_output(
384 cmd, timeout=timeout, stderr=subprocess.STDOUT)
385 cmd, timeout=timeout, stderr=subprocess.STDOUT)
385 log.debug('Command finished %s', cmd)
386 log.debug('Command finished %s', cmd)
386 if output:
387 if output:
387 log.debug('Command output: %s', output)
388 log.debug('Command output: %s', output)
388 except subprocess.TimeoutExpired as e:
389 except subprocess.TimeoutExpired as e:
389 log.exception('Timeout while executing command.')
390 log.exception('Timeout while executing command.')
390 if e.output:
391 if e.output:
391 log.error('Command output: %s', e.output)
392 log.error('Command output: %s', e.output)
392 except subprocess.CalledProcessError as e:
393 except subprocess.CalledProcessError as e:
393 log.exception('Error while executing command.')
394 log.exception('Error while executing command.')
394 if e.output:
395 if e.output:
395 log.error('Command output: %s', e.output)
396 log.error('Command output: %s', e.output)
396 except Exception:
397 except Exception:
397 log.exception(
398 log.exception(
398 'Exception while executing command %s.', cmd)
399 'Exception while executing command %s.', cmd)
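The following is an illustrative usage sketch, not part of this changeset; it shows how the subscriber classes above can be exercised on their own. The import path ``rhodecode.subscribers``, the ``DummyEvent`` class and the ``ls -la`` command are assumptions made for the example only::

    import time

    from rhodecode.subscribers import AsyncSubprocessSubscriber  # assumed import path


    class DummyEvent(object):
        """Stand-in for a pyramid event object; any object can be queued."""


    # run `ls -la` in a background worker thread whenever an event is received
    subscriber = AsyncSubprocessSubscriber('ls -la', timeout=10)
    subscriber(DummyEvent())   # enqueue the event; returns immediately
    time.sleep(1)              # give the worker thread a moment to execute the command
    subscriber.stop_worker()   # signal the worker to stop and join it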
@@ -1,65 +1,64 b''
1 <%namespace name="vcss" file="/base/vcs_settings.mako"/>
1 <%namespace name="vcss" file="/base/vcs_settings.mako"/>
2
2
3 ${h.secure_form(h.route_path('admin_settings_vcs_update'), request=request)}
3 ${h.secure_form(h.route_path('admin_settings_vcs_update'), request=request)}
4 <div>
4 <div>
5 ${vcss.vcs_settings_fields(
5 ${vcss.vcs_settings_fields(
6 suffix='',
6 suffix='',
7 svn_tag_patterns=c.svn_tag_patterns,
7 svn_tag_patterns=c.svn_tag_patterns,
8 svn_branch_patterns=c.svn_branch_patterns,
8 svn_branch_patterns=c.svn_branch_patterns,
9 display_globals=True,
9 display_globals=True
10 allow_repo_location_change=c.visual.allow_repo_location_change
11 )}
10 )}
12 <div class="buttons">
11 <div class="buttons">
13 ${h.submit('save',_('Save settings'),class_="btn")}
12 ${h.submit('save',_('Save settings'),class_="btn")}
14 ${h.reset('reset',_('Reset'),class_="btn")}
13 ${h.reset('reset',_('Reset'),class_="btn")}
15 </div>
14 </div>
16 </div>
15 </div>
17 ${h.end_form()}
16 ${h.end_form()}
18
17
19 <script type="text/javascript">
18 <script type="text/javascript">
20
19
21 function ajaxDeletePattern(pattern_id, field_id) {
20 function ajaxDeletePattern(pattern_id, field_id) {
22 var sUrl = "${h.route_path('admin_settings_vcs_svn_pattern_delete')}";
21 var sUrl = "${h.route_path('admin_settings_vcs_svn_pattern_delete')}";
23 var callback = function (o) {
22 var callback = function (o) {
24 var elem = $("#"+field_id);
23 var elem = $("#"+field_id);
25 elem.remove();
24 elem.remove();
26 };
25 };
27 var postData = {
26 var postData = {
28 'delete_svn_pattern': pattern_id,
27 'delete_svn_pattern': pattern_id,
29 'csrf_token': CSRF_TOKEN
28 'csrf_token': CSRF_TOKEN
30 };
29 };
31 var request = $.post(sUrl, postData)
30 var request = $.post(sUrl, postData)
32 .done(callback)
31 .done(callback)
33 .fail(function (data, textStatus, errorThrown) {
32 .fail(function (data, textStatus, errorThrown) {
34 alert("Error while deleting hooks.\nError code {0} ({1}). URL: {2}".format(data.status,data.statusText,$(this)[0].url));
33 alert("Error while deleting hooks.\nError code {0} ({1}). URL: {2}".format(data.status,data.statusText,$(this)[0].url));
35 });
34 });
36 };
35 };
37
36
38 $(document).ready(function() {
37 $(document).ready(function() {
39
38
40 var unlockpath = function() {
39 var unlockpath = function() {
41 $('#path_unlock_icon').removeClass('icon-lock').addClass('icon-unlock');
40 $('#path_unlock_icon').removeClass('icon-lock').addClass('icon-unlock');
42 $('#paths_root_path').removeAttr('readonly').removeClass('disabled');
41 $('#paths_root_path').removeAttr('readonly').removeClass('disabled');
43 };
42 };
44
43
45 $('#path_unlock').on('click', function(e) {
44 $('#path_unlock').on('click', function(e) {
46 unlockpath();
45 unlockpath();
47 });
46 });
48
47
49 if ($('.locked_input').children().hasClass('error-message')) {
48 if ($('.locked_input').children().hasClass('error-message')) {
50 unlockpath();
49 unlockpath();
51 }
50 }
52
51
53 /* On click handler for the `Generate Apache Config` button. It sends a
52 /* On click handler for the `Generate Apache Config` button. It sends a
54 POST request to trigger the (re)generation of the mod_dav_svn config. */
53 POST request to trigger the (re)generation of the mod_dav_svn config. */
55 $('#vcs_svn_generate_cfg').on('click', function(event) {
54 $('#vcs_svn_generate_cfg').on('click', function(event) {
56 event.preventDefault();
55 event.preventDefault();
57 var url = "${h.route_path('admin_settings_vcs_svn_generate_cfg')}";
56 var url = "${h.route_path('admin_settings_vcs_svn_generate_cfg')}";
58 var jqxhr = $.post(url, {'csrf_token': CSRF_TOKEN});
57 var jqxhr = $.post(url, {'csrf_token': CSRF_TOKEN});
59 jqxhr.done(function(data) {
58 jqxhr.done(function(data) {
60 $.Topic('/notifications').publish(data);
59 $.Topic('/notifications').publish(data);
61 });
60 });
62 });
61 });
63
62
64 });
63 });
65 </script>
64 </script>
@@ -1,339 +1,311 b''
1 ## snippet for displaying vcs settings
1 ## snippet for displaying vcs settings
2 ## usage:
2 ## usage:
3 ## <%namespace name="vcss" file="/base/vcssettings.mako"/>
3 ## <%namespace name="vcss" file="/base/vcssettings.mako"/>
4 ## ${vcss.vcs_settings_fields()}
4 ## ${vcss.vcs_settings_fields()}
5
5
6 <%def name="vcs_settings_fields(suffix='', svn_branch_patterns=None, svn_tag_patterns=None, repo_type=None, display_globals=False, allow_repo_location_change=False, **kwargs)">
6 <%def name="vcs_settings_fields(suffix='', svn_branch_patterns=None, svn_tag_patterns=None, repo_type=None, display_globals=False, **kwargs)">
7 % if display_globals:
7 % if display_globals:
8 <div class="panel panel-default">
8 <div class="panel panel-default">
9 <div class="panel-heading" id="general">
9 <div class="panel-heading" id="general">
10 <h3 class="panel-title">${_('General')}<a class="permalink" href="#general"> ΒΆ</a></h3>
10 <h3 class="panel-title">${_('General')}<a class="permalink" href="#general"> ΒΆ</a></h3>
11 </div>
11 </div>
12 <div class="panel-body">
12 <div class="panel-body">
13 <div class="field">
13 <div class="field">
14 <div class="checkbox">
14 <div class="checkbox">
15 ${h.checkbox('web_push_ssl' + suffix, 'True')}
15 ${h.checkbox('web_push_ssl' + suffix, 'True')}
16 <label for="web_push_ssl${suffix}">${_('Require SSL for vcs operations')}</label>
16 <label for="web_push_ssl${suffix}">${_('Require SSL for vcs operations')}</label>
17 </div>
17 </div>
18 <div class="label">
18 <div class="label">
19 <span class="help-block">${_('Activate to set RhodeCode to require SSL for pushing or pulling. If SSL certificate is missing it will return a HTTP Error 406: Not Acceptable.')}</span>
19 <span class="help-block">${_('Activate to set RhodeCode to require SSL for pushing or pulling. If SSL certificate is missing it will return a HTTP Error 406: Not Acceptable.')}</span>
20 </div>
20 </div>
21 </div>
21 </div>
22 </div>
22 </div>
23 </div>
23 </div>
24 % endif
24 % endif
25
25
26 % if display_globals:
27 <div class="panel panel-default">
28 <div class="panel-heading" id="vcs-storage-options">
29 <h3 class="panel-title">${_('Main Storage Location')}<a class="permalink" href="#vcs-storage-options"> ΒΆ</a></h3>
30 </div>
31 <div class="panel-body">
32 <div class="field">
33 <div class="inputx locked_input">
34 %if allow_repo_location_change:
35 ${h.text('paths_root_path',size=59,readonly="readonly", class_="disabled")}
36 <span id="path_unlock" class="tooltip"
37 title="${h.tooltip(_('Click to unlock. You must restart RhodeCode in order to make this setting take effect.'))}">
38 <div class="btn btn-default lock_input_button"><i id="path_unlock_icon" class="icon-lock"></i></div>
39 </span>
40 %else:
41 ${_('Repository location change is disabled. You can enable this by changing the `allow_repo_location_change` inside .ini file.')}
42 ## form still requires this but we cannot internally change it anyway
43 ${h.hidden('paths_root_path',size=30,readonly="readonly", class_="disabled")}
44 %endif
45 </div>
46 </div>
47 <div class="label">
48 <span class="help-block">${_('Filesystem location where repositories should be stored. After changing this value a restart and rescan of the repository folder are required.')}</span>
49 </div>
50 </div>
51 </div>
52 % endif
53
54 % if display_globals or repo_type in ['git', 'hg']:
26 % if display_globals or repo_type in ['git', 'hg']:
55 <div class="panel panel-default">
27 <div class="panel panel-default">
56 <div class="panel-heading" id="vcs-hooks-options">
28 <div class="panel-heading" id="vcs-hooks-options">
57 <h3 class="panel-title">${_('Internal Hooks')}<a class="permalink" href="#vcs-hooks-options"> ΒΆ</a></h3>
29 <h3 class="panel-title">${_('Internal Hooks')}<a class="permalink" href="#vcs-hooks-options"> ΒΆ</a></h3>
58 </div>
30 </div>
59 <div class="panel-body">
31 <div class="panel-body">
60 <div class="field">
32 <div class="field">
61 <div class="checkbox">
33 <div class="checkbox">
62 ${h.checkbox('hooks_changegroup_repo_size' + suffix, 'True', **kwargs)}
34 ${h.checkbox('hooks_changegroup_repo_size' + suffix, 'True', **kwargs)}
63 <label for="hooks_changegroup_repo_size${suffix}">${_('Show repository size after push')}</label>
35 <label for="hooks_changegroup_repo_size${suffix}">${_('Show repository size after push')}</label>
64 </div>
36 </div>
65
37
66 <div class="label">
38 <div class="label">
67 <span class="help-block">${_('Trigger a hook that calculates repository size after each push.')}</span>
39 <span class="help-block">${_('Trigger a hook that calculates repository size after each push.')}</span>
68 </div>
40 </div>
69 <div class="checkbox">
41 <div class="checkbox">
70 ${h.checkbox('hooks_changegroup_push_logger' + suffix, 'True', **kwargs)}
42 ${h.checkbox('hooks_changegroup_push_logger' + suffix, 'True', **kwargs)}
71 <label for="hooks_changegroup_push_logger${suffix}">${_('Execute pre/post push hooks')}</label>
43 <label for="hooks_changegroup_push_logger${suffix}">${_('Execute pre/post push hooks')}</label>
72 </div>
44 </div>
73 <div class="label">
45 <div class="label">
74 <span class="help-block">${_('Execute Built in pre/post push hooks. This also executes rcextensions hooks.')}</span>
46 <span class="help-block">${_('Execute Built in pre/post push hooks. This also executes rcextensions hooks.')}</span>
75 </div>
47 </div>
76 <div class="checkbox">
48 <div class="checkbox">
77 ${h.checkbox('hooks_outgoing_pull_logger' + suffix, 'True', **kwargs)}
49 ${h.checkbox('hooks_outgoing_pull_logger' + suffix, 'True', **kwargs)}
78 <label for="hooks_outgoing_pull_logger${suffix}">${_('Execute pre/post pull hooks')}</label>
50 <label for="hooks_outgoing_pull_logger${suffix}">${_('Execute pre/post pull hooks')}</label>
79 </div>
51 </div>
80 <div class="label">
52 <div class="label">
81 <span class="help-block">${_('Execute Built in pre/post pull hooks. This also executes rcextensions hooks.')}</span>
53 <span class="help-block">${_('Execute Built in pre/post pull hooks. This also executes rcextensions hooks.')}</span>
82 </div>
54 </div>
83 </div>
55 </div>
84 </div>
56 </div>
85 </div>
57 </div>
86 % endif
58 % endif
87
59
88 % if display_globals or repo_type in ['hg']:
60 % if display_globals or repo_type in ['hg']:
89 <div class="panel panel-default">
61 <div class="panel panel-default">
90 <div class="panel-heading" id="vcs-hg-options">
62 <div class="panel-heading" id="vcs-hg-options">
91 <h3 class="panel-title">${_('Mercurial Settings')}<a class="permalink" href="#vcs-hg-options"> ΒΆ</a></h3>
63 <h3 class="panel-title">${_('Mercurial Settings')}<a class="permalink" href="#vcs-hg-options"> ΒΆ</a></h3>
92 </div>
64 </div>
93 <div class="panel-body">
65 <div class="panel-body">
94 <div class="checkbox">
66 <div class="checkbox">
95 ${h.checkbox('extensions_largefiles' + suffix, 'True', **kwargs)}
67 ${h.checkbox('extensions_largefiles' + suffix, 'True', **kwargs)}
96 <label for="extensions_largefiles${suffix}">${_('Enable largefiles extension')}</label>
68 <label for="extensions_largefiles${suffix}">${_('Enable largefiles extension')}</label>
97 </div>
69 </div>
98 <div class="label">
70 <div class="label">
99 % if display_globals:
71 % if display_globals:
100 <span class="help-block">${_('Enable Largefiles extensions for all repositories.')}</span>
72 <span class="help-block">${_('Enable Largefiles extensions for all repositories.')}</span>
101 % else:
73 % else:
102 <span class="help-block">${_('Enable Largefiles extensions for this repository.')}</span>
74 <span class="help-block">${_('Enable Largefiles extensions for this repository.')}</span>
103 % endif
75 % endif
104 </div>
76 </div>
105
77
106 % if display_globals:
78 % if display_globals:
107 <div class="field">
79 <div class="field">
108 <div class="input">
80 <div class="input">
109 ${h.text('largefiles_usercache' + suffix, size=59)}
81 ${h.text('largefiles_usercache' + suffix, size=59)}
110 </div>
82 </div>
111 </div>
83 </div>
112 <div class="label">
84 <div class="label">
113 <span class="help-block">${_('Filesystem location where Mercurial largefile objects should be stored.')}</span>
85 <span class="help-block">${_('Filesystem location where Mercurial largefile objects should be stored.')}</span>
114 </div>
86 </div>
115 % endif
87 % endif
116
88
117 <div class="checkbox">
89 <div class="checkbox">
118 ${h.checkbox('phases_publish' + suffix, 'True', **kwargs)}
90 ${h.checkbox('phases_publish' + suffix, 'True', **kwargs)}
119 <label for="phases_publish${suffix}">${_('Set repositories as publishing') if display_globals else _('Set repository as publishing')}</label>
91 <label for="phases_publish${suffix}">${_('Set repositories as publishing') if display_globals else _('Set repository as publishing')}</label>
120 </div>
92 </div>
121 <div class="label">
93 <div class="label">
122 <span class="help-block">${_('When this is enabled all commits in the repository are seen as public commits by clients.')}</span>
94 <span class="help-block">${_('When this is enabled all commits in the repository are seen as public commits by clients.')}</span>
123 </div>
95 </div>
124
96
125 <div class="checkbox">
97 <div class="checkbox">
126 ${h.checkbox('extensions_evolve' + suffix, 'True', **kwargs)}
98 ${h.checkbox('extensions_evolve' + suffix, 'True', **kwargs)}
127 <label for="extensions_evolve${suffix}">${_('Enable Evolve and Topic extension')}</label>
99 <label for="extensions_evolve${suffix}">${_('Enable Evolve and Topic extension')}</label>
128 </div>
100 </div>
129 <div class="label">
101 <div class="label">
130 % if display_globals:
102 % if display_globals:
131 <span class="help-block">${_('Enable Evolve and Topic extensions for all repositories.')}</span>
103 <span class="help-block">${_('Enable Evolve and Topic extensions for all repositories.')}</span>
132 % else:
104 % else:
133 <span class="help-block">${_('Enable Evolve and Topic extensions for this repository.')}</span>
105 <span class="help-block">${_('Enable Evolve and Topic extensions for this repository.')}</span>
134 % endif
106 % endif
135 </div>
107 </div>
136
108
137 </div>
109 </div>
138 </div>
110 </div>
139 % endif
111 % endif
140
112
141 % if display_globals or repo_type in ['git']:
113 % if display_globals or repo_type in ['git']:
142 <div class="panel panel-default">
114 <div class="panel panel-default">
143 <div class="panel-heading" id="vcs-git-options">
115 <div class="panel-heading" id="vcs-git-options">
144 <h3 class="panel-title">${_('Git Settings')}<a class="permalink" href="#vcs-git-options"> ΒΆ</a></h3>
116 <h3 class="panel-title">${_('Git Settings')}<a class="permalink" href="#vcs-git-options"> ΒΆ</a></h3>
145 </div>
117 </div>
146 <div class="panel-body">
118 <div class="panel-body">
147 <div class="checkbox">
119 <div class="checkbox">
148 ${h.checkbox('vcs_git_lfs_enabled' + suffix, 'True', **kwargs)}
120 ${h.checkbox('vcs_git_lfs_enabled' + suffix, 'True', **kwargs)}
149 <label for="vcs_git_lfs_enabled${suffix}">${_('Enable lfs extension')}</label>
121 <label for="vcs_git_lfs_enabled${suffix}">${_('Enable lfs extension')}</label>
150 </div>
122 </div>
151 <div class="label">
123 <div class="label">
152 % if display_globals:
124 % if display_globals:
153 <span class="help-block">${_('Enable lfs extensions for all repositories.')}</span>
125 <span class="help-block">${_('Enable lfs extensions for all repositories.')}</span>
154 % else:
126 % else:
155 <span class="help-block">${_('Enable lfs extensions for this repository.')}</span>
127 <span class="help-block">${_('Enable lfs extensions for this repository.')}</span>
156 % endif
128 % endif
157 </div>
129 </div>
158
130
159 % if display_globals:
131 % if display_globals:
160 <div class="field">
132 <div class="field">
161 <div class="input">
133 <div class="input">
162 ${h.text('vcs_git_lfs_store_location' + suffix, size=59)}
134 ${h.text('vcs_git_lfs_store_location' + suffix, size=59)}
163 </div>
135 </div>
164 </div>
136 </div>
165 <div class="label">
137 <div class="label">
166 <span class="help-block">${_('Filesystem location where Git lfs objects should be stored.')}</span>
138 <span class="help-block">${_('Filesystem location where Git lfs objects should be stored.')}</span>
167 </div>
139 </div>
168 % endif
140 % endif
169 </div>
141 </div>
170 </div>
142 </div>
171 % endif
143 % endif
172
144
173 % if display_globals or repo_type in ['svn']:
145 % if display_globals or repo_type in ['svn']:
174 <div class="panel panel-default">
146 <div class="panel panel-default">
175 <div class="panel-heading" id="vcs-svn-options">
147 <div class="panel-heading" id="vcs-svn-options">
176 <h3 class="panel-title">${_('Subversion Settings')}<a class="permalink" href="#vcs-svn-options"> ΒΆ</a></h3>
148 <h3 class="panel-title">${_('Subversion Settings')}<a class="permalink" href="#vcs-svn-options"> ΒΆ</a></h3>
177 </div>
149 </div>
178 <div class="panel-body">
150 <div class="panel-body">
179 <div class="field">
151 <div class="field">
180 <div class="content" >
152 <div class="content" >
181 <label>${_('Repository patterns')}</label><br/>
153 <label>${_('Repository patterns')}</label><br/>
182 </div>
154 </div>
183 </div>
155 </div>
184 <div class="label">
156 <div class="label">
185 <span class="help-block">${_('Patterns for identifying SVN branches and tags. For recursive search, use "*". Eg.: "/branches/*"')}</span>
157 <span class="help-block">${_('Patterns for identifying SVN branches and tags. For recursive search, use "*". Eg.: "/branches/*"')}</span>
186 </div>
158 </div>
187
159
188 <div class="field branch_patterns">
160 <div class="field branch_patterns">
189 <div class="input" >
161 <div class="input" >
190 <label>${_('Branches')}:</label><br/>
162 <label>${_('Branches')}:</label><br/>
191 </div>
163 </div>
192 % if svn_branch_patterns:
164 % if svn_branch_patterns:
193 % for branch in svn_branch_patterns:
165 % for branch in svn_branch_patterns:
194 <div class="input adjacent" id="${'id%s' % branch.ui_id}">
166 <div class="input adjacent" id="${'id%s' % branch.ui_id}">
195 ${h.hidden('branch_ui_key' + suffix, branch.ui_key)}
167 ${h.hidden('branch_ui_key' + suffix, branch.ui_key)}
196 ${h.text('branch_value_%d' % branch.ui_id + suffix, branch.ui_value, size=59, readonly="readonly", class_='disabled')}
168 ${h.text('branch_value_%d' % branch.ui_id + suffix, branch.ui_value, size=59, readonly="readonly", class_='disabled')}
197 % if kwargs.get('disabled') != 'disabled':
169 % if kwargs.get('disabled') != 'disabled':
198 <span class="btn btn-x" onclick="ajaxDeletePattern(${branch.ui_id},'${'id%s' % branch.ui_id}')">
170 <span class="btn btn-x" onclick="ajaxDeletePattern(${branch.ui_id},'${'id%s' % branch.ui_id}')">
199 ${_('Delete')}
171 ${_('Delete')}
200 </span>
172 </span>
201 % endif
173 % endif
202 </div>
174 </div>
203 % endfor
175 % endfor
204 %endif
176 %endif
205 </div>
177 </div>
206 % if kwargs.get('disabled') != 'disabled':
178 % if kwargs.get('disabled') != 'disabled':
207 <div class="field branch_patterns">
179 <div class="field branch_patterns">
208 <div class="input" >
180 <div class="input" >
209 ${h.text('new_svn_branch',size=59,placeholder='New branch pattern')}
181 ${h.text('new_svn_branch',size=59,placeholder='New branch pattern')}
210 </div>
182 </div>
211 </div>
183 </div>
212 % endif
184 % endif
213 <div class="field tag_patterns">
185 <div class="field tag_patterns">
214 <div class="input" >
186 <div class="input" >
215 <label>${_('Tags')}:</label><br/>
187 <label>${_('Tags')}:</label><br/>
216 </div>
188 </div>
217 % if svn_tag_patterns:
189 % if svn_tag_patterns:
218 % for tag in svn_tag_patterns:
190 % for tag in svn_tag_patterns:
219 <div class="input" id="${'id%s' % tag.ui_id + suffix}">
191 <div class="input" id="${'id%s' % tag.ui_id + suffix}">
220 ${h.hidden('tag_ui_key' + suffix, tag.ui_key)}
192 ${h.hidden('tag_ui_key' + suffix, tag.ui_key)}
221 ${h.text('tag_ui_value_new_%d' % tag.ui_id + suffix, tag.ui_value, size=59, readonly="readonly", class_='disabled tag_input')}
193 ${h.text('tag_ui_value_new_%d' % tag.ui_id + suffix, tag.ui_value, size=59, readonly="readonly", class_='disabled tag_input')}
222 % if kwargs.get('disabled') != 'disabled':
194 % if kwargs.get('disabled') != 'disabled':
223 <span class="btn btn-x" onclick="ajaxDeletePattern(${tag.ui_id},'${'id%s' % tag.ui_id}')">
195 <span class="btn btn-x" onclick="ajaxDeletePattern(${tag.ui_id},'${'id%s' % tag.ui_id}')">
224 ${_('Delete')}
196 ${_('Delete')}
225 </span>
197 </span>
226 %endif
198 %endif
227 </div>
199 </div>
228 % endfor
200 % endfor
229 % endif
201 % endif
230 </div>
202 </div>
231 % if kwargs.get('disabled') != 'disabled':
203 % if kwargs.get('disabled') != 'disabled':
232 <div class="field tag_patterns">
204 <div class="field tag_patterns">
233 <div class="input" >
205 <div class="input" >
234 ${h.text('new_svn_tag' + suffix, size=59, placeholder='New tag pattern')}
206 ${h.text('new_svn_tag' + suffix, size=59, placeholder='New tag pattern')}
235 </div>
207 </div>
236 </div>
208 </div>
237 %endif
209 %endif
238 </div>
210 </div>
239 </div>
211 </div>
240 % else:
212 % else:
241 ${h.hidden('new_svn_branch' + suffix, '')}
213 ${h.hidden('new_svn_branch' + suffix, '')}
242 ${h.hidden('new_svn_tag' + suffix, '')}
214 ${h.hidden('new_svn_tag' + suffix, '')}
243 % endif
215 % endif
244
216
245
217
246 % if display_globals or repo_type in ['hg', 'git']:
218 % if display_globals or repo_type in ['hg', 'git']:
247 <div class="panel panel-default">
219 <div class="panel panel-default">
248 <div class="panel-heading" id="vcs-pull-requests-options">
220 <div class="panel-heading" id="vcs-pull-requests-options">
249 <h3 class="panel-title">${_('Pull Request Settings')}<a class="permalink" href="#vcs-pull-requests-options"> ΒΆ</a></h3>
221 <h3 class="panel-title">${_('Pull Request Settings')}<a class="permalink" href="#vcs-pull-requests-options"> ΒΆ</a></h3>
250 </div>
222 </div>
251 <div class="panel-body">
223 <div class="panel-body">
252 <div class="checkbox">
224 <div class="checkbox">
253 ${h.checkbox('rhodecode_pr_merge_enabled' + suffix, 'True', **kwargs)}
225 ${h.checkbox('rhodecode_pr_merge_enabled' + suffix, 'True', **kwargs)}
254 <label for="rhodecode_pr_merge_enabled${suffix}">${_('Enable server-side merge for pull requests')}</label>
226 <label for="rhodecode_pr_merge_enabled${suffix}">${_('Enable server-side merge for pull requests')}</label>
255 </div>
227 </div>
256 <div class="label">
228 <div class="label">
257 <span class="help-block">${_('Note: when this feature is enabled, it only runs hooks defined in the rcextension package. Custom hooks added on the Admin -> Settings -> Hooks page will not be run when pull requests are automatically merged from the web interface.')}</span>
229 <span class="help-block">${_('Note: when this feature is enabled, it only runs hooks defined in the rcextension package. Custom hooks added on the Admin -> Settings -> Hooks page will not be run when pull requests are automatically merged from the web interface.')}</span>
258 </div>
230 </div>
259 <div class="checkbox">
231 <div class="checkbox">
260 ${h.checkbox('rhodecode_use_outdated_comments' + suffix, 'True', **kwargs)}
232 ${h.checkbox('rhodecode_use_outdated_comments' + suffix, 'True', **kwargs)}
261 <label for="rhodecode_use_outdated_comments${suffix}">${_('Invalidate and relocate inline comments during update')}</label>
233 <label for="rhodecode_use_outdated_comments${suffix}">${_('Invalidate and relocate inline comments during update')}</label>
262 </div>
234 </div>
263 <div class="label">
235 <div class="label">
264 <span class="help-block">${_('During the update of a pull request, the position of inline comments will be updated and outdated inline comments will be hidden.')}</span>
236 <span class="help-block">${_('During the update of a pull request, the position of inline comments will be updated and outdated inline comments will be hidden.')}</span>
265 </div>
237 </div>
266 </div>
238 </div>
267 </div>
239 </div>
268 % endif
240 % endif
269
241
270 % if display_globals or repo_type in ['hg', 'git', 'svn']:
242 % if display_globals or repo_type in ['hg', 'git', 'svn']:
271 <div class="panel panel-default">
243 <div class="panel panel-default">
272 <div class="panel-heading" id="vcs-pull-requests-options">
244 <div class="panel-heading" id="vcs-pull-requests-options">
273 <h3 class="panel-title">${_('Diff cache')}<a class="permalink" href="#vcs-pull-requests-options"> ΒΆ</a></h3>
245 <h3 class="panel-title">${_('Diff cache')}<a class="permalink" href="#vcs-pull-requests-options"> ΒΆ</a></h3>
274 </div>
246 </div>
275 <div class="panel-body">
247 <div class="panel-body">
276 <div class="checkbox">
248 <div class="checkbox">
277 ${h.checkbox('rhodecode_diff_cache' + suffix, 'True', **kwargs)}
249 ${h.checkbox('rhodecode_diff_cache' + suffix, 'True', **kwargs)}
278 <label for="rhodecode_diff_cache${suffix}">${_('Enable caching diffs for pull requests cache and commits')}</label>
250 <label for="rhodecode_diff_cache${suffix}">${_('Enable caching diffs for pull requests cache and commits')}</label>
279 </div>
251 </div>
280 </div>
252 </div>
281 </div>
253 </div>
282 % endif
254 % endif
283
255
284 % if display_globals or repo_type in ['hg',]:
256 % if display_globals or repo_type in ['hg',]:
285 <div class="panel panel-default">
257 <div class="panel panel-default">
286 <div class="panel-heading" id="vcs-pull-requests-options">
258 <div class="panel-heading" id="vcs-pull-requests-options">
287 <h3 class="panel-title">${_('Mercurial Pull Request Settings')}<a class="permalink" href="#vcs-hg-pull-requests-options"> ΒΆ</a></h3>
259 <h3 class="panel-title">${_('Mercurial Pull Request Settings')}<a class="permalink" href="#vcs-hg-pull-requests-options"> ΒΆ</a></h3>
288 </div>
260 </div>
289 <div class="panel-body">
261 <div class="panel-body">
290 ## Specific HG settings
262 ## Specific HG settings
291 <div class="checkbox">
263 <div class="checkbox">
292 ${h.checkbox('rhodecode_hg_use_rebase_for_merging' + suffix, 'True', **kwargs)}
264 ${h.checkbox('rhodecode_hg_use_rebase_for_merging' + suffix, 'True', **kwargs)}
293 <label for="rhodecode_hg_use_rebase_for_merging${suffix}">${_('Use rebase as merge strategy')}</label>
265 <label for="rhodecode_hg_use_rebase_for_merging${suffix}">${_('Use rebase as merge strategy')}</label>
294 </div>
266 </div>
295 <div class="label">
267 <div class="label">
296 <span class="help-block">${_('Use rebase instead of creating a merge commit when merging via web interface.')}</span>
268 <span class="help-block">${_('Use rebase instead of creating a merge commit when merging via web interface.')}</span>
297 </div>
269 </div>
298
270
299 <div class="checkbox">
271 <div class="checkbox">
300 ${h.checkbox('rhodecode_hg_close_branch_before_merging' + suffix, 'True', **kwargs)}
272 ${h.checkbox('rhodecode_hg_close_branch_before_merging' + suffix, 'True', **kwargs)}
301 <label for="rhodecode_hg_close_branch_before_merging{suffix}">${_('Close branch before merging it')}</label>
273 <label for="rhodecode_hg_close_branch_before_merging{suffix}">${_('Close branch before merging it')}</label>
302 </div>
274 </div>
303 <div class="label">
275 <div class="label">
304 <span class="help-block">${_('Close branch before merging it into destination branch. No effect when rebase strategy is use.')}</span>
276 <span class="help-block">${_('Close branch before merging it into destination branch. No effect when rebase strategy is use.')}</span>
305 </div>
277 </div>
306
278
307
279
308 </div>
280 </div>
309 </div>
281 </div>
310 % endif
282 % endif
311
283
312 ## DISABLED FOR GIT FOR NOW as the rebase/close is not supported yet
284 ## DISABLED FOR GIT FOR NOW as the rebase/close is not supported yet
313 ## % if display_globals or repo_type in ['git']:
285 ## % if display_globals or repo_type in ['git']:
314 ## <div class="panel panel-default">
286 ## <div class="panel panel-default">
315 ## <div class="panel-heading" id="vcs-pull-requests-options">
287 ## <div class="panel-heading" id="vcs-pull-requests-options">
316 ## <h3 class="panel-title">${_('Git Pull Request Settings')}<a class="permalink" href="#vcs-git-pull-requests-options"> ΒΆ</a></h3>
288 ## <h3 class="panel-title">${_('Git Pull Request Settings')}<a class="permalink" href="#vcs-git-pull-requests-options"> ΒΆ</a></h3>
317 ## </div>
289 ## </div>
318 ## <div class="panel-body">
290 ## <div class="panel-body">
319 ## <div class="checkbox">
291 ## <div class="checkbox">
320 ## ${h.checkbox('rhodecode_git_use_rebase_for_merging' + suffix, 'True', **kwargs)}
292 ## ${h.checkbox('rhodecode_git_use_rebase_for_merging' + suffix, 'True', **kwargs)}
321 ## <label for="rhodecode_git_use_rebase_for_merging${suffix}">${_('Use rebase as merge strategy')}</label>
293 ## <label for="rhodecode_git_use_rebase_for_merging${suffix}">${_('Use rebase as merge strategy')}</label>
322 ## </div>
294 ## </div>
323 ## <div class="label">
295 ## <div class="label">
324 ## <span class="help-block">${_('Use rebase instead of creating a merge commit when merging via web interface.')}</span>
296 ## <span class="help-block">${_('Use rebase instead of creating a merge commit when merging via web interface.')}</span>
325 ## </div>
297 ## </div>
326 ##
298 ##
327 ## <div class="checkbox">
299 ## <div class="checkbox">
328 ## ${h.checkbox('rhodecode_git_close_branch_before_merging' + suffix, 'True', **kwargs)}
300 ## ${h.checkbox('rhodecode_git_close_branch_before_merging' + suffix, 'True', **kwargs)}
329 ## <label for="rhodecode_git_close_branch_before_merging{suffix}">${_('Delete branch after merging it')}</label>
301 ## <label for="rhodecode_git_close_branch_before_merging{suffix}">${_('Delete branch after merging it')}</label>
330 ## </div>
302 ## </div>
331 ## <div class="label">
303 ## <div class="label">
332 ## <span class="help-block">${_('Delete branch after merging it into destination branch. No effect when rebase strategy is use.')}</span>
304 ## <span class="help-block">${_('Delete branch after merging it into destination branch. No effect when rebase strategy is use.')}</span>
333 ## </div>
305 ## </div>
334 ## </div>
306 ## </div>
335 ## </div>
307 ## </div>
336 ## % endif
308 ## % endif
337
309
338
310
339 </%def>
311 </%def>
@@ -1,244 +1,246 b''
1
1
2 # Copyright (C) 2010-2023 RhodeCode GmbH
2 # Copyright (C) 2010-2023 RhodeCode GmbH
3 #
3 #
4 # This program is free software: you can redistribute it and/or modify
4 # This program is free software: you can redistribute it and/or modify
5 # it under the terms of the GNU Affero General Public License, version 3
5 # it under the terms of the GNU Affero General Public License, version 3
6 # (only), as published by the Free Software Foundation.
6 # (only), as published by the Free Software Foundation.
7 #
7 #
8 # This program is distributed in the hope that it will be useful,
8 # This program is distributed in the hope that it will be useful,
9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # GNU General Public License for more details.
11 # GNU General Public License for more details.
12 #
12 #
13 # You should have received a copy of the GNU Affero General Public License
13 # You should have received a copy of the GNU Affero General Public License
14 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 #
15 #
16 # This program is dual-licensed. If you wish to learn more about the
16 # This program is dual-licensed. If you wish to learn more about the
17 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 # and proprietary license terms, please see https://rhodecode.com/licenses/
19
19
20 import os
20 import os
21 import time
21 import time
22 import logging
22 import logging
23 import datetime
23 import datetime
24 import tempfile
24 import tempfile
25 from os.path import join as jn
25 from os.path import join as jn
26 import urllib.parse
26 import urllib.parse
27
27
28 import pytest
28 import pytest
29
29
30 import rhodecode
30 from rhodecode.model.db import User
31 from rhodecode.model.db import User
31 from rhodecode.lib import auth
32 from rhodecode.lib import auth
32 from rhodecode.lib import helpers as h
33 from rhodecode.lib import helpers as h
33 from rhodecode.lib.helpers import flash
34 from rhodecode.lib.helpers import flash
34 from rhodecode.lib.str_utils import safe_str
35 from rhodecode.lib.str_utils import safe_str
35 from rhodecode.lib.hash_utils import sha1_safe
36 from rhodecode.lib.hash_utils import sha1_safe
36
37
37 log = logging.getLogger(__name__)
38 log = logging.getLogger(__name__)
38
39
39 __all__ = [
40 __all__ = [
40 'get_new_dir', 'TestController',
41 'get_new_dir', 'TestController',
41 'clear_cache_regions',
42 'clear_cache_regions',
42 'assert_session_flash', 'login_user', 'no_newline_id_generator',
43 'assert_session_flash', 'login_user', 'no_newline_id_generator',
43 'TESTS_TMP_PATH', 'HG_REPO', 'GIT_REPO', 'SVN_REPO',
44 'TESTS_TMP_PATH', 'HG_REPO', 'GIT_REPO', 'SVN_REPO',
44 'NEW_HG_REPO', 'NEW_GIT_REPO',
45 'NEW_HG_REPO', 'NEW_GIT_REPO',
45 'HG_FORK', 'GIT_FORK', 'TEST_USER_ADMIN_LOGIN', 'TEST_USER_ADMIN_PASS',
46 'HG_FORK', 'GIT_FORK', 'TEST_USER_ADMIN_LOGIN', 'TEST_USER_ADMIN_PASS',
46 'TEST_USER_REGULAR_LOGIN', 'TEST_USER_REGULAR_PASS',
47 'TEST_USER_REGULAR_LOGIN', 'TEST_USER_REGULAR_PASS',
47 'TEST_USER_REGULAR_EMAIL', 'TEST_USER_REGULAR2_LOGIN',
48 'TEST_USER_REGULAR_EMAIL', 'TEST_USER_REGULAR2_LOGIN',
48 'TEST_USER_REGULAR2_PASS', 'TEST_USER_REGULAR2_EMAIL', 'TEST_HG_REPO',
49 'TEST_USER_REGULAR2_PASS', 'TEST_USER_REGULAR2_EMAIL', 'TEST_HG_REPO',
49 'TEST_HG_REPO_CLONE', 'TEST_HG_REPO_PULL', 'TEST_GIT_REPO',
50 'TEST_HG_REPO_CLONE', 'TEST_HG_REPO_PULL', 'TEST_GIT_REPO',
50 'TEST_GIT_REPO_CLONE', 'TEST_GIT_REPO_PULL', 'SCM_TESTS',
51 'TEST_GIT_REPO_CLONE', 'TEST_GIT_REPO_PULL', 'SCM_TESTS',
51 ]
52 ]
52
53
53
54
54 # SOME GLOBALS FOR TESTS
55 # SOME GLOBALS FOR TESTS
55 TEST_DIR = tempfile.gettempdir()
56 TEST_DIR = tempfile.gettempdir()
56
57
57 TESTS_TMP_PATH = jn(TEST_DIR, 'rc_test_{}'.format(next(tempfile._RandomNameSequence())))
58 TEST_USER_ADMIN_LOGIN = 'test_admin'
58 TEST_USER_ADMIN_LOGIN = 'test_admin'
59 TEST_USER_ADMIN_PASS = 'test12'
59 TEST_USER_ADMIN_PASS = 'test12'
60 TEST_USER_ADMIN_EMAIL = 'test_admin@mail.com'
60 TEST_USER_ADMIN_EMAIL = 'test_admin@mail.com'
61
61
62 TEST_USER_REGULAR_LOGIN = 'test_regular'
62 TEST_USER_REGULAR_LOGIN = 'test_regular'
63 TEST_USER_REGULAR_PASS = 'test12'
63 TEST_USER_REGULAR_PASS = 'test12'
64 TEST_USER_REGULAR_EMAIL = 'test_regular@mail.com'
64 TEST_USER_REGULAR_EMAIL = 'test_regular@mail.com'
65
65
66 TEST_USER_REGULAR2_LOGIN = 'test_regular2'
66 TEST_USER_REGULAR2_LOGIN = 'test_regular2'
67 TEST_USER_REGULAR2_PASS = 'test12'
67 TEST_USER_REGULAR2_PASS = 'test12'
68 TEST_USER_REGULAR2_EMAIL = 'test_regular2@mail.com'
68 TEST_USER_REGULAR2_EMAIL = 'test_regular2@mail.com'
69
69
70 HG_REPO = 'vcs_test_hg'
70 HG_REPO = 'vcs_test_hg'
71 GIT_REPO = 'vcs_test_git'
71 GIT_REPO = 'vcs_test_git'
72 SVN_REPO = 'vcs_test_svn'
72 SVN_REPO = 'vcs_test_svn'
73
73
74 NEW_HG_REPO = 'vcs_test_hg_new'
74 NEW_HG_REPO = 'vcs_test_hg_new'
75 NEW_GIT_REPO = 'vcs_test_git_new'
75 NEW_GIT_REPO = 'vcs_test_git_new'
76
76
77 HG_FORK = 'vcs_test_hg_fork'
77 HG_FORK = 'vcs_test_hg_fork'
78 GIT_FORK = 'vcs_test_git_fork'
78 GIT_FORK = 'vcs_test_git_fork'
79
79
80 ## VCS
80 ## VCS
81 SCM_TESTS = ['hg', 'git']
81 SCM_TESTS = ['hg', 'git']
82 uniq_suffix = str(int(time.mktime(datetime.datetime.now().timetuple())))
82 uniq_suffix = str(int(time.mktime(datetime.datetime.now().timetuple())))
83
83
84 TESTS_TMP_PATH = tempfile.mkdtemp(prefix='rc_test_', dir=TEST_DIR)
85
84 TEST_GIT_REPO = jn(TESTS_TMP_PATH, GIT_REPO)
86 TEST_GIT_REPO = jn(TESTS_TMP_PATH, GIT_REPO)
85 TEST_GIT_REPO_CLONE = jn(TESTS_TMP_PATH, f'vcsgitclone{uniq_suffix}')
87 TEST_GIT_REPO_CLONE = jn(TESTS_TMP_PATH, f'vcsgitclone{uniq_suffix}')
86 TEST_GIT_REPO_PULL = jn(TESTS_TMP_PATH, f'vcsgitpull{uniq_suffix}')
88 TEST_GIT_REPO_PULL = jn(TESTS_TMP_PATH, f'vcsgitpull{uniq_suffix}')
87
89
88 TEST_HG_REPO = jn(TESTS_TMP_PATH, HG_REPO)
90 TEST_HG_REPO = jn(TESTS_TMP_PATH, HG_REPO)
89 TEST_HG_REPO_CLONE = jn(TESTS_TMP_PATH, f'vcshgclone{uniq_suffix}')
91 TEST_HG_REPO_CLONE = jn(TESTS_TMP_PATH, f'vcshgclone{uniq_suffix}')
90 TEST_HG_REPO_PULL = jn(TESTS_TMP_PATH, f'vcshgpull{uniq_suffix}')
92 TEST_HG_REPO_PULL = jn(TESTS_TMP_PATH, f'vcshgpull{uniq_suffix}')
91
93
92 TEST_REPO_PREFIX = 'vcs-test'
94 TEST_REPO_PREFIX = 'vcs-test'
93
95
94
96
95 def clear_cache_regions(regions=None):
97 def clear_cache_regions(regions=None):
96 # dogpile
98 # dogpile
97 from rhodecode.lib.rc_cache import region_meta
99 from rhodecode.lib.rc_cache import region_meta
98 for region_name, region in region_meta.dogpile_cache_regions.items():
100 for region_name, region in region_meta.dogpile_cache_regions.items():
99 if not regions or region_name in regions:
101 if not regions or region_name in regions:
100 region.invalidate()
102 region.invalidate()
101
103
102
104
103 def get_new_dir(title):
105 def get_new_dir(title):
104 """
106 """
105 Always returns a new directory path.
107 Always returns a new directory path.
106 """
108 """
107 from rhodecode.tests.vcs.utils import get_normalized_path
109 from rhodecode.tests.vcs.utils import get_normalized_path
108 name_parts = [TEST_REPO_PREFIX]
110 name_parts = [TEST_REPO_PREFIX]
109 if title:
111 if title:
110 name_parts.append(title)
112 name_parts.append(title)
111 hex_str = sha1_safe(f'{os.getpid()} {time.time()}')
113 hex_str = sha1_safe(f'{os.getpid()} {time.time()}')
112 name_parts.append(hex_str)
114 name_parts.append(hex_str)
113 name = '-'.join(name_parts)
115 name = '-'.join(name_parts)
114 path = os.path.join(TEST_DIR, name)
116 path = jn(TEST_DIR, name)
115 return get_normalized_path(path)
117 return get_normalized_path(path)
116
118
117
119
118 def repo_id_generator(name):
120 def repo_id_generator(name):
119 numeric_hash = 0
121 numeric_hash = 0
120 for char in name:
122 for char in name:
121 numeric_hash += (ord(char))
123 numeric_hash += (ord(char))
122 return numeric_hash
124 return numeric_hash
123
125
124
126
125 @pytest.mark.usefixtures('app', 'index_location')
127 @pytest.mark.usefixtures('app', 'index_location')
126 class TestController(object):
128 class TestController(object):
127
129
128 maxDiff = None
130 maxDiff = None
129
131
130 def log_user(self, username=TEST_USER_ADMIN_LOGIN,
132 def log_user(self, username=TEST_USER_ADMIN_LOGIN,
131 password=TEST_USER_ADMIN_PASS):
133 password=TEST_USER_ADMIN_PASS):
132 self._logged_username = username
134 self._logged_username = username
133 self._session = login_user_session(self.app, username, password)
135 self._session = login_user_session(self.app, username, password)
134 self.csrf_token = auth.get_csrf_token(self._session)
136 self.csrf_token = auth.get_csrf_token(self._session)
135
137
136 return self._session['rhodecode_user']
138 return self._session['rhodecode_user']
137
139
138 def logout_user(self):
140 def logout_user(self):
139 logout_user_session(self.app, auth.get_csrf_token(self._session))
141 logout_user_session(self.app, auth.get_csrf_token(self._session))
140 self.csrf_token = None
142 self.csrf_token = None
141 self._logged_username = None
143 self._logged_username = None
142 self._session = None
144 self._session = None
143
145
144 def _get_logged_user(self):
146 def _get_logged_user(self):
145 return User.get_by_username(self._logged_username)
147 return User.get_by_username(self._logged_username)
146
148
147
149
148 def login_user_session(
150 def login_user_session(
149 app, username=TEST_USER_ADMIN_LOGIN, password=TEST_USER_ADMIN_PASS):
151 app, username=TEST_USER_ADMIN_LOGIN, password=TEST_USER_ADMIN_PASS):
150
152
151 response = app.post(
153 response = app.post(
152 h.route_path('login'),
154 h.route_path('login'),
153 {'username': username, 'password': password})
155 {'username': username, 'password': password})
154 if 'invalid user name' in response.text:
156 if 'invalid user name' in response.text:
155 pytest.fail(f'could not login using {username} {password}')
157 pytest.fail(f'could not login using {username} {password}')
156
158
157 assert response.status == '302 Found'
159 assert response.status == '302 Found'
158 response = response.follow()
160 response = response.follow()
159 assert response.status == '200 OK'
161 assert response.status == '200 OK'
160
162
161 session = response.get_session_from_response()
163 session = response.get_session_from_response()
162 assert 'rhodecode_user' in session
164 assert 'rhodecode_user' in session
163 rc_user = session['rhodecode_user']
165 rc_user = session['rhodecode_user']
164 assert rc_user.get('username') == username
166 assert rc_user.get('username') == username
165 assert rc_user.get('is_authenticated')
167 assert rc_user.get('is_authenticated')
166
168
167 return session
169 return session
168
170
169
171
170 def logout_user_session(app, csrf_token):
172 def logout_user_session(app, csrf_token):
171 app.post(h.route_path('logout'), {'csrf_token': csrf_token}, status=302)
173 app.post(h.route_path('logout'), {'csrf_token': csrf_token}, status=302)
172
174
173
175
174 def login_user(app, username=TEST_USER_ADMIN_LOGIN,
176 def login_user(app, username=TEST_USER_ADMIN_LOGIN,
175 password=TEST_USER_ADMIN_PASS):
177 password=TEST_USER_ADMIN_PASS):
176 return login_user_session(app, username, password)['rhodecode_user']
178 return login_user_session(app, username, password)['rhodecode_user']
177
179
178
180
179 def assert_session_flash(response, msg=None, category=None, no_=None):
181 def assert_session_flash(response, msg=None, category=None, no_=None):
180 """
182 """
181 Assert on a flash message in the current session.
183 Assert on a flash message in the current session.
182
184
183 :param response: Response from the given call; it will contain flash
185 :param response: Response from the given call; it will contain flash
184 messages or a session bound to them.
186 messages or a session bound to them.
185 :param msg: The expected message. Will be evaluated if a
187 :param msg: The expected message. Will be evaluated if a
186 :class:`LazyString` is passed in.
188 :class:`LazyString` is passed in.
187 :param category: Optional. If passed, the message category will be
189 :param category: Optional. If passed, the message category will be
188 checked as well.
190 checked as well.
189 :param no_: Optional. If passed, the message will be checked to NOT
191 :param no_: Optional. If passed, the message will be checked to NOT
190 be in the flash session
192 be in the flash session
191 """
193 """
192 if msg is None and no_ is None:
194 if msg is None and no_ is None:
193 raise ValueError("Parameter msg or no_ is required.")
195 raise ValueError("Parameter msg or no_ is required.")
194
196
195 if msg and no_:
197 if msg and no_:
196 raise ValueError("Please specify either msg or no_, but not both")
198 raise ValueError("Please specify either msg or no_, but not both")
197
199
198 session = response.get_session_from_response()
200 session = response.get_session_from_response()
199 messages = flash.pop_messages(session=session)
201 messages = flash.pop_messages(session=session)
200 msg = _eval_if_lazy(msg)
202 msg = _eval_if_lazy(msg)
201
203
202 if no_:
204 if no_:
203 error_msg = f'unable to detect no_ message `{no_}` in empty flash list'
205 error_msg = f'unable to detect no_ message `{no_}` in empty flash list'
204 else:
206 else:
205 error_msg = f'unable to find message `{msg}` in empty flash list'
207 error_msg = f'unable to find message `{msg}` in empty flash list'
206 assert messages, error_msg
208 assert messages, error_msg
207 message = messages[0]
209 message = messages[0]
208
210
209 message_text = _eval_if_lazy(message.message) or ''
211 message_text = _eval_if_lazy(message.message) or ''
210
212
211 if no_:
213 if no_:
212 if no_ in message_text:
214 if no_ in message_text:
213 msg = f'msg `{no_}` found in session flash.'
215 msg = f'msg `{no_}` found in session flash.'
214 pytest.fail(safe_str(msg))
216 pytest.fail(safe_str(msg))
215 else:
217 else:
216
218
217 if msg not in message_text:
219 if msg not in message_text:
218 fail_msg = f'msg `{msg}` not found in ' \
220 fail_msg = f'msg `{msg}` not found in ' \
219 f'session flash: got `{message_text}` (type:{type(message_text)}) instead'
221 f'session flash: got `{message_text}` (type:{type(message_text)}) instead'
220
222
221 pytest.fail(safe_str(fail_msg))
223 pytest.fail(safe_str(fail_msg))
222
224
223 if category:
225 if category:
224 assert category == message.category
226 assert category == message.category
225
227
226
228
227 def _eval_if_lazy(value):
229 def _eval_if_lazy(value):
228 return value.eval() if hasattr(value, 'eval') else value
230 return value.eval() if hasattr(value, 'eval') else value
229
231
230
232
231 def no_newline_id_generator(test_name):
233 def no_newline_id_generator(test_name):
232 """
234 """
233 Generates a test name without spaces or newline characters. Used for
235 Generates a test name without spaces or newline characters. Used for
234 nicer output of test progress.
236 nicer output of test progress.
235 """
237 """
236
238
237 test_name = safe_str(test_name)\
239 test_name = safe_str(test_name)\
238 .replace('\n', '_N') \
240 .replace('\n', '_N') \
239 .replace('\r', '_N') \
241 .replace('\r', '_N') \
240 .replace('\t', '_T') \
242 .replace('\t', '_T') \
241 .replace(' ', '_S')
243 .replace(' ', '_S')
242
244
243 return test_name or 'test-with-empty-name'
245 return test_name or 'test-with-empty-name'
244
246
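An illustrative sketch, not part of this changeset, of how the helpers above are typically combined in a test. The admin endpoint path and the flash message text are assumptions made for the example; only ``TestController``, ``assert_session_flash`` and the csrf token handling come from the module above::

    from rhodecode.tests import TestController, assert_session_flash  # assumed import path


    class TestMyAdminView(TestController):

        def test_save_shows_flash(self):
            self.log_user()                          # logs in as TEST_USER_ADMIN_LOGIN
            response = self.app.post(
                '/some/admin/endpoint',              # hypothetical endpoint for the example
                {'csrf_token': self.csrf_token})
            # the save is expected to leave a success message in the session flash
            assert_session_flash(response, msg='Settings saved', category='success')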
@@ -1,224 +1,226 b''
1
1
2 # Copyright (C) 2010-2023 RhodeCode GmbH
2 # Copyright (C) 2010-2023 RhodeCode GmbH
3 #
3 #
4 # This program is free software: you can redistribute it and/or modify
4 # This program is free software: you can redistribute it and/or modify
5 # it under the terms of the GNU Affero General Public License, version 3
5 # it under the terms of the GNU Affero General Public License, version 3
6 # (only), as published by the Free Software Foundation.
6 # (only), as published by the Free Software Foundation.
7 #
7 #
8 # This program is distributed in the hope that it will be useful,
8 # This program is distributed in the hope that it will be useful,
9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # GNU General Public License for more details.
11 # GNU General Public License for more details.
12 #
12 #
13 # You should have received a copy of the GNU Affero General Public License
13 # You should have received a copy of the GNU Affero General Public License
14 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 #
15 #
16 # This program is dual-licensed. If you wish to learn more about the
16 # This program is dual-licensed. If you wish to learn more about the
17 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 # and proprietary license terms, please see https://rhodecode.com/licenses/
19
19
20 import pytest
20 import pytest
21
21
22 from rhodecode.lib.config_utils import get_app_config
22 from rhodecode.lib.config_utils import get_app_config
23 from rhodecode.tests.fixture import TestINI
23 from rhodecode.tests.fixture import TestINI
24 from rhodecode.tests import TESTS_TMP_PATH
24 from rhodecode.tests.server_utils import RcVCSServer
25 from rhodecode.tests.server_utils import RcVCSServer
25
26
26
27
27 @pytest.fixture(scope='session')
28 @pytest.fixture(scope='session')
28 def vcsserver(request, vcsserver_port, vcsserver_factory):
29 def vcsserver(request, vcsserver_port, vcsserver_factory):
29 """
30 """
30 Session scope VCSServer.
31 Session scope VCSServer.
31
32
32 Tests which need the VCSServer have to rely on this fixture in order
33 Tests which need the VCSServer have to rely on this fixture in order
33 to ensure it will be running.
34 to ensure it will be running.
34
35
35 For specific needs, the fixture vcsserver_factory can be used. It allows
36 For specific needs, the fixture vcsserver_factory can be used. It allows
36 adjusting the configuration file for the test run.
37 adjusting the configuration file for the test run.
37
38
38 Command line args:
39 Command line args:
39
40
40 --without-vcsserver: Allows switching this fixture off. You have to
41 --without-vcsserver: Allows switching this fixture off. You have to
41 start the server manually.
42 start the server manually.
42
43
43 --vcsserver-port: Will expect the VCSServer to listen on this port.
44 --vcsserver-port: Will expect the VCSServer to listen on this port.
44 """
45 """
45
46
46 if not request.config.getoption('with_vcsserver'):
47 if not request.config.getoption('with_vcsserver'):
47 return None
48 return None
48
49
49 return vcsserver_factory(
50 return vcsserver_factory(
50 request, vcsserver_port=vcsserver_port)
51 request, vcsserver_port=vcsserver_port)
51
52
52
53
53 @pytest.fixture(scope='session')
54 @pytest.fixture(scope='session')
54 def vcsserver_factory(tmpdir_factory):
55 def vcsserver_factory(tmpdir_factory):
55 """
56 """
56 Use this if you need a running vcsserver with a special configuration.
57 Use this if you need a running vcsserver with a special configuration.
57 """
58 """
58
59
59 def factory(request, overrides=(), vcsserver_port=None,
60 def factory(request, overrides=(), vcsserver_port=None,
60 log_file=None, workers='2'):
61 log_file=None, workers='2'):
61
62
62 if vcsserver_port is None:
63 if vcsserver_port is None:
63 vcsserver_port = get_available_port()
64 vcsserver_port = get_available_port()
64
65
65 overrides = list(overrides)
66 overrides = list(overrides)
66 overrides.append({'server:main': {'port': vcsserver_port}})
67 overrides.append({'server:main': {'port': vcsserver_port}})
67
68
68 option_name = 'vcsserver_config_http'
69 option_name = 'vcsserver_config_http'
69 override_option_name = 'vcsserver_config_override'
70 override_option_name = 'vcsserver_config_override'
70 config_file = get_config(
71 config_file = get_config(
71 request.config, option_name=option_name,
72 request.config, option_name=option_name,
72 override_option_name=override_option_name, overrides=overrides,
73 override_option_name=override_option_name, overrides=overrides,
73 basetemp=tmpdir_factory.getbasetemp().strpath,
74 basetemp=tmpdir_factory.getbasetemp().strpath,
74 prefix='test_vcs_')
75 prefix='test_vcs_')
75
76
76 server = RcVCSServer(config_file, log_file, workers)
77 server = RcVCSServer(config_file, log_file, workers)
77 server.start()
78 server.start()
78
79
79 @request.addfinalizer
80 @request.addfinalizer
80 def cleanup():
81 def cleanup():
81 server.shutdown()
82 server.shutdown()
82
83
83 server.wait_until_ready()
84 server.wait_until_ready()
84 return server
85 return server
85
86
86 return factory
87 return factory
87
88
88
89
89 def _use_log_level(config):
90 def _use_log_level(config):
90 level = config.getoption('test_loglevel') or 'critical'
91 level = config.getoption('test_loglevel') or 'critical'
91 return level.upper()
92 return level.upper()
92
93
93
94
94 @pytest.fixture(scope='session')
95 @pytest.fixture(scope='session')
95 def ini_config(request, tmpdir_factory, rcserver_port, vcsserver_port):
96 def ini_config(request, tmpdir_factory, rcserver_port, vcsserver_port):
96 option_name = 'pyramid_config'
97 option_name = 'pyramid_config'
97 log_level = _use_log_level(request.config)
98 log_level = _use_log_level(request.config)
98
99
99 overrides = [
100 overrides = [
100 {'server:main': {'port': rcserver_port}},
101 {'server:main': {'port': rcserver_port}},
101 {'app:main': {
102 {'app:main': {
102 'cache_dir': '%(here)s/rc_data',
103 'cache_dir': '%(here)s/rc_data',
103 'vcs.server': f'localhost:{vcsserver_port}',
104 'vcs.server': f'localhost:{vcsserver_port}',
104 # johbo: We will always start the VCSServer on our own based on the
105 # johbo: We will always start the VCSServer on our own based on the
105 # fixtures of the test cases. For the test run it must always be
106 # fixtures of the test cases. For the test run it must always be
106 # off in the INI file.
107 # off in the INI file.
107 'vcs.start_server': 'false',
108 'vcs.start_server': 'false',
108
109
109 'vcs.server.protocol': 'http',
110 'vcs.server.protocol': 'http',
110 'vcs.scm_app_implementation': 'http',
111 'vcs.scm_app_implementation': 'http',
111 'vcs.svn.proxy.enabled': 'true',
112 'vcs.svn.proxy.enabled': 'true',
112 'vcs.hooks.protocol': 'http',
113 'vcs.hooks.protocol': 'http',
113 'vcs.hooks.host': '*',
114 'vcs.hooks.host': '*',
115 'repo_store.path': TESTS_TMP_PATH,
114 'app.service_api.token': 'service_secret_token',
116 'app.service_api.token': 'service_secret_token',
115 }},
117 }},
116
118
117 {'handler_console': {
119 {'handler_console': {
118 'class': 'StreamHandler',
120 'class': 'StreamHandler',
119 'args': '(sys.stderr,)',
121 'args': '(sys.stderr,)',
120 'level': log_level,
122 'level': log_level,
121 }},
123 }},
122
124
123 ]
125 ]
124
126
125 filename = get_config(
127 filename = get_config(
126 request.config, option_name=option_name,
128 request.config, option_name=option_name,
127 override_option_name='{}_override'.format(option_name),
129 override_option_name='{}_override'.format(option_name),
128 overrides=overrides,
130 overrides=overrides,
129 basetemp=tmpdir_factory.getbasetemp().strpath,
131 basetemp=tmpdir_factory.getbasetemp().strpath,
130 prefix='test_rce_')
132 prefix='test_rce_')
131 return filename
133 return filename
132
134
133
135
134 @pytest.fixture(scope='session')
136 @pytest.fixture(scope='session')
135 def ini_settings(ini_config):
137 def ini_settings(ini_config):
136 ini_path = ini_config
138 ini_path = ini_config
137 return get_app_config(ini_path)
139 return get_app_config(ini_path)
138
140
139
141
140 def get_available_port(min_port=40000, max_port=55555):
142 def get_available_port(min_port=40000, max_port=55555):
141 from rhodecode.lib.utils2 import get_available_port as _get_port
143 from rhodecode.lib.utils2 import get_available_port as _get_port
142 return _get_port(min_port, max_port)
144 return _get_port(min_port, max_port)
143
145
144
146
145 @pytest.fixture(scope='session')
147 @pytest.fixture(scope='session')
146 def rcserver_port(request):
148 def rcserver_port(request):
147 port = get_available_port()
149 port = get_available_port()
148 print(f'Using rhodecode port {port}')
150 print(f'Using rhodecode port {port}')
149 return port
151 return port
150
152
151
153
152 @pytest.fixture(scope='session')
154 @pytest.fixture(scope='session')
153 def vcsserver_port(request):
155 def vcsserver_port(request):
154 port = request.config.getoption('--vcsserver-port')
156 port = request.config.getoption('--vcsserver-port')
155 if port is None:
157 if port is None:
156 port = get_available_port()
158 port = get_available_port()
157 print(f'Using vcsserver port {port}')
159 print(f'Using vcsserver port {port}')
158 return port
160 return port
159
161
160
162
161 @pytest.fixture(scope='session')
163 @pytest.fixture(scope='session')
162 def available_port_factory():
164 def available_port_factory():
163 """
165 """
164 Returns a callable which returns free port numbers.
166 Returns a callable which returns free port numbers.
165 """
167 """
166 return get_available_port
168 return get_available_port
167
169
168
170
169 @pytest.fixture()
171 @pytest.fixture()
170 def available_port(available_port_factory):
172 def available_port(available_port_factory):
171 """
173 """
172 Gives you one free port for the current test.
174 Gives you one free port for the current test.
173
175
174 Uses "available_port_factory" to retrieve the port.
176 Uses "available_port_factory" to retrieve the port.
175 """
177 """
176 return available_port_factory()
178 return available_port_factory()
177
179
178
180
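# Hedged example (illustrative only) of a test using the 'available_port'
# fixture to bind a throwaway socket on the returned free port.
def test_can_bind_free_port(available_port):
    import socket
    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    try:
        sock.bind(('127.0.0.1', available_port))
    finally:
        sock.close()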
179 @pytest.fixture(scope='session')
181 @pytest.fixture(scope='session')
180 def testini_factory(tmpdir_factory, ini_config):
182 def testini_factory(tmpdir_factory, ini_config):
181 """
183 """
182 Factory to create an INI file based on TestINI.
184 Factory to create an INI file based on TestINI.
183
185
184 It makes sure the INI file is placed in the correct directory.
186 It makes sure the INI file is placed in the correct directory.
185 """
187 """
186 basetemp = tmpdir_factory.getbasetemp().strpath
188 basetemp = tmpdir_factory.getbasetemp().strpath
187 return TestIniFactory(basetemp, ini_config)
189 return TestIniFactory(basetemp, ini_config)
188
190
189
191
190 class TestIniFactory(object):
192 class TestIniFactory(object):
191
193
192 def __init__(self, basetemp, template_ini):
194 def __init__(self, basetemp, template_ini):
193 self._basetemp = basetemp
195 self._basetemp = basetemp
194 self._template_ini = template_ini
196 self._template_ini = template_ini
195
197
196 def __call__(self, ini_params, new_file_prefix='test'):
198 def __call__(self, ini_params, new_file_prefix='test'):
197 ini_file = TestINI(
199 ini_file = TestINI(
198 self._template_ini, ini_params=ini_params,
200 self._template_ini, ini_params=ini_params,
199 new_file_prefix=new_file_prefix, dir=self._basetemp)
201 new_file_prefix=new_file_prefix, dir=self._basetemp)
200 result = ini_file.create()
202 result = ini_file.create()
201 return result
203 return result
202
204
203
205
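# Hedged usage sketch of the factory above: the section override shown is
# illustrative, only the list-of-dicts shape matters. The returned value is
# the path of the merged INI written under the pytest base temp directory.
def test_with_custom_ini(testini_factory):
    custom_ini_path = testini_factory([
        {'app:main': {'vcs.start_server': 'false'}},
    ])
    assert custom_ini_path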
204 def get_config(
206 def get_config(
205 config, option_name, override_option_name, overrides=None,
207 config, option_name, override_option_name, overrides=None,
206 basetemp=None, prefix='test'):
208 basetemp=None, prefix='test'):
207 """
209 """
208 Find a configuration file and apply overrides for the given `prefix`.
210 Find a configuration file and apply overrides for the given `prefix`.
209 """
211 """
210 config_file = (
212 config_file = (
211 config.getoption(option_name) or config.getini(option_name))
213 config.getoption(option_name) or config.getini(option_name))
212 if not config_file:
214 if not config_file:
213 pytest.exit(
215 pytest.exit(
214 "Configuration error, could not extract {}.".format(option_name))
216 "Configuration error, could not extract {}.".format(option_name))
215
217
216 overrides = overrides or []
218 overrides = overrides or []
217 config_override = config.getoption(override_option_name)
219 config_override = config.getoption(override_option_name)
218 if config_override:
220 if config_override:
219 overrides.append(config_override)
221 overrides.append(config_override)
220 temp_ini_file = TestINI(
222 temp_ini_file = TestINI(
221 config_file, ini_params=overrides, new_file_prefix=prefix,
223 config_file, ini_params=overrides, new_file_prefix=prefix,
222 dir=basetemp)
224 dir=basetemp)
223
225
224 return temp_ini_file.create()
226 return temp_ini_file.create()
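# Hedged illustration of the `overrides` shape consumed by get_config():
# a list of nested dicts mapping INI sections to key/value pairs, which
# TestINI merges into a copy of the base configuration file. The values
# below are illustrative, mirroring the ini_config fixture above.
example_overrides = [
    {'server:main': {'port': 10099}},
    {'app:main': {'vcs.start_server': 'false'}},
]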
@@ -1,232 +1,232 b''
1
1
2 # Copyright (C) 2010-2023 RhodeCode GmbH
2 # Copyright (C) 2010-2023 RhodeCode GmbH
3 #
3 #
4 # This program is free software: you can redistribute it and/or modify
4 # This program is free software: you can redistribute it and/or modify
5 # it under the terms of the GNU Affero General Public License, version 3
5 # it under the terms of the GNU Affero General Public License, version 3
6 # (only), as published by the Free Software Foundation.
6 # (only), as published by the Free Software Foundation.
7 #
7 #
8 # This program is distributed in the hope that it will be useful,
8 # This program is distributed in the hope that it will be useful,
9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # GNU General Public License for more details.
11 # GNU General Public License for more details.
12 #
12 #
13 # You should have received a copy of the GNU Affero General Public License
13 # You should have received a copy of the GNU Affero General Public License
14 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 #
15 #
16 # This program is dual-licensed. If you wish to learn more about the
16 # This program is dual-licensed. If you wish to learn more about the
17 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 import io
19 import io
20 from base64 import b64encode
20 from base64 import b64encode
21
21
22 import pytest
22 import pytest
23 from unittest.mock import patch, Mock, MagicMock
23 from unittest.mock import patch, Mock, MagicMock
24
24
25 from rhodecode.lib.middleware.simplesvn import SimpleSvn, SimpleSvnApp
25 from rhodecode.lib.middleware.simplesvn import SimpleSvn, SimpleSvnApp
26 from rhodecode.lib.utils import get_rhodecode_base_path
26 from rhodecode.lib.utils import get_rhodecode_repo_store_path
27 from rhodecode.tests import SVN_REPO, TEST_USER_ADMIN_LOGIN, TEST_USER_ADMIN_PASS
27 from rhodecode.tests import SVN_REPO, TEST_USER_ADMIN_LOGIN, TEST_USER_ADMIN_PASS
28
28
29
29
30 class TestSimpleSvn(object):
30 class TestSimpleSvn(object):
31 @pytest.fixture(autouse=True)
31 @pytest.fixture(autouse=True)
32 def simple_svn(self, baseapp, request_stub):
32 def simple_svn(self, baseapp, request_stub):
33 base_path = get_rhodecode_base_path()
33 base_path = get_rhodecode_repo_store_path()
34 self.app = SimpleSvn(
34 self.app = SimpleSvn(
35 config={'auth_ret_code': '', 'base_path': base_path},
35 config={'auth_ret_code': '', 'repo_store.path': base_path},
36 registry=request_stub.registry)
36 registry=request_stub.registry)
37
37
38 def test_get_config(self):
38 def test_get_config(self):
39 extras = {'foo': 'FOO', 'bar': 'BAR'}
39 extras = {'foo': 'FOO', 'bar': 'BAR'}
40 config = self.app._create_config(extras, repo_name='test-repo')
40 config = self.app._create_config(extras, repo_name='test-repo')
41 assert config == extras
41 assert config == extras
42
42
43 @pytest.mark.parametrize(
43 @pytest.mark.parametrize(
44 'method', ['OPTIONS', 'PROPFIND', 'GET', 'REPORT'])
44 'method', ['OPTIONS', 'PROPFIND', 'GET', 'REPORT'])
45 def test_get_action_returns_pull(self, method):
45 def test_get_action_returns_pull(self, method):
46 environment = {'REQUEST_METHOD': method}
46 environment = {'REQUEST_METHOD': method}
47 action = self.app._get_action(environment)
47 action = self.app._get_action(environment)
48 assert action == 'pull'
48 assert action == 'pull'
49
49
50 @pytest.mark.parametrize(
50 @pytest.mark.parametrize(
51 'method', [
51 'method', [
52 'MKACTIVITY', 'PROPPATCH', 'PUT', 'CHECKOUT', 'MKCOL', 'MOVE',
52 'MKACTIVITY', 'PROPPATCH', 'PUT', 'CHECKOUT', 'MKCOL', 'MOVE',
53 'COPY', 'DELETE', 'LOCK', 'UNLOCK', 'MERGE'
53 'COPY', 'DELETE', 'LOCK', 'UNLOCK', 'MERGE'
54 ])
54 ])
55 def test_get_action_returns_push(self, method):
55 def test_get_action_returns_push(self, method):
56 environment = {'REQUEST_METHOD': method}
56 environment = {'REQUEST_METHOD': method}
57 action = self.app._get_action(environment)
57 action = self.app._get_action(environment)
58 assert action == 'push'
58 assert action == 'push'
59
59
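# A hedged sketch (not the shipped implementation) of the classification the
# two tests above assert: read-only HTTP/WebDAV verbs map to 'pull', any
# other verb is treated as 'push'.
READ_METHODS = frozenset(('OPTIONS', 'PROPFIND', 'GET', 'REPORT'))

def classify_svn_action(environ):
    # hypothetical helper mirroring what SimpleSvn._get_action is tested for
    return 'pull' if environ['REQUEST_METHOD'] in READ_METHODS else 'push'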
60 @pytest.mark.parametrize(
60 @pytest.mark.parametrize(
61 'path, expected_name', [
61 'path, expected_name', [
62 ('/hello-svn', 'hello-svn'),
62 ('/hello-svn', 'hello-svn'),
63 ('/hello-svn/', 'hello-svn'),
63 ('/hello-svn/', 'hello-svn'),
64 ('/group/hello-svn/', 'group/hello-svn'),
64 ('/group/hello-svn/', 'group/hello-svn'),
65 ('/group/hello-svn/!svn/vcc/default', 'group/hello-svn'),
65 ('/group/hello-svn/!svn/vcc/default', 'group/hello-svn'),
66 ])
66 ])
67 def test_get_repository_name(self, path, expected_name):
67 def test_get_repository_name(self, path, expected_name):
68 environment = {'PATH_INFO': path}
68 environment = {'PATH_INFO': path}
69 name = self.app._get_repository_name(environment)
69 name = self.app._get_repository_name(environment)
70 assert name == expected_name
70 assert name == expected_name
71
71
72 def test_get_repository_name_subfolder(self, backend_svn):
72 def test_get_repository_name_subfolder(self, backend_svn):
73 repo = backend_svn.repo
73 repo = backend_svn.repo
74 environment = {
74 environment = {
75 'PATH_INFO': '/{}/path/with/subfolders'.format(repo.repo_name)}
75 'PATH_INFO': '/{}/path/with/subfolders'.format(repo.repo_name)}
76 name = self.app._get_repository_name(environment)
76 name = self.app._get_repository_name(environment)
77 assert name == repo.repo_name
77 assert name == repo.repo_name
78
78
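# A hedged, simplified sketch covering only the parametrized path cases
# further above: strip the surrounding slashes and cut the special '!svn'
# suffix. The subfolder case tested immediately above requires knowledge of
# the existing repositories and is intentionally not handled here.
def repo_name_from_path(path_info):
    # hypothetical helper, not the middleware's actual _get_repository_name
    path = path_info.strip('/')
    if '!svn' in path:
        path = path.split('!svn', 1)[0].rstrip('/')
    return path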
79 def test_create_wsgi_app(self):
79 def test_create_wsgi_app(self):
80 with patch.object(SimpleSvn, '_is_svn_enabled') as mock_method:
80 with patch.object(SimpleSvn, '_is_svn_enabled') as mock_method:
81 mock_method.return_value = False
81 mock_method.return_value = False
82 with patch('rhodecode.lib.middleware.simplesvn.DisabledSimpleSvnApp') as (
82 with patch('rhodecode.lib.middleware.simplesvn.DisabledSimpleSvnApp') as (
83 wsgi_app_mock):
83 wsgi_app_mock):
84 config = Mock()
84 config = Mock()
85 wsgi_app = self.app._create_wsgi_app(
85 wsgi_app = self.app._create_wsgi_app(
86 repo_path='', repo_name='', config=config)
86 repo_path='', repo_name='', config=config)
87
87
88 wsgi_app_mock.assert_called_once_with(config)
88 wsgi_app_mock.assert_called_once_with(config)
89 assert wsgi_app == wsgi_app_mock()
89 assert wsgi_app == wsgi_app_mock()
90
90
91 def test_create_wsgi_app_when_enabled(self):
91 def test_create_wsgi_app_when_enabled(self):
92 with patch.object(SimpleSvn, '_is_svn_enabled') as mock_method:
92 with patch.object(SimpleSvn, '_is_svn_enabled') as mock_method:
93 mock_method.return_value = True
93 mock_method.return_value = True
94 with patch('rhodecode.lib.middleware.simplesvn.SimpleSvnApp') as (
94 with patch('rhodecode.lib.middleware.simplesvn.SimpleSvnApp') as (
95 wsgi_app_mock):
95 wsgi_app_mock):
96 config = Mock()
96 config = Mock()
97 wsgi_app = self.app._create_wsgi_app(
97 wsgi_app = self.app._create_wsgi_app(
98 repo_path='', repo_name='', config=config)
98 repo_path='', repo_name='', config=config)
99
99
100 wsgi_app_mock.assert_called_once_with(config)
100 wsgi_app_mock.assert_called_once_with(config)
101 assert wsgi_app == wsgi_app_mock()
101 assert wsgi_app == wsgi_app_mock()
102
102
103
103
104 def basic_auth(username, password):
104 def basic_auth(username, password):
105 token = b64encode(f"{username}:{password}".encode('utf-8')).decode("ascii")
105 token = b64encode(f"{username}:{password}".encode('utf-8')).decode("ascii")
106 return f'Basic {token}'
106 return f'Basic {token}'
107
107
108
108
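# Hedged illustration of the helper above: it base64-encodes the
# "username:password" pair into an HTTP Basic Authorization header value.
# The credentials in this example are made up.
assert basic_auth('admin', 'secret') == 'Basic YWRtaW46c2VjcmV0'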
109 class TestSimpleSvnApp(object):
109 class TestSimpleSvnApp(object):
110 data = b'<xml></xml>'
110 data = b'<xml></xml>'
111 path = SVN_REPO
111 path = SVN_REPO
112 wsgi_input = io.BytesIO(data)
112 wsgi_input = io.BytesIO(data)
113 environment = {
113 environment = {
114 'HTTP_DAV': (
114 'HTTP_DAV': (
115 'http://subversion.tigris.org/xmlns/dav/svn/depth, '
115 'http://subversion.tigris.org/xmlns/dav/svn/depth, '
116 'http://subversion.tigris.org/xmlns/dav/svn/mergeinfo'),
116 'http://subversion.tigris.org/xmlns/dav/svn/mergeinfo'),
117 'HTTP_USER_AGENT': 'SVN/1.14.1 (x86_64-linux) serf/1.3.8',
117 'HTTP_USER_AGENT': 'SVN/1.14.1 (x86_64-linux) serf/1.3.8',
118 'REQUEST_METHOD': 'OPTIONS',
118 'REQUEST_METHOD': 'OPTIONS',
119 'PATH_INFO': path,
119 'PATH_INFO': path,
120 'wsgi.input': wsgi_input,
120 'wsgi.input': wsgi_input,
121 'CONTENT_TYPE': 'text/xml',
121 'CONTENT_TYPE': 'text/xml',
122 'CONTENT_LENGTH': '130',
122 'CONTENT_LENGTH': '130',
123 'Authorization': basic_auth(TEST_USER_ADMIN_LOGIN, TEST_USER_ADMIN_PASS)
123 'Authorization': basic_auth(TEST_USER_ADMIN_LOGIN, TEST_USER_ADMIN_PASS)
124 }
124 }
125
125
126 def setup_method(self, method):
126 def setup_method(self, method):
127 # note(marcink): this is the hostname from the docker compose setup used for testing...
127 # note(marcink): this is the hostname from the docker compose setup used for testing...
128 self.host = 'http://svn:8090'
128 self.host = 'http://svn:8090'
129 base_path = get_rhodecode_base_path()
129 base_path = get_rhodecode_repo_store_path()
130 self.app = SimpleSvnApp(
130 self.app = SimpleSvnApp(
131 config={'subversion_http_server_url': self.host,
131 config={'subversion_http_server_url': self.host,
132 'base_path': base_path})
132 'base_path': base_path})
133
133
134 def test_get_request_headers_with_content_type(self):
134 def test_get_request_headers_with_content_type(self):
135 expected_headers = {
135 expected_headers = {
136 'Dav': self.environment['HTTP_DAV'],
136 'Dav': self.environment['HTTP_DAV'],
137 'User-Agent': self.environment['HTTP_USER_AGENT'],
137 'User-Agent': self.environment['HTTP_USER_AGENT'],
138 'Content-Type': self.environment['CONTENT_TYPE'],
138 'Content-Type': self.environment['CONTENT_TYPE'],
139 'Content-Length': self.environment['CONTENT_LENGTH'],
139 'Content-Length': self.environment['CONTENT_LENGTH'],
140 'Authorization': self.environment['Authorization']
140 'Authorization': self.environment['Authorization']
141 }
141 }
142 headers = self.app._get_request_headers(self.environment)
142 headers = self.app._get_request_headers(self.environment)
143 assert headers == expected_headers
143 assert headers == expected_headers
144
144
145 def test_get_request_headers_without_content_type(self):
145 def test_get_request_headers_without_content_type(self):
146 environment = self.environment.copy()
146 environment = self.environment.copy()
147 environment.pop('CONTENT_TYPE')
147 environment.pop('CONTENT_TYPE')
148 expected_headers = {
148 expected_headers = {
149 'Dav': environment['HTTP_DAV'],
149 'Dav': environment['HTTP_DAV'],
150 'Content-Length': self.environment['CONTENT_LENGTH'],
150 'Content-Length': self.environment['CONTENT_LENGTH'],
151 'User-Agent': environment['HTTP_USER_AGENT'],
151 'User-Agent': environment['HTTP_USER_AGENT'],
152 'Authorization': self.environment['Authorization']
152 'Authorization': self.environment['Authorization']
153 }
153 }
154 request_headers = self.app._get_request_headers(environment)
154 request_headers = self.app._get_request_headers(environment)
155 assert request_headers == expected_headers
155 assert request_headers == expected_headers
156
156
157 def test_get_response_headers(self):
157 def test_get_response_headers(self):
158 headers = {
158 headers = {
159 'Connection': 'keep-alive',
159 'Connection': 'keep-alive',
160 'Keep-Alive': 'timeout=5, max=100',
160 'Keep-Alive': 'timeout=5, max=100',
161 'Transfer-Encoding': 'chunked',
161 'Transfer-Encoding': 'chunked',
162 'Content-Encoding': 'gzip',
162 'Content-Encoding': 'gzip',
163 'MS-Author-Via': 'DAV',
163 'MS-Author-Via': 'DAV',
164 'SVN-Supported-Posts': 'create-txn-with-props'
164 'SVN-Supported-Posts': 'create-txn-with-props'
165 }
165 }
166 expected_headers = [
166 expected_headers = [
167 ('MS-Author-Via', 'DAV'),
167 ('MS-Author-Via', 'DAV'),
168 ('SVN-Supported-Posts', 'create-txn-with-props'),
168 ('SVN-Supported-Posts', 'create-txn-with-props'),
169 ]
169 ]
170 response_headers = self.app._get_response_headers(headers)
170 response_headers = self.app._get_response_headers(headers)
171 assert sorted(response_headers) == sorted(expected_headers)
171 assert sorted(response_headers) == sorted(expected_headers)
172
172
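# Hedged sketch consistent with what the test above asserts: hop-by-hop and
# content-encoding headers are filtered out before the response is handed
# back to WSGI, so only end-to-end headers survive.
HOP_BY_HOP_HEADERS = frozenset(
    ('Connection', 'Keep-Alive', 'Transfer-Encoding', 'Content-Encoding'))

def filter_svn_response_headers(headers):
    # hypothetical helper, not the middleware's actual _get_response_headers
    return [(name, value) for name, value in headers.items()
            if name not in HOP_BY_HOP_HEADERS]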
173 @pytest.mark.parametrize('svn_http_url, path_info, expected_url', [
173 @pytest.mark.parametrize('svn_http_url, path_info, expected_url', [
174 ('http://localhost:8200', '/repo_name', 'http://localhost:8200/repo_name'),
174 ('http://localhost:8200', '/repo_name', 'http://localhost:8200/repo_name'),
175 ('http://localhost:8200///', '/repo_name', 'http://localhost:8200/repo_name'),
175 ('http://localhost:8200///', '/repo_name', 'http://localhost:8200/repo_name'),
176 ('http://localhost:8200', '/group/repo_name', 'http://localhost:8200/group/repo_name'),
176 ('http://localhost:8200', '/group/repo_name', 'http://localhost:8200/group/repo_name'),
177 ('http://localhost:8200/', '/group/repo_name', 'http://localhost:8200/group/repo_name'),
177 ('http://localhost:8200/', '/group/repo_name', 'http://localhost:8200/group/repo_name'),
178 ('http://localhost:8200/prefix', '/repo_name', 'http://localhost:8200/prefix/repo_name'),
178 ('http://localhost:8200/prefix', '/repo_name', 'http://localhost:8200/prefix/repo_name'),
179 ('http://localhost:8200/prefix', 'repo_name', 'http://localhost:8200/prefix/repo_name'),
179 ('http://localhost:8200/prefix', 'repo_name', 'http://localhost:8200/prefix/repo_name'),
180 ('http://localhost:8200/prefix', '/group/repo_name', 'http://localhost:8200/prefix/group/repo_name')
180 ('http://localhost:8200/prefix', '/group/repo_name', 'http://localhost:8200/prefix/group/repo_name')
181 ])
181 ])
182 def test_get_url(self, svn_http_url, path_info, expected_url):
182 def test_get_url(self, svn_http_url, path_info, expected_url):
183 url = self.app._get_url(svn_http_url, path_info)
183 url = self.app._get_url(svn_http_url, path_info)
184 assert url == expected_url
184 assert url == expected_url
185
185
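# Hedged sketch of the joining behaviour the parametrized cases above
# describe: trailing slashes on the server URL and a leading slash on the
# path are dropped before joining, so URL prefixes and repo groups survive.
def join_svn_url(server_url, path_info):
    # hypothetical helper, not the middleware's actual _get_url
    return '{}/{}'.format(server_url.rstrip('/'), path_info.lstrip('/'))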
186 def test_call(self):
186 def test_call(self):
187 start_response = Mock()
187 start_response = Mock()
188 response_mock = Mock()
188 response_mock = Mock()
189 response_mock.headers = {
189 response_mock.headers = {
190 'Content-Encoding': 'gzip',
190 'Content-Encoding': 'gzip',
191 'MS-Author-Via': 'DAV',
191 'MS-Author-Via': 'DAV',
192 'SVN-Supported-Posts': 'create-txn-with-props'
192 'SVN-Supported-Posts': 'create-txn-with-props'
193 }
193 }
194
194
195 from rhodecode.lib.middleware.simplesvn import requests
195 from rhodecode.lib.middleware.simplesvn import requests
196 original_request = requests.Session.request
196 original_request = requests.Session.request
197
197
198 with patch('rhodecode.lib.middleware.simplesvn.requests.Session.request', autospec=True) as request_mock:
198 with patch('rhodecode.lib.middleware.simplesvn.requests.Session.request', autospec=True) as request_mock:
199 # Use side_effect to call the original method
199 # Use side_effect to call the original method
200 request_mock.side_effect = original_request
200 request_mock.side_effect = original_request
201 self.app(self.environment, start_response)
201 self.app(self.environment, start_response)
202
202
203 expected_url = f'{self.host.strip("/")}/{self.path}'
203 expected_url = f'{self.host.strip("/")}/{self.path}'
204 expected_request_headers = {
204 expected_request_headers = {
205 'Dav': self.environment['HTTP_DAV'],
205 'Dav': self.environment['HTTP_DAV'],
206 'User-Agent': self.environment['HTTP_USER_AGENT'],
206 'User-Agent': self.environment['HTTP_USER_AGENT'],
207 'Authorization': self.environment['Authorization'],
207 'Authorization': self.environment['Authorization'],
208 'Content-Type': self.environment['CONTENT_TYPE'],
208 'Content-Type': self.environment['CONTENT_TYPE'],
209 'Content-Length': self.environment['CONTENT_LENGTH'],
209 'Content-Length': self.environment['CONTENT_LENGTH'],
210 }
210 }
211
211
212 # Check if the method was called
212 # Check if the method was called
213 assert request_mock.called
213 assert request_mock.called
214 assert request_mock.call_count == 1
214 assert request_mock.call_count == 1
215
215
216 # Extract the session instance from the first call
216 # Extract the session instance from the first call
217 called_with_session = request_mock.call_args[0][0]
217 called_with_session = request_mock.call_args[0][0]
218
218
219 request_mock.assert_called_once_with(
219 request_mock.assert_called_once_with(
220 called_with_session,
220 called_with_session,
221 self.environment['REQUEST_METHOD'], expected_url,
221 self.environment['REQUEST_METHOD'], expected_url,
222 data=self.data, headers=expected_request_headers, stream=False)
222 data=self.data, headers=expected_request_headers, stream=False)
223
223
224 expected_response_headers = [
224 expected_response_headers = [
225 ('SVN-Supported-Posts', 'create-txn-with-props'),
225 ('SVN-Supported-Posts', 'create-txn-with-props'),
226 ('MS-Author-Via', 'DAV'),
226 ('MS-Author-Via', 'DAV'),
227 ]
227 ]
228
228
229 # TODO: the svn backend doesn't have a repo set up for testing
229 # TODO: the svn backend doesn't have a repo set up for testing
230 #args, _ = start_response.call_args
230 #args, _ = start_response.call_args
231 #assert args[0] == '200 OK'
231 #assert args[0] == '200 OK'
232 #assert sorted(args[1]) == sorted(expected_response_headers)
232 #assert sorted(args[1]) == sorted(expected_response_headers)
@@ -1,1140 +1,1114 b''
1
1
2 # Copyright (C) 2010-2023 RhodeCode GmbH
2 # Copyright (C) 2010-2023 RhodeCode GmbH
3 #
3 #
4 # This program is free software: you can redistribute it and/or modify
4 # This program is free software: you can redistribute it and/or modify
5 # it under the terms of the GNU Affero General Public License, version 3
5 # it under the terms of the GNU Affero General Public License, version 3
6 # (only), as published by the Free Software Foundation.
6 # (only), as published by the Free Software Foundation.
7 #
7 #
8 # This program is distributed in the hope that it will be useful,
8 # This program is distributed in the hope that it will be useful,
9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # GNU General Public License for more details.
11 # GNU General Public License for more details.
12 #
12 #
13 # You should have received a copy of the GNU Affero General Public License
13 # You should have received a copy of the GNU Affero General Public License
14 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 #
15 #
16 # This program is dual-licensed. If you wish to learn more about the
16 # This program is dual-licensed. If you wish to learn more about the
17 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 # and proprietary license terms, please see https://rhodecode.com/licenses/
19
19
20 import mock
20 import mock
21 import pytest
21 import pytest
22
22
23 from rhodecode.lib.utils2 import str2bool
23 from rhodecode.lib.utils2 import str2bool
24 from rhodecode.model.meta import Session
24 from rhodecode.model.meta import Session
25 from rhodecode.model.settings import VcsSettingsModel, UiSetting
25 from rhodecode.model.settings import VcsSettingsModel, UiSetting
26
26
27
27
28 HOOKS_FORM_DATA = {
28 HOOKS_FORM_DATA = {
29 'hooks_changegroup_repo_size': True,
29 'hooks_changegroup_repo_size': True,
30 'hooks_changegroup_push_logger': True,
30 'hooks_changegroup_push_logger': True,
31 'hooks_outgoing_pull_logger': True
31 'hooks_outgoing_pull_logger': True
32 }
32 }
33
33
34 SVN_FORM_DATA = {
34 SVN_FORM_DATA = {
35 'new_svn_branch': 'test-branch',
35 'new_svn_branch': 'test-branch',
36 'new_svn_tag': 'test-tag'
36 'new_svn_tag': 'test-tag'
37 }
37 }
38
38
39 GENERAL_FORM_DATA = {
39 GENERAL_FORM_DATA = {
40 'rhodecode_pr_merge_enabled': True,
40 'rhodecode_pr_merge_enabled': True,
41 'rhodecode_use_outdated_comments': True,
41 'rhodecode_use_outdated_comments': True,
42 'rhodecode_hg_use_rebase_for_merging': True,
42 'rhodecode_hg_use_rebase_for_merging': True,
43 'rhodecode_hg_close_branch_before_merging': True,
43 'rhodecode_hg_close_branch_before_merging': True,
44 'rhodecode_git_use_rebase_for_merging': True,
44 'rhodecode_git_use_rebase_for_merging': True,
45 'rhodecode_git_close_branch_before_merging': True,
45 'rhodecode_git_close_branch_before_merging': True,
46 'rhodecode_diff_cache': True,
46 'rhodecode_diff_cache': True,
47 }
47 }
48
48
49
49
50 class TestInheritGlobalSettingsProperty(object):
50 class TestInheritGlobalSettingsProperty(object):
51 def test_get_raises_exception_when_repository_not_specified(self):
51 def test_get_raises_exception_when_repository_not_specified(self):
52 model = VcsSettingsModel()
52 model = VcsSettingsModel()
53 with pytest.raises(Exception) as exc_info:
53 with pytest.raises(Exception) as exc_info:
54 model.inherit_global_settings
54 model.inherit_global_settings
55 assert str(exc_info.value) == 'Repository is not specified'
55 assert str(exc_info.value) == 'Repository is not specified'
56
56
57 def test_true_is_returned_when_value_is_not_found(self, repo_stub):
57 def test_true_is_returned_when_value_is_not_found(self, repo_stub):
58 model = VcsSettingsModel(repo=repo_stub.repo_name)
58 model = VcsSettingsModel(repo=repo_stub.repo_name)
59 assert model.inherit_global_settings is True
59 assert model.inherit_global_settings is True
60
60
61 def test_value_is_returned(self, repo_stub, settings_util):
61 def test_value_is_returned(self, repo_stub, settings_util):
62 model = VcsSettingsModel(repo=repo_stub.repo_name)
62 model = VcsSettingsModel(repo=repo_stub.repo_name)
63 settings_util.create_repo_rhodecode_setting(
63 settings_util.create_repo_rhodecode_setting(
64 repo_stub, VcsSettingsModel.INHERIT_SETTINGS, False, 'bool')
64 repo_stub, VcsSettingsModel.INHERIT_SETTINGS, False, 'bool')
65 assert model.inherit_global_settings is False
65 assert model.inherit_global_settings is False
66
66
67 def test_value_is_set(self, repo_stub):
67 def test_value_is_set(self, repo_stub):
68 model = VcsSettingsModel(repo=repo_stub.repo_name)
68 model = VcsSettingsModel(repo=repo_stub.repo_name)
69 model.inherit_global_settings = False
69 model.inherit_global_settings = False
70 setting = model.repo_settings.get_setting_by_name(
70 setting = model.repo_settings.get_setting_by_name(
71 VcsSettingsModel.INHERIT_SETTINGS)
71 VcsSettingsModel.INHERIT_SETTINGS)
72 try:
72 try:
73 assert setting.app_settings_type == 'bool'
73 assert setting.app_settings_type == 'bool'
74 assert setting.app_settings_value is False
74 assert setting.app_settings_value is False
75 finally:
75 finally:
76 Session().delete(setting)
76 Session().delete(setting)
77 Session().commit()
77 Session().commit()
78
78
79 def test_set_raises_exception_when_repository_not_specified(self):
79 def test_set_raises_exception_when_repository_not_specified(self):
80 model = VcsSettingsModel()
80 model = VcsSettingsModel()
81 with pytest.raises(Exception) as exc_info:
81 with pytest.raises(Exception) as exc_info:
82 model.inherit_global_settings = False
82 model.inherit_global_settings = False
83 assert str(exc_info.value) == 'Repository is not specified'
83 assert str(exc_info.value) == 'Repository is not specified'
84
84
85
85
86 class TestVcsSettingsModel(object):
86 class TestVcsSettingsModel(object):
87 def test_global_svn_branch_patterns(self):
87 def test_global_svn_branch_patterns(self):
88 model = VcsSettingsModel()
88 model = VcsSettingsModel()
89 expected_result = {'test': 'test'}
89 expected_result = {'test': 'test'}
90 with mock.patch.object(model, 'global_settings') as settings_mock:
90 with mock.patch.object(model, 'global_settings') as settings_mock:
91 get_settings = settings_mock.get_ui_by_section
91 get_settings = settings_mock.get_ui_by_section
92 get_settings.return_value = expected_result
92 get_settings.return_value = expected_result
93 settings_mock.return_value = expected_result
93 settings_mock.return_value = expected_result
94 result = model.get_global_svn_branch_patterns()
94 result = model.get_global_svn_branch_patterns()
95
95
96 get_settings.assert_called_once_with(model.SVN_BRANCH_SECTION)
96 get_settings.assert_called_once_with(model.SVN_BRANCH_SECTION)
97 assert expected_result == result
97 assert expected_result == result
98
98
99 def test_repo_svn_branch_patterns(self):
99 def test_repo_svn_branch_patterns(self):
100 model = VcsSettingsModel()
100 model = VcsSettingsModel()
101 expected_result = {'test': 'test'}
101 expected_result = {'test': 'test'}
102 with mock.patch.object(model, 'repo_settings') as settings_mock:
102 with mock.patch.object(model, 'repo_settings') as settings_mock:
103 get_settings = settings_mock.get_ui_by_section
103 get_settings = settings_mock.get_ui_by_section
104 get_settings.return_value = expected_result
104 get_settings.return_value = expected_result
105 settings_mock.return_value = expected_result
105 settings_mock.return_value = expected_result
106 result = model.get_repo_svn_branch_patterns()
106 result = model.get_repo_svn_branch_patterns()
107
107
108 get_settings.assert_called_once_with(model.SVN_BRANCH_SECTION)
108 get_settings.assert_called_once_with(model.SVN_BRANCH_SECTION)
109 assert expected_result == result
109 assert expected_result == result
110
110
111 def test_repo_svn_branch_patterns_raises_exception_when_repo_is_not_set(
111 def test_repo_svn_branch_patterns_raises_exception_when_repo_is_not_set(
112 self):
112 self):
113 model = VcsSettingsModel()
113 model = VcsSettingsModel()
114 with pytest.raises(Exception) as exc_info:
114 with pytest.raises(Exception) as exc_info:
115 model.get_repo_svn_branch_patterns()
115 model.get_repo_svn_branch_patterns()
116 assert str(exc_info.value) == 'Repository is not specified'
116 assert str(exc_info.value) == 'Repository is not specified'
117
117
118 def test_global_svn_tag_patterns(self):
118 def test_global_svn_tag_patterns(self):
119 model = VcsSettingsModel()
119 model = VcsSettingsModel()
120 expected_result = {'test': 'test'}
120 expected_result = {'test': 'test'}
121 with mock.patch.object(model, 'global_settings') as settings_mock:
121 with mock.patch.object(model, 'global_settings') as settings_mock:
122 get_settings = settings_mock.get_ui_by_section
122 get_settings = settings_mock.get_ui_by_section
123 get_settings.return_value = expected_result
123 get_settings.return_value = expected_result
124 settings_mock.return_value = expected_result
124 settings_mock.return_value = expected_result
125 result = model.get_global_svn_tag_patterns()
125 result = model.get_global_svn_tag_patterns()
126
126
127 get_settings.assert_called_once_with(model.SVN_TAG_SECTION)
127 get_settings.assert_called_once_with(model.SVN_TAG_SECTION)
128 assert expected_result == result
128 assert expected_result == result
129
129
130 def test_repo_svn_tag_patterns(self):
130 def test_repo_svn_tag_patterns(self):
131 model = VcsSettingsModel()
131 model = VcsSettingsModel()
132 expected_result = {'test': 'test'}
132 expected_result = {'test': 'test'}
133 with mock.patch.object(model, 'repo_settings') as settings_mock:
133 with mock.patch.object(model, 'repo_settings') as settings_mock:
134 get_settings = settings_mock.get_ui_by_section
134 get_settings = settings_mock.get_ui_by_section
135 get_settings.return_value = expected_result
135 get_settings.return_value = expected_result
136 settings_mock.return_value = expected_result
136 settings_mock.return_value = expected_result
137 result = model.get_repo_svn_tag_patterns()
137 result = model.get_repo_svn_tag_patterns()
138
138
139 get_settings.assert_called_once_with(model.SVN_TAG_SECTION)
139 get_settings.assert_called_once_with(model.SVN_TAG_SECTION)
140 assert expected_result == result
140 assert expected_result == result
141
141
142 def test_repo_svn_tag_patterns_raises_exception_when_repo_is_not_set(self):
142 def test_repo_svn_tag_patterns_raises_exception_when_repo_is_not_set(self):
143 model = VcsSettingsModel()
143 model = VcsSettingsModel()
144 with pytest.raises(Exception) as exc_info:
144 with pytest.raises(Exception) as exc_info:
145 model.get_repo_svn_tag_patterns()
145 model.get_repo_svn_tag_patterns()
146 assert str(exc_info.value) == 'Repository is not specified'
146 assert str(exc_info.value) == 'Repository is not specified'
147
147
148 def test_get_global_settings(self):
148 def test_get_global_settings(self):
149 expected_result = {'test': 'test'}
149 expected_result = {'test': 'test'}
150 model = VcsSettingsModel()
150 model = VcsSettingsModel()
151 with mock.patch.object(model, '_collect_all_settings') as collect_mock:
151 with mock.patch.object(model, '_collect_all_settings') as collect_mock:
152 collect_mock.return_value = expected_result
152 collect_mock.return_value = expected_result
153 result = model.get_global_settings()
153 result = model.get_global_settings()
154
154
155 collect_mock.assert_called_once_with(global_=True)
155 collect_mock.assert_called_once_with(global_=True)
156 assert result == expected_result
156 assert result == expected_result
157
157
158 def test_get_repo_settings(self, repo_stub):
158 def test_get_repo_settings(self, repo_stub):
159 model = VcsSettingsModel(repo=repo_stub.repo_name)
159 model = VcsSettingsModel(repo=repo_stub.repo_name)
160 expected_result = {'test': 'test'}
160 expected_result = {'test': 'test'}
161 with mock.patch.object(model, '_collect_all_settings') as collect_mock:
161 with mock.patch.object(model, '_collect_all_settings') as collect_mock:
162 collect_mock.return_value = expected_result
162 collect_mock.return_value = expected_result
163 result = model.get_repo_settings()
163 result = model.get_repo_settings()
164
164
165 collect_mock.assert_called_once_with(global_=False)
165 collect_mock.assert_called_once_with(global_=False)
166 assert result == expected_result
166 assert result == expected_result
167
167
168 @pytest.mark.parametrize('settings, global_', [
168 @pytest.mark.parametrize('settings, global_', [
169 ('global_settings', True),
169 ('global_settings', True),
170 ('repo_settings', False)
170 ('repo_settings', False)
171 ])
171 ])
172 def test_collect_all_settings(self, settings, global_):
172 def test_collect_all_settings(self, settings, global_):
173 model = VcsSettingsModel()
173 model = VcsSettingsModel()
174 result_mock = self._mock_result()
174 result_mock = self._mock_result()
175
175
176 settings_patch = mock.patch.object(model, settings)
176 settings_patch = mock.patch.object(model, settings)
177 with settings_patch as settings_mock:
177 with settings_patch as settings_mock:
178 settings_mock.get_ui_by_section_and_key.return_value = result_mock
178 settings_mock.get_ui_by_section_and_key.return_value = result_mock
179 settings_mock.get_setting_by_name.return_value = result_mock
179 settings_mock.get_setting_by_name.return_value = result_mock
180 result = model._collect_all_settings(global_=global_)
180 result = model._collect_all_settings(global_=global_)
181
181
182 ui_settings = model.HG_SETTINGS + model.GIT_SETTINGS + model.HOOKS_SETTINGS
182 ui_settings = model.HG_SETTINGS + model.GIT_SETTINGS + model.HOOKS_SETTINGS
183 self._assert_get_settings_calls(
183 self._assert_get_settings_calls(
184 settings_mock, ui_settings, model.GENERAL_SETTINGS)
184 settings_mock, ui_settings, model.GENERAL_SETTINGS)
185 self._assert_collect_all_settings_result(
185 self._assert_collect_all_settings_result(
186 ui_settings, model.GENERAL_SETTINGS, result)
186 ui_settings, model.GENERAL_SETTINGS, result)
187
187
188 @pytest.mark.parametrize('settings, global_', [
188 @pytest.mark.parametrize('settings, global_', [
189 ('global_settings', True),
189 ('global_settings', True),
190 ('repo_settings', False)
190 ('repo_settings', False)
191 ])
191 ])
192 def test_collect_all_settings_without_empty_value(self, settings, global_):
192 def test_collect_all_settings_without_empty_value(self, settings, global_):
193 model = VcsSettingsModel()
193 model = VcsSettingsModel()
194
194
195 settings_patch = mock.patch.object(model, settings)
195 settings_patch = mock.patch.object(model, settings)
196 with settings_patch as settings_mock:
196 with settings_patch as settings_mock:
197 settings_mock.get_ui_by_section_and_key.return_value = None
197 settings_mock.get_ui_by_section_and_key.return_value = None
198 settings_mock.get_setting_by_name.return_value = None
198 settings_mock.get_setting_by_name.return_value = None
199 result = model._collect_all_settings(global_=global_)
199 result = model._collect_all_settings(global_=global_)
200
200
201 assert result == {}
201 assert result == {}
202
202
203 def _mock_result(self):
203 def _mock_result(self):
204 result_mock = mock.Mock()
204 result_mock = mock.Mock()
205 result_mock.ui_value = 'ui_value'
205 result_mock.ui_value = 'ui_value'
206 result_mock.ui_active = True
206 result_mock.ui_active = True
207 result_mock.app_settings_value = 'setting_value'
207 result_mock.app_settings_value = 'setting_value'
208 return result_mock
208 return result_mock
209
209
210 def _assert_get_settings_calls(
210 def _assert_get_settings_calls(
211 self, settings_mock, ui_settings, general_settings):
211 self, settings_mock, ui_settings, general_settings):
212 assert (
212 assert (
213 settings_mock.get_ui_by_section_and_key.call_count ==
213 settings_mock.get_ui_by_section_and_key.call_count ==
214 len(ui_settings))
214 len(ui_settings))
215 assert (
215 assert (
216 settings_mock.get_setting_by_name.call_count ==
216 settings_mock.get_setting_by_name.call_count ==
217 len(general_settings))
217 len(general_settings))
218
218
219 for section, key in ui_settings:
219 for section, key in ui_settings:
220 expected_call = mock.call(section, key)
220 expected_call = mock.call(section, key)
221 assert (
221 assert (
222 expected_call in
222 expected_call in
223 settings_mock.get_ui_by_section_and_key.call_args_list)
223 settings_mock.get_ui_by_section_and_key.call_args_list)
224
224
225 for name in general_settings:
225 for name in general_settings:
226 expected_call = mock.call(name)
226 expected_call = mock.call(name)
227 assert (
227 assert (
228 expected_call in
228 expected_call in
229 settings_mock.get_setting_by_name.call_args_list)
229 settings_mock.get_setting_by_name.call_args_list)
230
230
231 def _assert_collect_all_settings_result(
231 def _assert_collect_all_settings_result(
232 self, ui_settings, general_settings, result):
232 self, ui_settings, general_settings, result):
233 expected_result = {}
233 expected_result = {}
234 for section, key in ui_settings:
234 for section, key in ui_settings:
235 key = '{}_{}'.format(section, key.replace('.', '_'))
235 key = '{}_{}'.format(section, key.replace('.', '_'))
236
236
237 if section in ('extensions', 'hooks'):
237 if section in ('extensions', 'hooks'):
238 value = True
238 value = True
239 elif key in ['vcs_git_lfs_enabled']:
239 elif key in ['vcs_git_lfs_enabled']:
240 value = True
240 value = True
241 else:
241 else:
242 value = 'ui_value'
242 value = 'ui_value'
243 expected_result[key] = value
243 expected_result[key] = value
244
244
245 for name in general_settings:
245 for name in general_settings:
246 key = 'rhodecode_' + name
246 key = 'rhodecode_' + name
247 expected_result[key] = 'setting_value'
247 expected_result[key] = 'setting_value'
248
248
249 assert expected_result == result
249 assert expected_result == result
250
250
251
251
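# Hedged illustration of the key naming the helper above encodes: ui settings
# flatten to '<section>_<key>' with dots turned into underscores, and general
# settings get a 'rhodecode_' prefix. The section/key split shown is an
# assumption consistent with the 'hooks_changegroup_repo_size' and
# 'rhodecode_pr_merge_enabled' form keys defined at the top of this file.
def settings_result_key(section, key):
    return '{}_{}'.format(section, key.replace('.', '_'))

assert settings_result_key('hooks', 'changegroup.repo_size') == 'hooks_changegroup_repo_size'
assert 'rhodecode_' + 'pr_merge_enabled' == 'rhodecode_pr_merge_enabled'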
252 class TestCreateOrUpdateRepoHookSettings(object):
252 class TestCreateOrUpdateRepoHookSettings(object):
253 def test_create_when_no_repo_object_found(self, repo_stub):
253 def test_create_when_no_repo_object_found(self, repo_stub):
254 model = VcsSettingsModel(repo=repo_stub.repo_name)
254 model = VcsSettingsModel(repo=repo_stub.repo_name)
255
255
256 self._create_settings(model, HOOKS_FORM_DATA)
256 self._create_settings(model, HOOKS_FORM_DATA)
257
257
258 cleanup = []
258 cleanup = []
259 try:
259 try:
260 for section, key in model.HOOKS_SETTINGS:
260 for section, key in model.HOOKS_SETTINGS:
261 ui = model.repo_settings.get_ui_by_section_and_key(
261 ui = model.repo_settings.get_ui_by_section_and_key(
262 section, key)
262 section, key)
263 assert ui.ui_active is True
263 assert ui.ui_active is True
264 cleanup.append(ui)
264 cleanup.append(ui)
265 finally:
265 finally:
266 for ui in cleanup:
266 for ui in cleanup:
267 Session().delete(ui)
267 Session().delete(ui)
268 Session().commit()
268 Session().commit()
269
269
270 def test_create_raises_exception_when_data_incomplete(self, repo_stub):
270 def test_create_raises_exception_when_data_incomplete(self, repo_stub):
271 model = VcsSettingsModel(repo=repo_stub.repo_name)
271 model = VcsSettingsModel(repo=repo_stub.repo_name)
272
272
273 deleted_key = 'hooks_changegroup_repo_size'
273 deleted_key = 'hooks_changegroup_repo_size'
274 data = HOOKS_FORM_DATA.copy()
274 data = HOOKS_FORM_DATA.copy()
275 data.pop(deleted_key)
275 data.pop(deleted_key)
276
276
277 with pytest.raises(ValueError) as exc_info:
277 with pytest.raises(ValueError) as exc_info:
278 model.create_or_update_repo_hook_settings(data)
278 model.create_or_update_repo_hook_settings(data)
279 Session().commit()
279 Session().commit()
280
280
281 msg = 'The given data does not contain {} key'.format(deleted_key)
281 msg = 'The given data does not contain {} key'.format(deleted_key)
282 assert str(exc_info.value) == msg
282 assert str(exc_info.value) == msg
283
283
284 def test_update_when_repo_object_found(self, repo_stub, settings_util):
284 def test_update_when_repo_object_found(self, repo_stub, settings_util):
285 model = VcsSettingsModel(repo=repo_stub.repo_name)
285 model = VcsSettingsModel(repo=repo_stub.repo_name)
286 for section, key in model.HOOKS_SETTINGS:
286 for section, key in model.HOOKS_SETTINGS:
287 settings_util.create_repo_rhodecode_ui(
287 settings_util.create_repo_rhodecode_ui(
288 repo_stub, section, None, key=key, active=False)
288 repo_stub, section, None, key=key, active=False)
289 model.create_or_update_repo_hook_settings(HOOKS_FORM_DATA)
289 model.create_or_update_repo_hook_settings(HOOKS_FORM_DATA)
290 Session().commit()
290 Session().commit()
291
291
292 for section, key in model.HOOKS_SETTINGS:
292 for section, key in model.HOOKS_SETTINGS:
293 ui = model.repo_settings.get_ui_by_section_and_key(section, key)
293 ui = model.repo_settings.get_ui_by_section_and_key(section, key)
294 assert ui.ui_active is True
294 assert ui.ui_active is True
295
295
296 def _create_settings(self, model, data):
296 def _create_settings(self, model, data):
297 global_patch = mock.patch.object(model, 'global_settings')
297 global_patch = mock.patch.object(model, 'global_settings')
298 global_setting = mock.Mock()
298 global_setting = mock.Mock()
299 global_setting.ui_value = 'Test value'
299 global_setting.ui_value = 'Test value'
300 with global_patch as global_mock:
300 with global_patch as global_mock:
301 global_mock.get_ui_by_section_and_key.return_value = global_setting
301 global_mock.get_ui_by_section_and_key.return_value = global_setting
302 model.create_or_update_repo_hook_settings(HOOKS_FORM_DATA)
302 model.create_or_update_repo_hook_settings(HOOKS_FORM_DATA)
303 Session().commit()
303 Session().commit()
304
304
305
305
306 class TestUpdateGlobalHookSettings(object):
306 class TestUpdateGlobalHookSettings(object):
307 def test_update_raises_exception_when_data_incomplete(self):
307 def test_update_raises_exception_when_data_incomplete(self):
308 model = VcsSettingsModel()
308 model = VcsSettingsModel()
309
309
310 deleted_key = 'hooks_changegroup_repo_size'
310 deleted_key = 'hooks_changegroup_repo_size'
311 data = HOOKS_FORM_DATA.copy()
311 data = HOOKS_FORM_DATA.copy()
312 data.pop(deleted_key)
312 data.pop(deleted_key)
313
313
314 with pytest.raises(ValueError) as exc_info:
314 with pytest.raises(ValueError) as exc_info:
315 model.update_global_hook_settings(data)
315 model.update_global_hook_settings(data)
316 Session().commit()
316 Session().commit()
317
317
318 msg = 'The given data does not contain {} key'.format(deleted_key)
318 msg = 'The given data does not contain {} key'.format(deleted_key)
319 assert str(exc_info.value) == msg
319 assert str(exc_info.value) == msg
320
320
321 def test_update_global_hook_settings(self, settings_util):
321 def test_update_global_hook_settings(self, settings_util):
322 model = VcsSettingsModel()
322 model = VcsSettingsModel()
323 setting_mock = mock.MagicMock()
323 setting_mock = mock.MagicMock()
324 setting_mock.ui_active = False
324 setting_mock.ui_active = False
325 get_settings_patcher = mock.patch.object(
325 get_settings_patcher = mock.patch.object(
326 model.global_settings, 'get_ui_by_section_and_key',
326 model.global_settings, 'get_ui_by_section_and_key',
327 return_value=setting_mock)
327 return_value=setting_mock)
328 session_patcher = mock.patch('rhodecode.model.settings.Session')
328 session_patcher = mock.patch('rhodecode.model.settings.Session')
329 with get_settings_patcher as get_settings_mock, session_patcher:
329 with get_settings_patcher as get_settings_mock, session_patcher:
330 model.update_global_hook_settings(HOOKS_FORM_DATA)
330 model.update_global_hook_settings(HOOKS_FORM_DATA)
331 Session().commit()
331 Session().commit()
332
332
333 assert setting_mock.ui_active is True
333 assert setting_mock.ui_active is True
334 assert get_settings_mock.call_count == 3
334 assert get_settings_mock.call_count == 3
335
335
336
336
337 class TestCreateOrUpdateRepoGeneralSettings(object):
337 class TestCreateOrUpdateRepoGeneralSettings(object):
338 def test_calls_create_or_update_general_settings(self, repo_stub):
338 def test_calls_create_or_update_general_settings(self, repo_stub):
339 model = VcsSettingsModel(repo=repo_stub.repo_name)
339 model = VcsSettingsModel(repo=repo_stub.repo_name)
340 create_patch = mock.patch.object(
340 create_patch = mock.patch.object(
341 model, '_create_or_update_general_settings')
341 model, '_create_or_update_general_settings')
342 with create_patch as create_mock:
342 with create_patch as create_mock:
343 model.create_or_update_repo_pr_settings(GENERAL_FORM_DATA)
343 model.create_or_update_repo_pr_settings(GENERAL_FORM_DATA)
344 Session().commit()
344 Session().commit()
345
345
346 create_mock.assert_called_once_with(
346 create_mock.assert_called_once_with(
347 model.repo_settings, GENERAL_FORM_DATA)
347 model.repo_settings, GENERAL_FORM_DATA)
348
348
349 def test_raises_exception_when_repository_is_not_specified(self):
349 def test_raises_exception_when_repository_is_not_specified(self):
350 model = VcsSettingsModel()
350 model = VcsSettingsModel()
351 with pytest.raises(Exception) as exc_info:
351 with pytest.raises(Exception) as exc_info:
352 model.create_or_update_repo_pr_settings(GENERAL_FORM_DATA)
352 model.create_or_update_repo_pr_settings(GENERAL_FORM_DATA)
353 assert str(exc_info.value) == 'Repository is not specified'
353 assert str(exc_info.value) == 'Repository is not specified'
354
354
355
355
356 class TestCreateOrUpdateGlobalGeneralSettings(object):
356 class TestCreateOrUpdateGlobalGeneralSettings(object):
357 def test_calls_create_or_update_general_settings(self):
357 def test_calls_create_or_update_general_settings(self):
358 model = VcsSettingsModel()
358 model = VcsSettingsModel()
359 create_patch = mock.patch.object(
359 create_patch = mock.patch.object(
360 model, '_create_or_update_general_settings')
360 model, '_create_or_update_general_settings')
361 with create_patch as create_mock:
361 with create_patch as create_mock:
362 model.create_or_update_global_pr_settings(GENERAL_FORM_DATA)
362 model.create_or_update_global_pr_settings(GENERAL_FORM_DATA)
363 create_mock.assert_called_once_with(
363 create_mock.assert_called_once_with(
364 model.global_settings, GENERAL_FORM_DATA)
364 model.global_settings, GENERAL_FORM_DATA)
365
365
366
366
367 class TestCreateOrUpdateGeneralSettings(object):
367 class TestCreateOrUpdateGeneralSettings(object):
368 def test_create_when_no_repo_settings_found(self, repo_stub):
368 def test_create_when_no_repo_settings_found(self, repo_stub):
369 model = VcsSettingsModel(repo=repo_stub.repo_name)
369 model = VcsSettingsModel(repo=repo_stub.repo_name)
370 model._create_or_update_general_settings(
370 model._create_or_update_general_settings(
371 model.repo_settings, GENERAL_FORM_DATA)
371 model.repo_settings, GENERAL_FORM_DATA)
372
372
373 cleanup = []
373 cleanup = []
374 try:
374 try:
375 for name in model.GENERAL_SETTINGS:
375 for name in model.GENERAL_SETTINGS:
376 setting = model.repo_settings.get_setting_by_name(name)
376 setting = model.repo_settings.get_setting_by_name(name)
377 assert setting.app_settings_value is True
377 assert setting.app_settings_value is True
378 cleanup.append(setting)
378 cleanup.append(setting)
379 finally:
379 finally:
380 for setting in cleanup:
380 for setting in cleanup:
381 Session().delete(setting)
381 Session().delete(setting)
382 Session().commit()
382 Session().commit()
383
383
384 def test_create_raises_exception_when_data_incomplete(self, repo_stub):
384 def test_create_raises_exception_when_data_incomplete(self, repo_stub):
385 model = VcsSettingsModel(repo=repo_stub.repo_name)
385 model = VcsSettingsModel(repo=repo_stub.repo_name)
386
386
387 deleted_key = 'rhodecode_pr_merge_enabled'
387 deleted_key = 'rhodecode_pr_merge_enabled'
388 data = GENERAL_FORM_DATA.copy()
388 data = GENERAL_FORM_DATA.copy()
389 data.pop(deleted_key)
389 data.pop(deleted_key)
390
390
391 with pytest.raises(ValueError) as exc_info:
391 with pytest.raises(ValueError) as exc_info:
392 model._create_or_update_general_settings(model.repo_settings, data)
392 model._create_or_update_general_settings(model.repo_settings, data)
393 Session().commit()
393 Session().commit()
394
394
395 msg = 'The given data does not contain {} key'.format(deleted_key)
395 msg = 'The given data does not contain {} key'.format(deleted_key)
396 assert str(exc_info.value) == msg
396 assert str(exc_info.value) == msg
397
397
398 def test_update_when_repo_setting_found(self, repo_stub, settings_util):
398 def test_update_when_repo_setting_found(self, repo_stub, settings_util):
399 model = VcsSettingsModel(repo=repo_stub.repo_name)
399 model = VcsSettingsModel(repo=repo_stub.repo_name)
400 for name in model.GENERAL_SETTINGS:
400 for name in model.GENERAL_SETTINGS:
401 settings_util.create_repo_rhodecode_setting(
401 settings_util.create_repo_rhodecode_setting(
402 repo_stub, name, False, 'bool')
402 repo_stub, name, False, 'bool')
403
403
404 model._create_or_update_general_settings(
404 model._create_or_update_general_settings(
405 model.repo_settings, GENERAL_FORM_DATA)
405 model.repo_settings, GENERAL_FORM_DATA)
406 Session().commit()
406 Session().commit()
407
407
408 for name in model.GENERAL_SETTINGS:
408 for name in model.GENERAL_SETTINGS:
409 setting = model.repo_settings.get_setting_by_name(name)
409 setting = model.repo_settings.get_setting_by_name(name)
410 assert setting.app_settings_value is True
410 assert setting.app_settings_value is True
411
411
412
412
413 class TestCreateRepoSvnSettings(object):
413 class TestCreateRepoSvnSettings(object):
414 def test_calls_create_svn_settings(self, repo_stub):
414 def test_calls_create_svn_settings(self, repo_stub):
415 model = VcsSettingsModel(repo=repo_stub.repo_name)
415 model = VcsSettingsModel(repo=repo_stub.repo_name)
416 with mock.patch.object(model, '_create_svn_settings') as create_mock:
416 with mock.patch.object(model, '_create_svn_settings') as create_mock:
417 model.create_repo_svn_settings(SVN_FORM_DATA)
417 model.create_repo_svn_settings(SVN_FORM_DATA)
418 Session().commit()
418 Session().commit()
419
419
420 create_mock.assert_called_once_with(model.repo_settings, SVN_FORM_DATA)
420 create_mock.assert_called_once_with(model.repo_settings, SVN_FORM_DATA)
421
421
422 def test_raises_exception_when_repository_is_not_specified(self):
422 def test_raises_exception_when_repository_is_not_specified(self):
423 model = VcsSettingsModel()
423 model = VcsSettingsModel()
424 with pytest.raises(Exception) as exc_info:
424 with pytest.raises(Exception) as exc_info:
425 model.create_repo_svn_settings(SVN_FORM_DATA)
425 model.create_repo_svn_settings(SVN_FORM_DATA)
426 Session().commit()
426 Session().commit()
427
427
428 assert str(exc_info.value) == 'Repository is not specified'
428 assert str(exc_info.value) == 'Repository is not specified'
429
429
430
430
431 class TestCreateSvnSettings(object):
431 class TestCreateSvnSettings(object):
432 def test_create(self, repo_stub):
432 def test_create(self, repo_stub):
433 model = VcsSettingsModel(repo=repo_stub.repo_name)
433 model = VcsSettingsModel(repo=repo_stub.repo_name)
434 model._create_svn_settings(model.repo_settings, SVN_FORM_DATA)
434 model._create_svn_settings(model.repo_settings, SVN_FORM_DATA)
435 Session().commit()
435 Session().commit()
436
436
437 branch_ui = model.repo_settings.get_ui_by_section(
437 branch_ui = model.repo_settings.get_ui_by_section(
438 model.SVN_BRANCH_SECTION)
438 model.SVN_BRANCH_SECTION)
439 tag_ui = model.repo_settings.get_ui_by_section(
439 tag_ui = model.repo_settings.get_ui_by_section(
440 model.SVN_TAG_SECTION)
440 model.SVN_TAG_SECTION)
441
441
442 try:
442 try:
443 assert len(branch_ui) == 1
443 assert len(branch_ui) == 1
444 assert len(tag_ui) == 1
444 assert len(tag_ui) == 1
445 finally:
445 finally:
446 Session().delete(branch_ui[0])
446 Session().delete(branch_ui[0])
447 Session().delete(tag_ui[0])
447 Session().delete(tag_ui[0])
448 Session().commit()
448 Session().commit()
449
449
450 def test_create_tag(self, repo_stub):
450 def test_create_tag(self, repo_stub):
451 model = VcsSettingsModel(repo=repo_stub.repo_name)
451 model = VcsSettingsModel(repo=repo_stub.repo_name)
452 data = SVN_FORM_DATA.copy()
452 data = SVN_FORM_DATA.copy()
453 data.pop('new_svn_branch')
453 data.pop('new_svn_branch')
454 model._create_svn_settings(model.repo_settings, data)
454 model._create_svn_settings(model.repo_settings, data)
455 Session().commit()
455 Session().commit()
456
456
457 branch_ui = model.repo_settings.get_ui_by_section(
457 branch_ui = model.repo_settings.get_ui_by_section(
458 model.SVN_BRANCH_SECTION)
458 model.SVN_BRANCH_SECTION)
459 tag_ui = model.repo_settings.get_ui_by_section(
459 tag_ui = model.repo_settings.get_ui_by_section(
460 model.SVN_TAG_SECTION)
460 model.SVN_TAG_SECTION)
461
461
462 try:
462 try:
463 assert len(branch_ui) == 0
463 assert len(branch_ui) == 0
464 assert len(tag_ui) == 1
464 assert len(tag_ui) == 1
465 finally:
465 finally:
466 Session().delete(tag_ui[0])
466 Session().delete(tag_ui[0])
467 Session().commit()
467 Session().commit()
468
468
469 def test_create_nothing_when_no_svn_settings_specified(self, repo_stub):
469 def test_create_nothing_when_no_svn_settings_specified(self, repo_stub):
470 model = VcsSettingsModel(repo=repo_stub.repo_name)
470 model = VcsSettingsModel(repo=repo_stub.repo_name)
471 model._create_svn_settings(model.repo_settings, {})
471 model._create_svn_settings(model.repo_settings, {})
472 Session().commit()
472 Session().commit()
473
473
474 branch_ui = model.repo_settings.get_ui_by_section(
474 branch_ui = model.repo_settings.get_ui_by_section(
475 model.SVN_BRANCH_SECTION)
475 model.SVN_BRANCH_SECTION)
476 tag_ui = model.repo_settings.get_ui_by_section(
476 tag_ui = model.repo_settings.get_ui_by_section(
477 model.SVN_TAG_SECTION)
477 model.SVN_TAG_SECTION)
478
478
479 assert len(branch_ui) == 0
479 assert len(branch_ui) == 0
480 assert len(tag_ui) == 0
480 assert len(tag_ui) == 0
481
481
482 def test_create_nothing_when_empty_settings_specified(self, repo_stub):
482 def test_create_nothing_when_empty_settings_specified(self, repo_stub):
483 model = VcsSettingsModel(repo=repo_stub.repo_name)
483 model = VcsSettingsModel(repo=repo_stub.repo_name)
484 data = {
484 data = {
485 'new_svn_branch': '',
485 'new_svn_branch': '',
486 'new_svn_tag': ''
486 'new_svn_tag': ''
487 }
487 }
488 model._create_svn_settings(model.repo_settings, data)
488 model._create_svn_settings(model.repo_settings, data)
489 Session().commit()
489 Session().commit()
490
490
491 branch_ui = model.repo_settings.get_ui_by_section(
491 branch_ui = model.repo_settings.get_ui_by_section(
492 model.SVN_BRANCH_SECTION)
492 model.SVN_BRANCH_SECTION)
493 tag_ui = model.repo_settings.get_ui_by_section(
493 tag_ui = model.repo_settings.get_ui_by_section(
494 model.SVN_TAG_SECTION)
494 model.SVN_TAG_SECTION)
495
495
496 assert len(branch_ui) == 0
496 assert len(branch_ui) == 0
497 assert len(tag_ui) == 0
497 assert len(tag_ui) == 0
498
498
499
499
500 class TestCreateOrUpdateUi(object):
500 class TestCreateOrUpdateUi(object):
501 def test_create(self, repo_stub):
501 def test_create(self, repo_stub):
502 model = VcsSettingsModel(repo=repo_stub.repo_name)
502 model = VcsSettingsModel(repo=repo_stub.repo_name)
503 model._create_or_update_ui(
503 model._create_or_update_ui(
504 model.repo_settings, 'test-section', 'test-key', active=False,
504 model.repo_settings, 'test-section', 'test-key', active=False,
505 value='False')
505 value='False')
506 Session().commit()
506 Session().commit()
507
507
508 created_ui = model.repo_settings.get_ui_by_section_and_key(
508 created_ui = model.repo_settings.get_ui_by_section_and_key(
509 'test-section', 'test-key')
509 'test-section', 'test-key')
510
510
511 try:
511 try:
512 assert created_ui.ui_active is False
512 assert created_ui.ui_active is False
513 assert str2bool(created_ui.ui_value) is False
513 assert str2bool(created_ui.ui_value) is False
514 finally:
514 finally:
515 Session().delete(created_ui)
515 Session().delete(created_ui)
516 Session().commit()
516 Session().commit()
517
517
518 def test_update(self, repo_stub, settings_util):
518 def test_update(self, repo_stub, settings_util):
519 model = VcsSettingsModel(repo=repo_stub.repo_name)
519 model = VcsSettingsModel(repo=repo_stub.repo_name)
520 # we only care about the first three settings here
520 # we only care about the first three settings here
521 largefiles, phases, evolve = model.HG_SETTINGS[:3]
521 largefiles, phases, evolve = model.HG_SETTINGS[:3]
522
522
523 section = 'test-section'
523 section = 'test-section'
524 key = 'test-key'
524 key = 'test-key'
525 settings_util.create_repo_rhodecode_ui(
525 settings_util.create_repo_rhodecode_ui(
526 repo_stub, section, 'True', key=key, active=True)
526 repo_stub, section, 'True', key=key, active=True)
527
527
528 model._create_or_update_ui(
528 model._create_or_update_ui(
529 model.repo_settings, section, key, active=False, value='False')
529 model.repo_settings, section, key, active=False, value='False')
530 Session().commit()
530 Session().commit()
531
531
532 created_ui = model.repo_settings.get_ui_by_section_and_key(
532 created_ui = model.repo_settings.get_ui_by_section_and_key(
533 section, key)
533 section, key)
534 assert created_ui.ui_active is False
534 assert created_ui.ui_active is False
535 assert str2bool(created_ui.ui_value) is False
535 assert str2bool(created_ui.ui_value) is False
536
536
537
537
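Side note: the two tests above pin down the upsert behaviour of _create_or_update_ui. A minimal sketch of the call pattern they exercise, reusing only names that already appear in the tests (the repo name below is a placeholder):

    # sketch only -- mirrors TestCreateOrUpdateUi, not the production call sites
    model = VcsSettingsModel(repo='some-repo')          # placeholder repo name
    model._create_or_update_ui(
        model.repo_settings, 'test-section', 'test-key',
        active=False, value='False')                    # creates the row, or updates it if it already exists
    Session().commit()

    created_ui = model.repo_settings.get_ui_by_section_and_key('test-section', 'test-key')
    assert created_ui.ui_active is False
    assert str2bool(created_ui.ui_value) is False
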
538 class TestCreateOrUpdateRepoHgSettings(object):
538 class TestCreateOrUpdateRepoHgSettings(object):
539 FORM_DATA = {
539 FORM_DATA = {
540 'extensions_largefiles': False,
540 'extensions_largefiles': False,
541 'extensions_evolve': False,
541 'extensions_evolve': False,
542 'phases_publish': False
542 'phases_publish': False
543 }
543 }
544
544
545 def test_creates_repo_hg_settings_when_data_is_correct(self, repo_stub):
545 def test_creates_repo_hg_settings_when_data_is_correct(self, repo_stub):
546 model = VcsSettingsModel(repo=repo_stub.repo_name)
546 model = VcsSettingsModel(repo=repo_stub.repo_name)
547 with mock.patch.object(model, '_create_or_update_ui') as create_mock:
547 with mock.patch.object(model, '_create_or_update_ui') as create_mock:
548 model.create_or_update_repo_hg_settings(self.FORM_DATA)
548 model.create_or_update_repo_hg_settings(self.FORM_DATA)
549 expected_calls = [
549 expected_calls = [
550 mock.call(model.repo_settings, 'extensions', 'largefiles', active=False, value=''),
550 mock.call(model.repo_settings, 'extensions', 'largefiles', active=False, value=''),
551 mock.call(model.repo_settings, 'extensions', 'evolve', active=False, value=''),
551 mock.call(model.repo_settings, 'extensions', 'evolve', active=False, value=''),
552 mock.call(model.repo_settings, 'experimental', 'evolution', active=False, value=''),
552 mock.call(model.repo_settings, 'experimental', 'evolution', active=False, value=''),
553 mock.call(model.repo_settings, 'experimental', 'evolution.exchange', active=False, value='no'),
553 mock.call(model.repo_settings, 'experimental', 'evolution.exchange', active=False, value='no'),
554 mock.call(model.repo_settings, 'extensions', 'topic', active=False, value=''),
554 mock.call(model.repo_settings, 'extensions', 'topic', active=False, value=''),
555 mock.call(model.repo_settings, 'phases', 'publish', value='False'),
555 mock.call(model.repo_settings, 'phases', 'publish', value='False'),
556 ]
556 ]
557 assert expected_calls == create_mock.call_args_list
557 assert expected_calls == create_mock.call_args_list
558
558
559 @pytest.mark.parametrize('field_to_remove', FORM_DATA.keys())
559 @pytest.mark.parametrize('field_to_remove', FORM_DATA.keys())
560 def test_key_is_not_found(self, repo_stub, field_to_remove):
560 def test_key_is_not_found(self, repo_stub, field_to_remove):
561 model = VcsSettingsModel(repo=repo_stub.repo_name)
561 model = VcsSettingsModel(repo=repo_stub.repo_name)
562 data = self.FORM_DATA.copy()
562 data = self.FORM_DATA.copy()
563 data.pop(field_to_remove)
563 data.pop(field_to_remove)
564 with pytest.raises(ValueError) as exc_info:
564 with pytest.raises(ValueError) as exc_info:
565 model.create_or_update_repo_hg_settings(data)
565 model.create_or_update_repo_hg_settings(data)
566 Session().commit()
566 Session().commit()
567
567
568 expected_message = 'The given data does not contain {} key'.format(
568 expected_message = 'The given data does not contain {} key'.format(
569 field_to_remove)
569 field_to_remove)
570 assert str(exc_info.value) == expected_message
570 assert str(exc_info.value) == expected_message
571
571
572 def test_create_raises_exception_when_repository_not_specified(self):
572 def test_create_raises_exception_when_repository_not_specified(self):
573 model = VcsSettingsModel()
573 model = VcsSettingsModel()
574 with pytest.raises(Exception) as exc_info:
574 with pytest.raises(Exception) as exc_info:
575 model.create_or_update_repo_hg_settings(self.FORM_DATA)
575 model.create_or_update_repo_hg_settings(self.FORM_DATA)
576 Session().commit()
576 Session().commit()
577
577
578 assert str(exc_info.value) == 'Repository is not specified'
578 assert str(exc_info.value) == 'Repository is not specified'
579
579
580
580
581 class TestUpdateGlobalSslSetting(object):
581 class TestUpdateGlobalSslSetting(object):
582 def test_updates_global_hg_settings(self):
582 def test_updates_global_hg_settings(self):
583 model = VcsSettingsModel()
583 model = VcsSettingsModel()
584 with mock.patch.object(model, '_create_or_update_ui') as create_mock:
584 with mock.patch.object(model, '_create_or_update_ui') as create_mock:
585 model.update_global_ssl_setting('False')
585 model.update_global_ssl_setting('False')
586 Session().commit()
586 Session().commit()
587
587
588 create_mock.assert_called_once_with(
588 create_mock.assert_called_once_with(
589 model.global_settings, 'web', 'push_ssl', value='False')
589 model.global_settings, 'web', 'push_ssl', value='False')
590
590
591
591
592 class TestUpdateGlobalPathSetting(object):
593 def test_updates_global_path_settings(self):
594 model = VcsSettingsModel()
595 with mock.patch.object(model, '_create_or_update_ui') as create_mock:
596 model.update_global_path_setting('False')
597 Session().commit()
598
599 create_mock.assert_called_once_with(
600 model.global_settings, 'paths', '/', value='False')
601
602
603 class TestCreateOrUpdateGlobalHgSettings(object):
592 class TestCreateOrUpdateGlobalHgSettings(object):
604 FORM_DATA = {
593 FORM_DATA = {
605 'extensions_largefiles': False,
594 'extensions_largefiles': False,
606 'largefiles_usercache': '/example/largefiles-store',
595 'largefiles_usercache': '/example/largefiles-store',
607 'phases_publish': False,
596 'phases_publish': False,
608 'extensions_evolve': False
597 'extensions_evolve': False
609 }
598 }
610
599
611 def test_creates_repo_hg_settings_when_data_is_correct(self):
600 def test_creates_repo_hg_settings_when_data_is_correct(self):
612 model = VcsSettingsModel()
601 model = VcsSettingsModel()
613 with mock.patch.object(model, '_create_or_update_ui') as create_mock:
602 with mock.patch.object(model, '_create_or_update_ui') as create_mock:
614 model.create_or_update_global_hg_settings(self.FORM_DATA)
603 model.create_or_update_global_hg_settings(self.FORM_DATA)
615 Session().commit()
604 Session().commit()
616
605
617 expected_calls = [
606 expected_calls = [
618 mock.call(model.global_settings, 'extensions', 'largefiles', active=False, value=''),
607 mock.call(model.global_settings, 'extensions', 'largefiles', active=False, value=''),
619 mock.call(model.global_settings, 'largefiles', 'usercache', value='/example/largefiles-store'),
608 mock.call(model.global_settings, 'largefiles', 'usercache', value='/example/largefiles-store'),
620 mock.call(model.global_settings, 'phases', 'publish', value='False'),
609 mock.call(model.global_settings, 'phases', 'publish', value='False'),
621 mock.call(model.global_settings, 'extensions', 'evolve', active=False, value=''),
610 mock.call(model.global_settings, 'extensions', 'evolve', active=False, value=''),
622 mock.call(model.global_settings, 'experimental', 'evolution', active=False, value=''),
611 mock.call(model.global_settings, 'experimental', 'evolution', active=False, value=''),
623 mock.call(model.global_settings, 'experimental', 'evolution.exchange', active=False, value='no'),
612 mock.call(model.global_settings, 'experimental', 'evolution.exchange', active=False, value='no'),
624 mock.call(model.global_settings, 'extensions', 'topic', active=False, value=''),
613 mock.call(model.global_settings, 'extensions', 'topic', active=False, value=''),
625 ]
614 ]
626
615
627 assert expected_calls == create_mock.call_args_list
616 assert expected_calls == create_mock.call_args_list
628
617
629 @pytest.mark.parametrize('field_to_remove', FORM_DATA.keys())
618 @pytest.mark.parametrize('field_to_remove', FORM_DATA.keys())
630 def test_key_is_not_found(self, repo_stub, field_to_remove):
619 def test_key_is_not_found(self, repo_stub, field_to_remove):
631 model = VcsSettingsModel(repo=repo_stub.repo_name)
620 model = VcsSettingsModel(repo=repo_stub.repo_name)
632 data = self.FORM_DATA.copy()
621 data = self.FORM_DATA.copy()
633 data.pop(field_to_remove)
622 data.pop(field_to_remove)
634 with pytest.raises(Exception) as exc_info:
623 with pytest.raises(Exception) as exc_info:
635 model.create_or_update_global_hg_settings(data)
624 model.create_or_update_global_hg_settings(data)
636 Session().commit()
625 Session().commit()
637
626
638 expected_message = 'The given data does not contain {} key'.format(
627 expected_message = 'The given data does not contain {} key'.format(
639 field_to_remove)
628 field_to_remove)
640 assert str(exc_info.value) == expected_message
629 assert str(exc_info.value) == expected_message
641
630
642
631
643 class TestCreateOrUpdateGlobalGitSettings(object):
632 class TestCreateOrUpdateGlobalGitSettings(object):
644 FORM_DATA = {
633 FORM_DATA = {
645 'vcs_git_lfs_enabled': False,
634 'vcs_git_lfs_enabled': False,
646 'vcs_git_lfs_store_location': '/example/lfs-store',
635 'vcs_git_lfs_store_location': '/example/lfs-store',
647 }
636 }
648
637
649 def test_creates_repo_hg_settings_when_data_is_correct(self):
638 def test_creates_repo_hg_settings_when_data_is_correct(self):
650 model = VcsSettingsModel()
639 model = VcsSettingsModel()
651 with mock.patch.object(model, '_create_or_update_ui') as create_mock:
640 with mock.patch.object(model, '_create_or_update_ui') as create_mock:
652 model.create_or_update_global_git_settings(self.FORM_DATA)
641 model.create_or_update_global_git_settings(self.FORM_DATA)
653 Session().commit()
642 Session().commit()
654
643
655 expected_calls = [
644 expected_calls = [
656 mock.call(model.global_settings, 'vcs_git_lfs', 'enabled', active=False, value=False),
645 mock.call(model.global_settings, 'vcs_git_lfs', 'enabled', active=False, value=False),
657 mock.call(model.global_settings, 'vcs_git_lfs', 'store_location', value='/example/lfs-store'),
646 mock.call(model.global_settings, 'vcs_git_lfs', 'store_location', value='/example/lfs-store'),
658 ]
647 ]
659 assert expected_calls == create_mock.call_args_list
648 assert expected_calls == create_mock.call_args_list
660
649
661
650
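Side note: the expected_calls lists above encode how flat form keys map onto (section, key) pairs in the ui table, e.g. 'extensions_largefiles' becomes ('extensions', 'largefiles') and 'vcs_git_lfs_store_location' becomes ('vcs_git_lfs', 'store_location'). An illustrative summary derived from those mock.call() expectations (not the production lookup table):

    # derived from the expected_calls above; illustration only
    FORM_KEY_TO_UI = {
        'extensions_largefiles':      ('extensions', 'largefiles'),
        'largefiles_usercache':       ('largefiles', 'usercache'),
        'phases_publish':             ('phases', 'publish'),
        'extensions_evolve':          ('extensions', 'evolve'),
        'vcs_git_lfs_enabled':        ('vcs_git_lfs', 'enabled'),
        'vcs_git_lfs_store_location': ('vcs_git_lfs', 'store_location'),
    }
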
662 class TestDeleteRepoSvnPattern(object):
651 class TestDeleteRepoSvnPattern(object):
663 def test_success_when_repo_is_set(self, backend_svn, settings_util):
652 def test_success_when_repo_is_set(self, backend_svn, settings_util):
664 repo = backend_svn.create_repo()
653 repo = backend_svn.create_repo()
665 repo_name = repo.repo_name
654 repo_name = repo.repo_name
666
655
667 model = VcsSettingsModel(repo=repo_name)
656 model = VcsSettingsModel(repo=repo_name)
668 entry = settings_util.create_repo_rhodecode_ui(
657 entry = settings_util.create_repo_rhodecode_ui(
669 repo, VcsSettingsModel.SVN_BRANCH_SECTION, 'svn-branch')
658 repo, VcsSettingsModel.SVN_BRANCH_SECTION, 'svn-branch')
670 Session().commit()
659 Session().commit()
671
660
672 model.delete_repo_svn_pattern(entry.ui_id)
661 model.delete_repo_svn_pattern(entry.ui_id)
673
662
674 def test_fail_when_delete_id_from_other_repo(self, backend_svn):
663 def test_fail_when_delete_id_from_other_repo(self, backend_svn):
675 repo_name = backend_svn.repo_name
664 repo_name = backend_svn.repo_name
676 model = VcsSettingsModel(repo=repo_name)
665 model = VcsSettingsModel(repo=repo_name)
677 delete_ui_patch = mock.patch.object(model.repo_settings, 'delete_ui')
666 delete_ui_patch = mock.patch.object(model.repo_settings, 'delete_ui')
678 with delete_ui_patch as delete_ui_mock:
667 with delete_ui_patch as delete_ui_mock:
679 model.delete_repo_svn_pattern(123)
668 model.delete_repo_svn_pattern(123)
680 Session().commit()
669 Session().commit()
681
670
682 delete_ui_mock.assert_called_once_with(-1)
671 delete_ui_mock.assert_called_once_with(-1)
683
672
684 def test_raises_exception_when_repository_is_not_specified(self):
673 def test_raises_exception_when_repository_is_not_specified(self):
685 model = VcsSettingsModel()
674 model = VcsSettingsModel()
686 with pytest.raises(Exception) as exc_info:
675 with pytest.raises(Exception) as exc_info:
687 model.delete_repo_svn_pattern(123)
676 model.delete_repo_svn_pattern(123)
688 assert str(exc_info.value) == 'Repository is not specified'
677 assert str(exc_info.value) == 'Repository is not specified'
689
678
690
679
691 class TestDeleteGlobalSvnPattern(object):
680 class TestDeleteGlobalSvnPattern(object):
692 def test_delete_global_svn_pattern_calls_delete_ui(self):
681 def test_delete_global_svn_pattern_calls_delete_ui(self):
693 model = VcsSettingsModel()
682 model = VcsSettingsModel()
694 delete_ui_patch = mock.patch.object(model.global_settings, 'delete_ui')
683 delete_ui_patch = mock.patch.object(model.global_settings, 'delete_ui')
695 with delete_ui_patch as delete_ui_mock:
684 with delete_ui_patch as delete_ui_mock:
696 model.delete_global_svn_pattern(123)
685 model.delete_global_svn_pattern(123)
697 delete_ui_mock.assert_called_once_with(123)
686 delete_ui_mock.assert_called_once_with(123)
698
687
699
688
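Side note: the two delete tests above differ only in scoping, which the assertions make explicit:

    # summary of the assertions above, not new behaviour
    # delete_repo_svn_pattern(123) with an id from another repo -> repo_settings.delete_ui(-1)    (sentinel, matches nothing)
    # delete_global_svn_pattern(123)                            -> global_settings.delete_ui(123) (id passed straight through)
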
700 class TestFilterUiSettings(object):
689 class TestFilterUiSettings(object):
701 def test_settings_are_filtered(self):
690 def test_settings_are_filtered(self):
702 model = VcsSettingsModel()
691 model = VcsSettingsModel()
703 repo_settings = [
692 repo_settings = [
704 UiSetting('extensions', 'largefiles', '', True),
693 UiSetting('extensions', 'largefiles', '', True),
705 UiSetting('phases', 'publish', 'True', True),
694 UiSetting('phases', 'publish', 'True', True),
706 UiSetting('hooks', 'changegroup.repo_size', 'hook', True),
695 UiSetting('hooks', 'changegroup.repo_size', 'hook', True),
707 UiSetting('hooks', 'changegroup.push_logger', 'hook', True),
696 UiSetting('hooks', 'changegroup.push_logger', 'hook', True),
708 UiSetting('hooks', 'outgoing.pull_logger', 'hook', True),
697 UiSetting('hooks', 'outgoing.pull_logger', 'hook', True),
709 UiSetting(
698 UiSetting(
710 'vcs_svn_branch', '84223c972204fa545ca1b22dac7bef5b68d7442d',
699 'vcs_svn_branch', '84223c972204fa545ca1b22dac7bef5b68d7442d',
711 'test_branch', True),
700 'test_branch', True),
712 UiSetting(
701 UiSetting(
713 'vcs_svn_tag', '84229c972204fa545ca1b22dac7bef5b68d7442d',
702 'vcs_svn_tag', '84229c972204fa545ca1b22dac7bef5b68d7442d',
714 'test_tag', True),
703 'test_tag', True),
715 ]
704 ]
716 non_repo_settings = [
705 non_repo_settings = [
717 UiSetting('largefiles', 'usercache', '/example/largefiles-store', True),
706 UiSetting('largefiles', 'usercache', '/example/largefiles-store', True),
718 UiSetting('test', 'outgoing.pull_logger', 'hook', True),
707 UiSetting('test', 'outgoing.pull_logger', 'hook', True),
719 UiSetting('hooks', 'test2', 'hook', True),
708 UiSetting('hooks', 'test2', 'hook', True),
720 UiSetting(
709 UiSetting(
721 'vcs_svn_repo', '84229c972204fa545ca1b22dac7bef5b68d7442d',
710 'vcs_svn_repo', '84229c972204fa545ca1b22dac7bef5b68d7442d',
722 'test_tag', True),
711 'test_tag', True),
723 ]
712 ]
724 settings = repo_settings + non_repo_settings
713 settings = repo_settings + non_repo_settings
725 filtered_settings = model._filter_ui_settings(settings)
714 filtered_settings = model._filter_ui_settings(settings)
726 assert sorted(filtered_settings) == sorted(repo_settings)
715 assert sorted(filtered_settings) == sorted(repo_settings)
727
716
728
717
729 class TestFilterGeneralSettings(object):
718 class TestFilterGeneralSettings(object):
730 def test_settings_are_filtered(self):
719 def test_settings_are_filtered(self):
731 model = VcsSettingsModel()
720 model = VcsSettingsModel()
732 settings = {
721 settings = {
733 'rhodecode_abcde': 'value1',
722 'rhodecode_abcde': 'value1',
734 'rhodecode_vwxyz': 'value2',
723 'rhodecode_vwxyz': 'value2',
735 }
724 }
736 general_settings = {
725 general_settings = {
737 'rhodecode_{}'.format(key): 'value'
726 'rhodecode_{}'.format(key): 'value'
738 for key in VcsSettingsModel.GENERAL_SETTINGS
727 for key in VcsSettingsModel.GENERAL_SETTINGS
739 }
728 }
740 settings.update(general_settings)
729 settings.update(general_settings)
741
730
742 filtered_settings = model._filter_general_settings(settings)
731 filtered_settings = model._filter_general_settings(settings)
743 assert sorted(filtered_settings) == sorted(general_settings)
732 assert sorted(filtered_settings) == sorted(general_settings)
744
733
745
734
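Side note: the filter test above asserts that only the recognised 'rhodecode_*' keys survive filtering. One plausible reading of that contract, written out as a sketch (an assumption, not the shipped implementation):

    # assumption: keep only keys whose 'rhodecode_' suffix is a known general setting
    def _filter_general_settings_sketch(settings):
        allowed = {'rhodecode_{}'.format(key) for key in VcsSettingsModel.GENERAL_SETTINGS}
        return {key: value for key, value in settings.items() if key in allowed}
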
746 class TestGetRepoUiSettings(object):
735 class TestGetRepoUiSettings(object):
747 def test_global_uis_are_returned_when_no_repo_uis_found(
736 def test_global_uis_are_returned_when_no_repo_uis_found(
748 self, repo_stub):
737 self, repo_stub):
749 model = VcsSettingsModel(repo=repo_stub.repo_name)
738 model = VcsSettingsModel(repo=repo_stub.repo_name)
750 result = model.get_repo_ui_settings()
739 result = model.get_repo_ui_settings()
751 svn_sections = (
740 svn_sections = (
752 VcsSettingsModel.SVN_TAG_SECTION,
741 VcsSettingsModel.SVN_TAG_SECTION,
753 VcsSettingsModel.SVN_BRANCH_SECTION)
742 VcsSettingsModel.SVN_BRANCH_SECTION)
754 expected_result = [
743 expected_result = [
755 s for s in model.global_settings.get_ui()
744 s for s in model.global_settings.get_ui()
756 if s.section not in svn_sections]
745 if s.section not in svn_sections]
757 assert sorted(result) == sorted(expected_result)
746 assert sorted(result) == sorted(expected_result)
758
747
759 def test_repo_uis_are_overriding_global_uis(
748 def test_repo_uis_are_overriding_global_uis(
760 self, repo_stub, settings_util):
749 self, repo_stub, settings_util):
761 for section, key in VcsSettingsModel.HOOKS_SETTINGS:
750 for section, key in VcsSettingsModel.HOOKS_SETTINGS:
762 settings_util.create_repo_rhodecode_ui(
751 settings_util.create_repo_rhodecode_ui(
763 repo_stub, section, 'repo', key=key, active=False)
752 repo_stub, section, 'repo', key=key, active=False)
764 model = VcsSettingsModel(repo=repo_stub.repo_name)
753 model = VcsSettingsModel(repo=repo_stub.repo_name)
765 result = model.get_repo_ui_settings()
754 result = model.get_repo_ui_settings()
766 for setting in result:
755 for setting in result:
767 locator = (setting.section, setting.key)
756 locator = (setting.section, setting.key)
768 if locator in VcsSettingsModel.HOOKS_SETTINGS:
757 if locator in VcsSettingsModel.HOOKS_SETTINGS:
769 assert setting.value == 'repo'
758 assert setting.value == 'repo'
770
759
771 assert setting.active is False
760 assert setting.active is False
772
761
773 def test_global_svn_patterns_are_not_in_list(
762 def test_global_svn_patterns_are_not_in_list(
774 self, repo_stub, settings_util):
763 self, repo_stub, settings_util):
775 svn_sections = (
764 svn_sections = (
776 VcsSettingsModel.SVN_TAG_SECTION,
765 VcsSettingsModel.SVN_TAG_SECTION,
777 VcsSettingsModel.SVN_BRANCH_SECTION)
766 VcsSettingsModel.SVN_BRANCH_SECTION)
778 for section in svn_sections:
767 for section in svn_sections:
779 settings_util.create_rhodecode_ui(
768 settings_util.create_rhodecode_ui(
780 section, 'repo', key='deadbeef' + section, active=False)
769 section, 'repo', key='deadbeef' + section, active=False)
781 Session().commit()
770 Session().commit()
782
771
783 model = VcsSettingsModel(repo=repo_stub.repo_name)
772 model = VcsSettingsModel(repo=repo_stub.repo_name)
784 result = model.get_repo_ui_settings()
773 result = model.get_repo_ui_settings()
785 for setting in result:
774 for setting in result:
786 assert setting.section not in svn_sections
775 assert setting.section not in svn_sections
787
776
788 def test_repo_uis_filtered_by_section_are_returned(
777 def test_repo_uis_filtered_by_section_are_returned(
789 self, repo_stub, settings_util):
778 self, repo_stub, settings_util):
790 for section, key in VcsSettingsModel.HOOKS_SETTINGS:
779 for section, key in VcsSettingsModel.HOOKS_SETTINGS:
791 settings_util.create_repo_rhodecode_ui(
780 settings_util.create_repo_rhodecode_ui(
792 repo_stub, section, 'repo', key=key, active=False)
781 repo_stub, section, 'repo', key=key, active=False)
793 model = VcsSettingsModel(repo=repo_stub.repo_name)
782 model = VcsSettingsModel(repo=repo_stub.repo_name)
794 section, key = VcsSettingsModel.HOOKS_SETTINGS[0]
783 section, key = VcsSettingsModel.HOOKS_SETTINGS[0]
795 result = model.get_repo_ui_settings(section=section)
784 result = model.get_repo_ui_settings(section=section)
796 for setting in result:
785 for setting in result:
797 assert setting.section == section
786 assert setting.section == section
798
787
799 def test_repo_uis_filtered_by_key_are_returned(
788 def test_repo_uis_filtered_by_key_are_returned(
800 self, repo_stub, settings_util):
789 self, repo_stub, settings_util):
801 for section, key in VcsSettingsModel.HOOKS_SETTINGS:
790 for section, key in VcsSettingsModel.HOOKS_SETTINGS:
802 settings_util.create_repo_rhodecode_ui(
791 settings_util.create_repo_rhodecode_ui(
803 repo_stub, section, 'repo', key=key, active=False)
792 repo_stub, section, 'repo', key=key, active=False)
804 model = VcsSettingsModel(repo=repo_stub.repo_name)
793 model = VcsSettingsModel(repo=repo_stub.repo_name)
805 section, key = VcsSettingsModel.HOOKS_SETTINGS[0]
794 section, key = VcsSettingsModel.HOOKS_SETTINGS[0]
806 result = model.get_repo_ui_settings(key=key)
795 result = model.get_repo_ui_settings(key=key)
807 for setting in result:
796 for setting in result:
808 assert setting.key == key
797 assert setting.key == key
809
798
810 def test_raises_exception_when_repository_is_not_specified(self):
799 def test_raises_exception_when_repository_is_not_specified(self):
811 model = VcsSettingsModel()
800 model = VcsSettingsModel()
812 with pytest.raises(Exception) as exc_info:
801 with pytest.raises(Exception) as exc_info:
813 model.get_repo_ui_settings()
802 model.get_repo_ui_settings()
814 assert str(exc_info.value) == 'Repository is not specified'
803 assert str(exc_info.value) == 'Repository is not specified'
815
804
816
805
817 class TestGetRepoGeneralSettings(object):
806 class TestGetRepoGeneralSettings(object):
818 def test_global_settings_are_returned_when_no_repo_settings_found(
807 def test_global_settings_are_returned_when_no_repo_settings_found(
819 self, repo_stub):
808 self, repo_stub):
820 model = VcsSettingsModel(repo=repo_stub.repo_name)
809 model = VcsSettingsModel(repo=repo_stub.repo_name)
821 result = model.get_repo_general_settings()
810 result = model.get_repo_general_settings()
822 expected_result = model.global_settings.get_all_settings()
811 expected_result = model.global_settings.get_all_settings()
823 assert sorted(result) == sorted(expected_result)
812 assert sorted(result) == sorted(expected_result)
824
813
825 def test_repo_settings_are_overriding_global_settings(
814 def test_repo_settings_are_overriding_global_settings(
826 self, repo_stub, settings_util):
815 self, repo_stub, settings_util):
827 for key in VcsSettingsModel.GENERAL_SETTINGS:
816 for key in VcsSettingsModel.GENERAL_SETTINGS:
828 settings_util.create_repo_rhodecode_setting(
817 settings_util.create_repo_rhodecode_setting(
829 repo_stub, key, 'abcde', type_='unicode')
818 repo_stub, key, 'abcde', type_='unicode')
830 Session().commit()
819 Session().commit()
831
820
832 model = VcsSettingsModel(repo=repo_stub.repo_name)
821 model = VcsSettingsModel(repo=repo_stub.repo_name)
833 result = model.get_repo_general_settings()
822 result = model.get_repo_general_settings()
834 for key in result:
823 for key in result:
835 if key in VcsSettingsModel.GENERAL_SETTINGS:
824 if key in VcsSettingsModel.GENERAL_SETTINGS:
836 assert result[key] == 'abcde'
825 assert result[key] == 'abcde'
837
826
838 def test_raises_exception_when_repository_is_not_specified(self):
827 def test_raises_exception_when_repository_is_not_specified(self):
839 model = VcsSettingsModel()
828 model = VcsSettingsModel()
840 with pytest.raises(Exception) as exc_info:
829 with pytest.raises(Exception) as exc_info:
841 model.get_repo_general_settings()
830 model.get_repo_general_settings()
842 assert str(exc_info.value) == 'Repository is not specified'
831 assert str(exc_info.value) == 'Repository is not specified'
843
832
844
833
845 class TestGetGlobalGeneralSettings(object):
834 class TestGetGlobalGeneralSettings(object):
846 def test_global_settings_are_returned(self, repo_stub):
835 def test_global_settings_are_returned(self, repo_stub):
847 model = VcsSettingsModel()
836 model = VcsSettingsModel()
848 result = model.get_global_general_settings()
837 result = model.get_global_general_settings()
849 expected_result = model.global_settings.get_all_settings()
838 expected_result = model.global_settings.get_all_settings()
850 assert sorted(result) == sorted(expected_result)
839 assert sorted(result) == sorted(expected_result)
851
840
852 def test_repo_uis_are_not_overriding_global_uis(
841 def test_repo_uis_are_not_overriding_global_uis(
853 self, repo_stub, settings_util):
842 self, repo_stub, settings_util):
854 for key in VcsSettingsModel.GENERAL_SETTINGS:
843 for key in VcsSettingsModel.GENERAL_SETTINGS:
855 settings_util.create_repo_rhodecode_setting(
844 settings_util.create_repo_rhodecode_setting(
856 repo_stub, key, 'abcde', type_='unicode')
845 repo_stub, key, 'abcde', type_='unicode')
857 Session().commit()
846 Session().commit()
858
847
859 model = VcsSettingsModel(repo=repo_stub.repo_name)
848 model = VcsSettingsModel(repo=repo_stub.repo_name)
860 result = model.get_global_general_settings()
849 result = model.get_global_general_settings()
861 expected_result = model.global_settings.get_all_settings()
850 expected_result = model.global_settings.get_all_settings()
862 assert sorted(result) == sorted(expected_result)
851 assert sorted(result) == sorted(expected_result)
863
852
864
853
865 class TestGetGlobalUiSettings(object):
854 class TestGetGlobalUiSettings(object):
866 def test_global_uis_are_returned(self, repo_stub):
855 def test_global_uis_are_returned(self, repo_stub):
867 model = VcsSettingsModel()
856 model = VcsSettingsModel()
868 result = model.get_global_ui_settings()
857 result = model.get_global_ui_settings()
869 expected_result = model.global_settings.get_ui()
858 expected_result = model.global_settings.get_ui()
870 assert sorted(result) == sorted(expected_result)
859 assert sorted(result) == sorted(expected_result)
871
860
872 def test_repo_uis_are_not_overriding_global_uis(
861 def test_repo_uis_are_not_overriding_global_uis(
873 self, repo_stub, settings_util):
862 self, repo_stub, settings_util):
874 for section, key in VcsSettingsModel.HOOKS_SETTINGS:
863 for section, key in VcsSettingsModel.HOOKS_SETTINGS:
875 settings_util.create_repo_rhodecode_ui(
864 settings_util.create_repo_rhodecode_ui(
876 repo_stub, section, 'repo', key=key, active=False)
865 repo_stub, section, 'repo', key=key, active=False)
877 Session().commit()
866 Session().commit()
878
867
879 model = VcsSettingsModel(repo=repo_stub.repo_name)
868 model = VcsSettingsModel(repo=repo_stub.repo_name)
880 result = model.get_global_ui_settings()
869 result = model.get_global_ui_settings()
881 expected_result = model.global_settings.get_ui()
870 expected_result = model.global_settings.get_ui()
882 assert sorted(result) == sorted(expected_result)
871 assert sorted(result) == sorted(expected_result)
883
872
884 def test_ui_settings_filtered_by_section(
873 def test_ui_settings_filtered_by_section(
885 self, repo_stub, settings_util):
874 self, repo_stub, settings_util):
886 model = VcsSettingsModel(repo=repo_stub.repo_name)
875 model = VcsSettingsModel(repo=repo_stub.repo_name)
887 section, key = VcsSettingsModel.HOOKS_SETTINGS[0]
876 section, key = VcsSettingsModel.HOOKS_SETTINGS[0]
888 result = model.get_global_ui_settings(section=section)
877 result = model.get_global_ui_settings(section=section)
889 expected_result = model.global_settings.get_ui(section=section)
878 expected_result = model.global_settings.get_ui(section=section)
890 assert sorted(result) == sorted(expected_result)
879 assert sorted(result) == sorted(expected_result)
891
880
892 def test_ui_settings_filtered_by_key(
881 def test_ui_settings_filtered_by_key(
893 self, repo_stub, settings_util):
882 self, repo_stub, settings_util):
894 model = VcsSettingsModel(repo=repo_stub.repo_name)
883 model = VcsSettingsModel(repo=repo_stub.repo_name)
895 section, key = VcsSettingsModel.HOOKS_SETTINGS[0]
884 section, key = VcsSettingsModel.HOOKS_SETTINGS[0]
896 result = model.get_global_ui_settings(key=key)
885 result = model.get_global_ui_settings(key=key)
897 expected_result = model.global_settings.get_ui(key=key)
886 expected_result = model.global_settings.get_ui(key=key)
898 assert sorted(result) == sorted(expected_result)
887 assert sorted(result) == sorted(expected_result)
899
888
900
889
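Side note: the next two test classes pin the dispatch on inherit_global_settings -- the global getters are used when the flag is set or when no repository is bound, the per-repo getters otherwise. A condensed sketch of the rule those assertions imply (the 'repo' attribute name is an assumption, everything else appears in the tests):

    # condensed form of the dispatch asserted below; not the shipped code
    def get_general_settings_sketch(model):
        if model.inherit_global_settings or model.repo is None:   # 'repo' attribute name assumed
            return model.get_global_general_settings()
        return model.get_repo_general_settings()
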
901 class TestGetGeneralSettings(object):
890 class TestGetGeneralSettings(object):
902 def test_global_settings_are_returned_when_inherited_is_true(
891 def test_global_settings_are_returned_when_inherited_is_true(
903 self, repo_stub, settings_util):
892 self, repo_stub, settings_util):
904 model = VcsSettingsModel(repo=repo_stub.repo_name)
893 model = VcsSettingsModel(repo=repo_stub.repo_name)
905 model.inherit_global_settings = True
894 model.inherit_global_settings = True
906 for key in VcsSettingsModel.GENERAL_SETTINGS:
895 for key in VcsSettingsModel.GENERAL_SETTINGS:
907 settings_util.create_repo_rhodecode_setting(
896 settings_util.create_repo_rhodecode_setting(
908 repo_stub, key, 'abcde', type_='unicode')
897 repo_stub, key, 'abcde', type_='unicode')
909 Session().commit()
898 Session().commit()
910
899
911 result = model.get_general_settings()
900 result = model.get_general_settings()
912 expected_result = model.get_global_general_settings()
901 expected_result = model.get_global_general_settings()
913 assert sorted(result) == sorted(expected_result)
902 assert sorted(result) == sorted(expected_result)
914
903
915 def test_repo_settings_are_returned_when_inherited_is_false(
904 def test_repo_settings_are_returned_when_inherited_is_false(
916 self, repo_stub, settings_util):
905 self, repo_stub, settings_util):
917 model = VcsSettingsModel(repo=repo_stub.repo_name)
906 model = VcsSettingsModel(repo=repo_stub.repo_name)
918 model.inherit_global_settings = False
907 model.inherit_global_settings = False
919 for key in VcsSettingsModel.GENERAL_SETTINGS:
908 for key in VcsSettingsModel.GENERAL_SETTINGS:
920 settings_util.create_repo_rhodecode_setting(
909 settings_util.create_repo_rhodecode_setting(
921 repo_stub, key, 'abcde', type_='unicode')
910 repo_stub, key, 'abcde', type_='unicode')
922 Session().commit()
911 Session().commit()
923
912
924 result = model.get_general_settings()
913 result = model.get_general_settings()
925 expected_result = model.get_repo_general_settings()
914 expected_result = model.get_repo_general_settings()
926 assert sorted(result) == sorted(expected_result)
915 assert sorted(result) == sorted(expected_result)
927
916
928 def test_global_settings_are_returned_when_no_repository_specified(self):
917 def test_global_settings_are_returned_when_no_repository_specified(self):
929 model = VcsSettingsModel()
918 model = VcsSettingsModel()
930 result = model.get_general_settings()
919 result = model.get_general_settings()
931 expected_result = model.get_global_general_settings()
920 expected_result = model.get_global_general_settings()
932 assert sorted(result) == sorted(expected_result)
921 assert sorted(result) == sorted(expected_result)
933
922
934
923
935 class TestGetUiSettings(object):
924 class TestGetUiSettings(object):
936 def test_global_settings_are_returned_when_inherited_is_true(
925 def test_global_settings_are_returned_when_inherited_is_true(
937 self, repo_stub, settings_util):
926 self, repo_stub, settings_util):
938 model = VcsSettingsModel(repo=repo_stub.repo_name)
927 model = VcsSettingsModel(repo=repo_stub.repo_name)
939 model.inherit_global_settings = True
928 model.inherit_global_settings = True
940 for section, key in VcsSettingsModel.HOOKS_SETTINGS:
929 for section, key in VcsSettingsModel.HOOKS_SETTINGS:
941 settings_util.create_repo_rhodecode_ui(
930 settings_util.create_repo_rhodecode_ui(
942 repo_stub, section, 'repo', key=key, active=True)
931 repo_stub, section, 'repo', key=key, active=True)
943 Session().commit()
932 Session().commit()
944
933
945 result = model.get_ui_settings()
934 result = model.get_ui_settings()
946 expected_result = model.get_global_ui_settings()
935 expected_result = model.get_global_ui_settings()
947 assert sorted(result) == sorted(expected_result)
936 assert sorted(result) == sorted(expected_result)
948
937
949 def test_repo_settings_are_returned_when_inherited_is_false(
938 def test_repo_settings_are_returned_when_inherited_is_false(
950 self, repo_stub, settings_util):
939 self, repo_stub, settings_util):
951 model = VcsSettingsModel(repo=repo_stub.repo_name)
940 model = VcsSettingsModel(repo=repo_stub.repo_name)
952 model.inherit_global_settings = False
941 model.inherit_global_settings = False
953 for section, key in VcsSettingsModel.HOOKS_SETTINGS:
942 for section, key in VcsSettingsModel.HOOKS_SETTINGS:
954 settings_util.create_repo_rhodecode_ui(
943 settings_util.create_repo_rhodecode_ui(
955 repo_stub, section, 'repo', key=key, active=True)
944 repo_stub, section, 'repo', key=key, active=True)
956 Session().commit()
945 Session().commit()
957
946
958 result = model.get_ui_settings()
947 result = model.get_ui_settings()
959 expected_result = model.get_repo_ui_settings()
948 expected_result = model.get_repo_ui_settings()
960 assert sorted(result) == sorted(expected_result)
949 assert sorted(result) == sorted(expected_result)
961
950
962 def test_repo_settings_filtered_by_section_and_key(self, repo_stub):
951 def test_repo_settings_filtered_by_section_and_key(self, repo_stub):
963 model = VcsSettingsModel(repo=repo_stub.repo_name)
952 model = VcsSettingsModel(repo=repo_stub.repo_name)
964 model.inherit_global_settings = False
953 model.inherit_global_settings = False
965
954
966 args = ('section', 'key')
955 args = ('section', 'key')
967 with mock.patch.object(model, 'get_repo_ui_settings') as settings_mock:
956 with mock.patch.object(model, 'get_repo_ui_settings') as settings_mock:
968 model.get_ui_settings(*args)
957 model.get_ui_settings(*args)
969 Session().commit()
958 Session().commit()
970
959
971 settings_mock.assert_called_once_with(*args)
960 settings_mock.assert_called_once_with(*args)
972
961
973 def test_global_settings_filtered_by_section_and_key(self):
962 def test_global_settings_filtered_by_section_and_key(self):
974 model = VcsSettingsModel()
963 model = VcsSettingsModel()
975 args = ('section', 'key')
964 args = ('section', 'key')
976 with mock.patch.object(model, 'get_global_ui_settings') as (
965 with mock.patch.object(model, 'get_global_ui_settings') as (
977 settings_mock):
966 settings_mock):
978 model.get_ui_settings(*args)
967 model.get_ui_settings(*args)
979 settings_mock.assert_called_once_with(*args)
968 settings_mock.assert_called_once_with(*args)
980
969
981 def test_global_settings_are_returned_when_no_repository_specified(self):
970 def test_global_settings_are_returned_when_no_repository_specified(self):
982 model = VcsSettingsModel()
971 model = VcsSettingsModel()
983 result = model.get_ui_settings()
972 result = model.get_ui_settings()
984 expected_result = model.get_global_ui_settings()
973 expected_result = model.get_global_ui_settings()
985 assert sorted(result) == sorted(expected_result)
974 assert sorted(result) == sorted(expected_result)
986
975
987
976
988 class TestGetSvnPatterns(object):
977 class TestGetSvnPatterns(object):
989 def test_repo_settings_filtered_by_section_and_key(self, repo_stub):
978 def test_repo_settings_filtered_by_section_and_key(self, repo_stub):
990 model = VcsSettingsModel(repo=repo_stub.repo_name)
979 model = VcsSettingsModel(repo=repo_stub.repo_name)
991 args = ('section', )
980 args = ('section', )
992 with mock.patch.object(model, 'get_repo_ui_settings') as settings_mock:
981 with mock.patch.object(model, 'get_repo_ui_settings') as settings_mock:
993 model.get_svn_patterns(*args)
982 model.get_svn_patterns(*args)
994
983
995 Session().commit()
984 Session().commit()
996 settings_mock.assert_called_once_with(*args)
985 settings_mock.assert_called_once_with(*args)
997
986
998 def test_global_settings_filtered_by_section_and_key(self):
987 def test_global_settings_filtered_by_section_and_key(self):
999 model = VcsSettingsModel()
988 model = VcsSettingsModel()
1000 args = ('section', )
989 args = ('section', )
1001 with mock.patch.object(model, 'get_global_ui_settings') as (
990 with mock.patch.object(model, 'get_global_ui_settings') as (
1002 settings_mock):
991 settings_mock):
1003 model.get_svn_patterns(*args)
992 model.get_svn_patterns(*args)
1004 settings_mock.assert_called_once_with(*args)
993 settings_mock.assert_called_once_with(*args)
1005
994
1006
995
1007 class TestGetReposLocation(object):
1008 def test_returns_repos_location(self, repo_stub):
1009 model = VcsSettingsModel()
1010
1011 result_mock = mock.Mock()
1012 result_mock.ui_value = '/tmp'
1013
1014 with mock.patch.object(model, 'global_settings') as settings_mock:
1015 settings_mock.get_ui_by_key.return_value = result_mock
1016 result = model.get_repos_location()
1017
1018 settings_mock.get_ui_by_key.assert_called_once_with('/')
1019 assert result == '/tmp'
1020
1021
1022 class TestCreateOrUpdateRepoSettings(object):
996 class TestCreateOrUpdateRepoSettings(object):
1023 FORM_DATA = {
997 FORM_DATA = {
1024 'inherit_global_settings': False,
998 'inherit_global_settings': False,
1025 'hooks_changegroup_repo_size': False,
999 'hooks_changegroup_repo_size': False,
1026 'hooks_changegroup_push_logger': False,
1000 'hooks_changegroup_push_logger': False,
1027 'hooks_outgoing_pull_logger': False,
1001 'hooks_outgoing_pull_logger': False,
1028 'extensions_largefiles': False,
1002 'extensions_largefiles': False,
1029 'extensions_evolve': False,
1003 'extensions_evolve': False,
1030 'largefiles_usercache': '/example/largefiles-store',
1004 'largefiles_usercache': '/example/largefiles-store',
1031 'vcs_git_lfs_enabled': False,
1005 'vcs_git_lfs_enabled': False,
1032 'vcs_git_lfs_store_location': '/',
1006 'vcs_git_lfs_store_location': '/',
1033 'phases_publish': 'False',
1007 'phases_publish': 'False',
1034 'rhodecode_pr_merge_enabled': False,
1008 'rhodecode_pr_merge_enabled': False,
1035 'rhodecode_use_outdated_comments': False,
1009 'rhodecode_use_outdated_comments': False,
1036 'new_svn_branch': '',
1010 'new_svn_branch': '',
1037 'new_svn_tag': ''
1011 'new_svn_tag': ''
1038 }
1012 }
1039
1013
1040 def test_get_raises_exception_when_repository_not_specified(self):
1014 def test_get_raises_exception_when_repository_not_specified(self):
1041 model = VcsSettingsModel()
1015 model = VcsSettingsModel()
1042 with pytest.raises(Exception) as exc_info:
1016 with pytest.raises(Exception) as exc_info:
1043 model.create_or_update_repo_settings(data=self.FORM_DATA)
1017 model.create_or_update_repo_settings(data=self.FORM_DATA)
1044 Session().commit()
1018 Session().commit()
1045
1019
1046 assert str(exc_info.value) == 'Repository is not specified'
1020 assert str(exc_info.value) == 'Repository is not specified'
1047
1021
1048 def test_only_svn_settings_are_updated_when_type_is_svn(self, backend_svn):
1022 def test_only_svn_settings_are_updated_when_type_is_svn(self, backend_svn):
1049 repo = backend_svn.create_repo()
1023 repo = backend_svn.create_repo()
1050 model = VcsSettingsModel(repo=repo)
1024 model = VcsSettingsModel(repo=repo)
1051 with self._patch_model(model) as mocks:
1025 with self._patch_model(model) as mocks:
1052 model.create_or_update_repo_settings(
1026 model.create_or_update_repo_settings(
1053 data=self.FORM_DATA, inherit_global_settings=False)
1027 data=self.FORM_DATA, inherit_global_settings=False)
1054 Session().commit()
1028 Session().commit()
1055
1029
1056 mocks['create_repo_svn_settings'].assert_called_once_with(
1030 mocks['create_repo_svn_settings'].assert_called_once_with(
1057 self.FORM_DATA)
1031 self.FORM_DATA)
1058 non_called_methods = (
1032 non_called_methods = (
1059 'create_or_update_repo_hook_settings',
1033 'create_or_update_repo_hook_settings',
1060 'create_or_update_repo_pr_settings',
1034 'create_or_update_repo_pr_settings',
1061 'create_or_update_repo_hg_settings')
1035 'create_or_update_repo_hg_settings')
1062 for method in non_called_methods:
1036 for method in non_called_methods:
1063 assert mocks[method].call_count == 0
1037 assert mocks[method].call_count == 0
1064
1038
1065 def test_non_svn_settings_are_updated_when_type_is_hg(self, backend_hg):
1039 def test_non_svn_settings_are_updated_when_type_is_hg(self, backend_hg):
1066 repo = backend_hg.create_repo()
1040 repo = backend_hg.create_repo()
1067 model = VcsSettingsModel(repo=repo)
1041 model = VcsSettingsModel(repo=repo)
1068 with self._patch_model(model) as mocks:
1042 with self._patch_model(model) as mocks:
1069 model.create_or_update_repo_settings(
1043 model.create_or_update_repo_settings(
1070 data=self.FORM_DATA, inherit_global_settings=False)
1044 data=self.FORM_DATA, inherit_global_settings=False)
1071 Session().commit()
1045 Session().commit()
1072
1046
1073 assert mocks['create_repo_svn_settings'].call_count == 0
1047 assert mocks['create_repo_svn_settings'].call_count == 0
1074 called_methods = (
1048 called_methods = (
1075 'create_or_update_repo_hook_settings',
1049 'create_or_update_repo_hook_settings',
1076 'create_or_update_repo_pr_settings',
1050 'create_or_update_repo_pr_settings',
1077 'create_or_update_repo_hg_settings')
1051 'create_or_update_repo_hg_settings')
1078 for method in called_methods:
1052 for method in called_methods:
1079 mocks[method].assert_called_once_with(self.FORM_DATA)
1053 mocks[method].assert_called_once_with(self.FORM_DATA)
1080
1054
1081 def test_non_svn_and_hg_settings_are_updated_when_type_is_git(
1055 def test_non_svn_and_hg_settings_are_updated_when_type_is_git(
1082 self, backend_git):
1056 self, backend_git):
1083 repo = backend_git.create_repo()
1057 repo = backend_git.create_repo()
1084 model = VcsSettingsModel(repo=repo)
1058 model = VcsSettingsModel(repo=repo)
1085 with self._patch_model(model) as mocks:
1059 with self._patch_model(model) as mocks:
1086 model.create_or_update_repo_settings(
1060 model.create_or_update_repo_settings(
1087 data=self.FORM_DATA, inherit_global_settings=False)
1061 data=self.FORM_DATA, inherit_global_settings=False)
1088
1062
1089 assert mocks['create_repo_svn_settings'].call_count == 0
1063 assert mocks['create_repo_svn_settings'].call_count == 0
1090 called_methods = (
1064 called_methods = (
1091 'create_or_update_repo_hook_settings',
1065 'create_or_update_repo_hook_settings',
1092 'create_or_update_repo_pr_settings')
1066 'create_or_update_repo_pr_settings')
1093 non_called_methods = (
1067 non_called_methods = (
1094 'create_repo_svn_settings',
1068 'create_repo_svn_settings',
1095 'create_or_update_repo_hg_settings'
1069 'create_or_update_repo_hg_settings'
1096 )
1070 )
1097 for method in called_methods:
1071 for method in called_methods:
1098 mocks[method].assert_called_once_with(self.FORM_DATA)
1072 mocks[method].assert_called_once_with(self.FORM_DATA)
1099 for method in non_called_methods:
1073 for method in non_called_methods:
1100 assert mocks[method].call_count == 0
1074 assert mocks[method].call_count == 0
1101
1075
1102 def test_no_methods_are_called_when_settings_are_inherited(
1076 def test_no_methods_are_called_when_settings_are_inherited(
1103 self, backend):
1077 self, backend):
1104 repo = backend.create_repo()
1078 repo = backend.create_repo()
1105 model = VcsSettingsModel(repo=repo)
1079 model = VcsSettingsModel(repo=repo)
1106 with self._patch_model(model) as mocks:
1080 with self._patch_model(model) as mocks:
1107 model.create_or_update_repo_settings(
1081 model.create_or_update_repo_settings(
1108 data=self.FORM_DATA, inherit_global_settings=True)
1082 data=self.FORM_DATA, inherit_global_settings=True)
1109 for method_name in mocks:
1083 for method_name in mocks:
1110 assert mocks[method_name].call_count == 0
1084 assert mocks[method_name].call_count == 0
1111
1085
1112 def test_cache_is_marked_for_invalidation(self, repo_stub):
1086 def test_cache_is_marked_for_invalidation(self, repo_stub):
1113 model = VcsSettingsModel(repo=repo_stub)
1087 model = VcsSettingsModel(repo=repo_stub)
1114 invalidation_patcher = mock.patch(
1088 invalidation_patcher = mock.patch(
1115 'rhodecode.model.scm.ScmModel.mark_for_invalidation')
1089 'rhodecode.model.scm.ScmModel.mark_for_invalidation')
1116 with invalidation_patcher as invalidation_mock:
1090 with invalidation_patcher as invalidation_mock:
1117 model.create_or_update_repo_settings(
1091 model.create_or_update_repo_settings(
1118 data=self.FORM_DATA, inherit_global_settings=True)
1092 data=self.FORM_DATA, inherit_global_settings=True)
1119 Session().commit()
1093 Session().commit()
1120
1094
1121 invalidation_mock.assert_called_once_with(
1095 invalidation_mock.assert_called_once_with(
1122 repo_stub.repo_name, delete=True)
1096 repo_stub.repo_name, delete=True)
1123
1097
1124 def test_inherit_flag_is_saved(self, repo_stub):
1098 def test_inherit_flag_is_saved(self, repo_stub):
1125 model = VcsSettingsModel(repo=repo_stub)
1099 model = VcsSettingsModel(repo=repo_stub)
1126 model.inherit_global_settings = True
1100 model.inherit_global_settings = True
1127 with self._patch_model(model):
1101 with self._patch_model(model):
1128 model.create_or_update_repo_settings(
1102 model.create_or_update_repo_settings(
1129 data=self.FORM_DATA, inherit_global_settings=False)
1103 data=self.FORM_DATA, inherit_global_settings=False)
1130 Session().commit()
1104 Session().commit()
1131
1105
1132 assert model.inherit_global_settings is False
1106 assert model.inherit_global_settings is False
1133
1107
1134 def _patch_model(self, model):
1108 def _patch_model(self, model):
1135 return mock.patch.multiple(
1109 return mock.patch.multiple(
1136 model,
1110 model,
1137 create_repo_svn_settings=mock.DEFAULT,
1111 create_repo_svn_settings=mock.DEFAULT,
1138 create_or_update_repo_hook_settings=mock.DEFAULT,
1112 create_or_update_repo_hook_settings=mock.DEFAULT,
1139 create_or_update_repo_pr_settings=mock.DEFAULT,
1113 create_or_update_repo_pr_settings=mock.DEFAULT,
1140 create_or_update_repo_hg_settings=mock.DEFAULT)
1114 create_or_update_repo_hg_settings=mock.DEFAULT)
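Side note: _patch_model uses mock.patch.multiple with mock.DEFAULT; as a context manager this yields a dict mapping each patched attribute name to its mock, which is why the tests above index mocks['create_repo_svn_settings'] and friends. A minimal usage sketch reusing the class's own FORM_DATA (the repo name is a placeholder, and which mocks actually fire depends on its VCS type, as the tests assert):

    # usage sketch of mock.patch.multiple + mock.DEFAULT as a context manager
    model = VcsSettingsModel(repo='some-repo')
    with mock.patch.multiple(
            model,
            create_repo_svn_settings=mock.DEFAULT,
            create_or_update_repo_hook_settings=mock.DEFAULT,
            create_or_update_repo_pr_settings=mock.DEFAULT,
            create_or_update_repo_hg_settings=mock.DEFAULT) as mocks:
        model.create_or_update_repo_settings(
            data=TestCreateOrUpdateRepoSettings.FORM_DATA,
            inherit_global_settings=False)
        svn_calls = mocks['create_repo_svn_settings'].call_count   # 0 for hg/git repos
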
@@ -1,171 +1,171 b''
1
1
2 # Copyright (C) 2010-2023 RhodeCode GmbH
2 # Copyright (C) 2010-2023 RhodeCode GmbH
3 #
3 #
4 # This program is free software: you can redistribute it and/or modify
4 # This program is free software: you can redistribute it and/or modify
5 # it under the terms of the GNU Affero General Public License, version 3
5 # it under the terms of the GNU Affero General Public License, version 3
6 # (only), as published by the Free Software Foundation.
6 # (only), as published by the Free Software Foundation.
7 #
7 #
8 # This program is distributed in the hope that it will be useful,
8 # This program is distributed in the hope that it will be useful,
9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # GNU General Public License for more details.
11 # GNU General Public License for more details.
12 #
12 #
13 # You should have received a copy of the GNU Affero General Public License
13 # You should have received a copy of the GNU Affero General Public License
14 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 #
15 #
16 # This program is dual-licensed. If you wish to learn more about the
16 # This program is dual-licensed. If you wish to learn more about the
17 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 # and proprietary license terms, please see https://rhodecode.com/licenses/
19
19
20 import os
20 import os
21 import mock
21 import mock
22 import pytest
22 import pytest
23 import tempfile
23 import tempfile
24
24
25
25
26 from rhodecode.lib.exceptions import AttachedForksError
26 from rhodecode.lib.exceptions import AttachedForksError
27 from rhodecode.lib.utils import make_db_config
27 from rhodecode.lib.utils import make_db_config
28 from rhodecode.model.db import Repository
28 from rhodecode.model.db import Repository
29 from rhodecode.model.meta import Session
29 from rhodecode.model.meta import Session
30 from rhodecode.model.repo import RepoModel
30 from rhodecode.model.repo import RepoModel
31 from rhodecode.model.scm import ScmModel
31 from rhodecode.model.scm import ScmModel
32
32
33
33
34 class TestRepoModel(object):
34 class TestRepoModel(object):
35
35
36 def test_remove_repo(self, backend):
36 def test_remove_repo(self, backend):
37 repo = backend.create_repo()
37 repo = backend.create_repo()
38 Session().commit()
38 Session().commit()
39 RepoModel().delete(repo=repo)
39 RepoModel().delete(repo=repo)
40 Session().commit()
40 Session().commit()
41
41
42 repos = ScmModel().repo_scan()
42 repos = ScmModel().repo_scan()
43
43
44 assert Repository.get_by_repo_name(repo_name=backend.repo_name) is None
44 assert Repository.get_by_repo_name(repo_name=backend.repo_name) is None
45 assert repo.repo_name not in repos
45 assert repo.repo_name not in repos
46
46
47 def test_remove_repo_raises_exc_when_attached_forks(self, backend):
47 def test_remove_repo_raises_exc_when_attached_forks(self, backend):
48 repo = backend.create_repo()
48 repo = backend.create_repo()
49 Session().commit()
49 Session().commit()
50 backend.create_fork()
50 backend.create_fork()
51 Session().commit()
51 Session().commit()
52
52
53 with pytest.raises(AttachedForksError):
53 with pytest.raises(AttachedForksError):
54 RepoModel().delete(repo=repo)
54 RepoModel().delete(repo=repo)
55
55
56 def test_remove_repo_delete_forks(self, backend):
56 def test_remove_repo_delete_forks(self, backend):
57 repo = backend.create_repo()
57 repo = backend.create_repo()
58 Session().commit()
58 Session().commit()
59
59
60 fork = backend.create_fork()
60 fork = backend.create_fork()
61 Session().commit()
61 Session().commit()
62
62
63 fork_of_fork = backend.create_fork()
63 fork_of_fork = backend.create_fork()
64 Session().commit()
64 Session().commit()
65
65
66 RepoModel().delete(repo=repo, forks='delete')
66 RepoModel().delete(repo=repo, forks='delete')
67 Session().commit()
67 Session().commit()
68
68
69 assert Repository.get_by_repo_name(repo_name=repo.repo_name) is None
69 assert Repository.get_by_repo_name(repo_name=repo.repo_name) is None
70 assert Repository.get_by_repo_name(repo_name=fork.repo_name) is None
70 assert Repository.get_by_repo_name(repo_name=fork.repo_name) is None
71 assert (
71 assert (
72 Repository.get_by_repo_name(repo_name=fork_of_fork.repo_name)
72 Repository.get_by_repo_name(repo_name=fork_of_fork.repo_name)
73 is None)
73 is None)
74
74
75 def test_remove_repo_detach_forks(self, backend):
75 def test_remove_repo_detach_forks(self, backend):
76 repo = backend.create_repo()
76 repo = backend.create_repo()
77 Session().commit()
77 Session().commit()
78
78
79 fork = backend.create_fork()
79 fork = backend.create_fork()
80 Session().commit()
80 Session().commit()
81
81
82 fork_of_fork = backend.create_fork()
82 fork_of_fork = backend.create_fork()
83 Session().commit()
83 Session().commit()
84
84
85 RepoModel().delete(repo=repo, forks='detach')
85 RepoModel().delete(repo=repo, forks='detach')
86 Session().commit()
86 Session().commit()
87
87
88 assert Repository.get_by_repo_name(repo_name=repo.repo_name) is None
88 assert Repository.get_by_repo_name(repo_name=repo.repo_name) is None
89 assert (
89 assert (
90 Repository.get_by_repo_name(repo_name=fork.repo_name) is not None)
90 Repository.get_by_repo_name(repo_name=fork.repo_name) is not None)
91 assert (
91 assert (
92 Repository.get_by_repo_name(repo_name=fork_of_fork.repo_name)
92 Repository.get_by_repo_name(repo_name=fork_of_fork.repo_name)
93 is not None)
93 is not None)
94
94
95 @pytest.mark.parametrize("filename, expected", [
95 @pytest.mark.parametrize("filename, expected", [
96 ("README", True),
96 ("README", True),
97 ("README.rst", False),
97 ("README.rst", False),
98 ])
98 ])
99 def test_filenode_is_link(self, vcsbackend, filename, expected):
99 def test_filenode_is_link(self, vcsbackend, filename, expected):
100 repo = vcsbackend.repo
100 repo = vcsbackend.repo
101 assert repo.get_commit().is_link(filename) is expected
101 assert repo.get_commit().is_link(filename) is expected
102
102
103 def test_get_commit(self, backend):
103 def test_get_commit(self, backend):
104 backend.repo.get_commit()
104 backend.repo.get_commit()
105
105
106 def test_get_changeset_is_deprecated(self, backend):
106 def test_get_changeset_is_deprecated(self, backend):
107 repo = backend.repo
107 repo = backend.repo
108 pytest.deprecated_call(repo.get_changeset)
108 pytest.deprecated_call(repo.get_changeset)
109
109
110 def test_clone_url_encrypted_value(self, backend):
110 def test_clone_url_encrypted_value(self, backend):
111 repo = backend.create_repo()
111 repo = backend.create_repo()
112 Session().commit()
112 Session().commit()
113
113
114 repo.clone_url = 'https://marcink:qweqwe@code.rhodecode.com'
114 repo.clone_url = 'https://marcink:qweqwe@code.rhodecode.com'
115 Session().add(repo)
115 Session().add(repo)
116 Session().commit()
116 Session().commit()
117
117
118 assert repo.clone_url == 'https://marcink:qweqwe@code.rhodecode.com'
118 assert repo.clone_url == 'https://marcink:qweqwe@code.rhodecode.com'
119
119
120 @pytest.mark.backends("git", "svn")
120 @pytest.mark.backends("git", "svn")
121 def test_create_filesystem_repo_installs_hooks(self, tmpdir, backend):
121 def test_create_filesystem_repo_installs_hooks(self, tmpdir, backend):
122 repo = backend.create_repo()
122 repo = backend.create_repo()
123 repo_name = repo.repo_name
123 repo_name = repo.repo_name
124 model = RepoModel()
124 with mock.patch('rhodecode.model.repo.RepoModel.repos_path',
125 repo_location = tempfile.mkdtemp()
125 new_callable=mock.PropertyMock) as mocked_models_property:
126 model.repos_path = repo_location
126 mocked_models_property.return_value = tempfile.mkdtemp()
127 repo = model._create_filesystem_repo(
127 repo = RepoModel()._create_filesystem_repo(
128 repo_name, backend.alias, repo_group='', clone_uri=None)
128 repo_name, backend.alias, repo_group='', clone_uri=None)
129
129
130 hooks = {
130 hooks = {
131 'svn': ('pre-commit', 'post-commit'),
131 'svn': ('pre-commit', 'post-commit'),
132 'git': ('pre-receive', 'post-receive'),
132 'git': ('pre-receive', 'post-receive'),
133 }
133 }
134 for hook in hooks[backend.alias]:
134 for hook in hooks[backend.alias]:
135 with open(os.path.join(repo.path, 'hooks', hook)) as f:
135 with open(os.path.join(repo.path, 'hooks', hook)) as f:
136 data = f.read()
136 data = f.read()
137 assert 'RC_HOOK_VER' in data
137 assert 'RC_HOOK_VER' in data
138
138
139 @pytest.mark.parametrize("use_global_config, repo_name_passed", [
139 @pytest.mark.parametrize("use_global_config, repo_name_passed", [
140 (True, False),
140 (True, False),
141 (False, True)
141 (False, True)
142 ])
142 ])
143 def test_per_repo_config_is_generated_during_filesystem_repo_creation(
143 def test_per_repo_config_is_generated_during_filesystem_repo_creation(
144 self, tmpdir, backend, use_global_config, repo_name_passed):
144 self, tmpdir, backend, use_global_config, repo_name_passed):
145 repo_name = 'test-{}-repo-{}'.format(backend.alias, use_global_config)
145 repo_name = 'test-{}-repo-{}'.format(backend.alias, use_global_config)
146 config = make_db_config()
146 config = make_db_config()
147 model = RepoModel()
147 model = RepoModel()
148 with mock.patch('rhodecode.model.repo.make_db_config') as config_mock:
148 with mock.patch('rhodecode.model.repo.make_db_config') as config_mock:
149 config_mock.return_value = config
149 config_mock.return_value = config
150 model._create_filesystem_repo(
150 model._create_filesystem_repo(
151 repo_name, backend.alias, repo_group='', clone_uri=None,
151 repo_name, backend.alias, repo_group='', clone_uri=None,
152 use_global_config=use_global_config)
152 use_global_config=use_global_config)
153 expected_repo_name = repo_name if repo_name_passed else None
153 expected_repo_name = repo_name if repo_name_passed else None
154 expected_call = mock.call(clear_session=False, repo=expected_repo_name)
154 expected_call = mock.call(clear_session=False, repo=expected_repo_name)
155 assert expected_call in config_mock.call_args_list
155 assert expected_call in config_mock.call_args_list
156
156
157 def test_update_commit_cache_with_config(self, backend):
157 def test_update_commit_cache_with_config(self, backend):
158 repo = backend.create_repo()
158 repo = backend.create_repo()
159 with mock.patch('rhodecode.model.db.Repository.scm_instance') as scm:
159 with mock.patch('rhodecode.model.db.Repository.scm_instance') as scm:
160 scm_instance = mock.Mock()
160 scm_instance = mock.Mock()
161 scm_instance.get_commit.return_value = {
161 scm_instance.get_commit.return_value = {
162 'raw_id': 40*'0',
162 'raw_id': 40*'0',
163 'revision': 1
163 'revision': 1
164 }
164 }
165 scm.return_value = scm_instance
165 scm.return_value = scm_instance
166 repo.update_commit_cache()
166 repo.update_commit_cache()
167 scm.assert_called_with(cache=False, config=None)
167 scm.assert_called_with(cache=False, config=None)
168 config = {'test': 'config'}
168 config = {'test': 'config'}
169 repo.update_commit_cache(config=config)
169 repo.update_commit_cache(config=config)
170 scm.assert_called_with(
170 scm.assert_called_with(
171 cache=False, config=config)
171 cache=False, config=config)
@@ -1,770 +1,767 b''
1
1
2 ; #########################################
2 ; #########################################
3 ; RHODECODE COMMUNITY EDITION CONFIGURATION
3 ; RHODECODE COMMUNITY EDITION CONFIGURATION
4 ; #########################################
4 ; #########################################
5
5
6 [DEFAULT]
6 [DEFAULT]
7 ; Debug flag sets all loggers to debug, and enables request tracking
7 ; Debug flag sets all loggers to debug, and enables request tracking
8 debug = true
8 debug = true
9
9
10 ; ########################################################################
10 ; ########################################################################
11 ; EMAIL CONFIGURATION
11 ; EMAIL CONFIGURATION
12 ; These settings will be used by the RhodeCode mailing system
12 ; These settings will be used by the RhodeCode mailing system
13 ; ########################################################################
13 ; ########################################################################
14
14
15 ; prefix all emails subjects with given prefix, helps filtering out emails
15 ; prefix all emails subjects with given prefix, helps filtering out emails
16 #email_prefix = [RhodeCode]
16 #email_prefix = [RhodeCode]
17
17
18 ; email FROM address all mails will be sent
18 ; email FROM address all mails will be sent
19 #app_email_from = rhodecode-noreply@localhost
19 #app_email_from = rhodecode-noreply@localhost
20
20
21 #smtp_server = mail.server.com
21 #smtp_server = mail.server.com
22 #smtp_username =
22 #smtp_username =
23 #smtp_password =
23 #smtp_password =
24 #smtp_port =
24 #smtp_port =
25 #smtp_use_tls = false
25 #smtp_use_tls = false
26 #smtp_use_ssl = true
26 #smtp_use_ssl = true
27
27
28 [server:main]
28 [server:main]
29 ; COMMON HOST/IP CONFIG, This applies mostly to develop setup,
29 ; COMMON HOST/IP CONFIG, This applies mostly to develop setup,
30 ; Host port for gunicorn are controlled by gunicorn_conf.py
30 ; Host port for gunicorn are controlled by gunicorn_conf.py
31 host = 127.0.0.1
31 host = 127.0.0.1
32 port = 10020
32 port = 10020
33
33
34
34
35 ; ###########################
35 ; ###########################
36 ; GUNICORN APPLICATION SERVER
36 ; GUNICORN APPLICATION SERVER
37 ; ###########################
37 ; ###########################
38
38
39 ; run with gunicorn --paste rhodecode.ini --config gunicorn_conf.py
39 ; run with gunicorn --paste rhodecode.ini --config gunicorn_conf.py
40
40
41 ; Module to use, this setting shouldn't be changed
41 ; Module to use, this setting shouldn't be changed
42 use = egg:gunicorn#main
42 use = egg:gunicorn#main
43
43
44 ; Prefix middleware for RhodeCode.
44 ; Prefix middleware for RhodeCode.
45 ; Recommended when using a proxy setup.
45 ; Recommended when using a proxy setup.
46 ; Allows serving RhodeCode under a prefix on the server,
46 ; Allows serving RhodeCode under a prefix on the server,
47 ; eg https://server.com/custom_prefix. Enable `filter-with =` option below as well.
47 ; eg https://server.com/custom_prefix. Enable `filter-with =` option below as well.
48 ; And set your prefix like: `prefix = /custom_prefix`
48 ; And set your prefix like: `prefix = /custom_prefix`
49 ; be sure to also set beaker.session.cookie_path = /custom_prefix if you need
49 ; be sure to also set beaker.session.cookie_path = /custom_prefix if you need
50 ; to make your cookies only work on the prefix URL
50 ; to make your cookies only work on the prefix URL
51 [filter:proxy-prefix]
51 [filter:proxy-prefix]
52 use = egg:PasteDeploy#prefix
52 use = egg:PasteDeploy#prefix
53 prefix = /
53 prefix = /
54
54
55 [app:main]
55 [app:main]
56 ; The %(here)s variable will be replaced with the absolute path of parent directory
56 ; The %(here)s variable will be replaced with the absolute path of parent directory
57 ; of this file
57 ; of this file
58 ; Each option in the app:main section can be overridden by an environment variable
58 ; Each option in the app:main section can be overridden by an environment variable
59 ;
59 ;
60 ;To override an option:
60 ;To override an option:
61 ;
61 ;
62 ;RC_<KeyName>
62 ;RC_<KeyName>
63 ;Everything should be uppercase, . and - should be replaced by _.
63 ;Everything should be uppercase, . and - should be replaced by _.
64 ;For example, if you have these configuration settings:
64 ;For example, if you have these configuration settings:
65 ;rc_cache.repo_object.backend = foo
65 ;rc_cache.repo_object.backend = foo
66 ;can be overridden by
66 ;can be overridden by
67 ;export RC_CACHE_REPO_OBJECT_BACKEND=foo
67 ;export RC_CACHE_REPO_OBJECT_BACKEND=foo
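; A further illustration of this mapping (example only, not a shipped default):
; a dotted key such as `vcs.server.enable`, defined later in this file, would be
; overridden with an environment variable named RC_VCS_SERVER_ENABLE, e.g.
;export RC_VCS_SERVER_ENABLE=false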
68
68
69 use = egg:rhodecode-enterprise-ce
69 use = egg:rhodecode-enterprise-ce
70
70
71 ; enable proxy prefix middleware, defined above
71 ; enable proxy prefix middleware, defined above
72 #filter-with = proxy-prefix
72 #filter-with = proxy-prefix
73
73
74 ; encryption key used to encrypt social plugin tokens,
74 ; encryption key used to encrypt social plugin tokens,
75 ; remote_urls with credentials etc, if not set it defaults to
75 ; remote_urls with credentials etc, if not set it defaults to
76 ; `beaker.session.secret`
76 ; `beaker.session.secret`
77 #rhodecode.encrypted_values.secret =
77 #rhodecode.encrypted_values.secret =
78
78
79 ; decryption strict mode (enabled by default). It controls if decryption raises
79 ; decryption strict mode (enabled by default). It controls if decryption raises
80 ; `SignatureVerificationError` in case of wrong key, or damaged encryption data.
80 ; `SignatureVerificationError` in case of wrong key, or damaged encryption data.
81 #rhodecode.encrypted_values.strict = false
81 #rhodecode.encrypted_values.strict = false
82
82
83 ; Pick algorithm for encryption. Either fernet (more secure) or aes (default)
83 ; Pick algorithm for encryption. Either fernet (more secure) or aes (default)
84 ; fernet is safer, and we strongly recommend switching to it.
84 ; fernet is safer, and we strongly recommend switching to it.
85 ; Due to backward compatibility aes is used as default.
85 ; Due to backward compatibility aes is used as default.
86 #rhodecode.encrypted_values.algorithm = fernet
86 #rhodecode.encrypted_values.algorithm = fernet
87
87
88 ; Return gzipped responses from RhodeCode (static files/application)
88 ; Return gzipped responses from RhodeCode (static files/application)
89 gzip_responses = false
89 gzip_responses = false
90
90
91 ; Auto-generate javascript routes file on startup
91 ; Auto-generate javascript routes file on startup
92 generate_js_files = false
92 generate_js_files = false
93
93
94 ; System global default language.
94 ; System global default language.
95 ; All available languages: en (default), be, de, es, fr, it, ja, pl, pt, ru, zh
95 ; All available languages: en (default), be, de, es, fr, it, ja, pl, pt, ru, zh
96 lang = en
96 lang = en
97
97
98 ; Perform a full repository scan and import on each server start.
98 ; Perform a full repository scan and import on each server start.
99 ; Setting this to true could lead to a very long startup time.
99 ; Setting this to true could lead to a very long startup time.
100 startup.import_repos = true
100 startup.import_repos = true
101
101
102 ; URL at which the application is running. This is used for bootstrapping
102 ; URL at which the application is running. This is used for bootstrapping
103 ; requests when no web request is available, e.g. in ishell or
103 ; requests when no web request is available, e.g. in ishell or
104 ; SSH calls. Set this so events receive a proper URL for SSH calls.
104 ; SSH calls. Set this so events receive a proper URL for SSH calls.
105 app.base_url = http://rhodecode.local
105 app.base_url = http://rhodecode.local
106
106
107 ; Unique application ID. Should be a random unique string for security.
107 ; Unique application ID. Should be a random unique string for security.
108 app_instance_uuid = rc-production
108 app_instance_uuid = rc-production
109
109
110 ; Cut off limit for large diffs (size in bytes). If the overall diff size of a
110 ; Cut off limit for large diffs (size in bytes). If the overall diff size of a
111 ; commit or pull request exceeds this limit, the diff will be displayed
111 ; commit or pull request exceeds this limit, the diff will be displayed
112 ; partially. E.g. 512000 == 512Kb
112 ; partially. E.g. 512000 == 512Kb
113 cut_off_limit_diff = 1024000
113 cut_off_limit_diff = 1024000
114
114
115 ; Cut off limit for large files inside diffs (size in bytes). Each individual
115 ; Cut off limit for large files inside diffs (size in bytes). Each individual
116 ; file inside a diff which exceeds this limit will be displayed partially.
116 ; file inside a diff which exceeds this limit will be displayed partially.
117 ; E.g. 128000 == 128Kb
117 ; E.g. 128000 == 128Kb
118 cut_off_limit_file = 256000
118 cut_off_limit_file = 256000
119
119
120 ; Use cached version of vcs repositories everywhere. Recommended to be `true`
120 ; Use cached version of vcs repositories everywhere. Recommended to be `true`
121 vcs_full_cache = false
121 vcs_full_cache = false
122
122
123 ; Force https in RhodeCode, fixes https redirects, assumes it's always https.
123 ; Force https in RhodeCode, fixes https redirects, assumes it's always https.
124 ; Normally this is controlled by proper flags sent from http server such as Nginx or Apache
124 ; Normally this is controlled by proper flags sent from http server such as Nginx or Apache
125 force_https = false
125 force_https = false
126
126
127 ; use Strict-Transport-Security headers
127 ; use Strict-Transport-Security headers
128 use_htsts = false
128 use_htsts = false
129
129
130 ; Set to true if your repos are exposed using the dumb protocol
130 ; Set to true if your repos are exposed using the dumb protocol
131 git_update_server_info = false
131 git_update_server_info = false
132
132
133 ; RSS/ATOM feed options
133 ; RSS/ATOM feed options
134 rss_cut_off_limit = 256000
134 rss_cut_off_limit = 256000
135 rss_items_per_page = 10
135 rss_items_per_page = 10
136 rss_include_diff = false
136 rss_include_diff = false
137
137
138 ; Gist URL alias, used to create nicer URLs for gists. This should be a
138 ; Gist URL alias, used to create nicer URLs for gists. This should be a
139 ; URL that rewrites to _admin/gists/{gistid}.
139 ; URL that rewrites to _admin/gists/{gistid}.
140 ; example: http://gist.rhodecode.org/{gistid}. Empty means use the internal
140 ; example: http://gist.rhodecode.org/{gistid}. Empty means use the internal
141 ; RhodeCode url, ie. http[s]://rhodecode.server/_admin/gists/{gistid}
141 ; RhodeCode url, ie. http[s]://rhodecode.server/_admin/gists/{gistid}
142 gist_alias_url =
142 gist_alias_url =
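; For illustration only (using the example URL from the comment above):
#gist_alias_url = http://gist.rhodecode.org/{gistid}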
143
143
144 ; List of views (using glob pattern syntax) that AUTH TOKENS could be
144 ; List of views (using glob pattern syntax) that AUTH TOKENS could be
145 ; used for access.
145 ; used for access.
146 ; Adding ?auth_token=TOKEN_HASH to the URL authenticates this request as if it
146 ; Adding ?auth_token=TOKEN_HASH to the URL authenticates this request as if it
147 ; came from the logged-in user who owns this authentication token.
147 ; came from the logged-in user who owns this authentication token.
148 ; Additionally, the @TOKEN syntax can be used to bind the view to a specific
148 ; Additionally, the @TOKEN syntax can be used to bind the view to a specific
149 ; authentication token. Such a view would only be accessible when used together
149 ; authentication token. Such a view would only be accessible when used together
150 ; with this authentication token.
150 ; with this authentication token.
151 ; list of all views can be found under `/_admin/permissions/auth_token_access`
151 ; list of all views can be found under `/_admin/permissions/auth_token_access`
152 ; The list should be "," separated and on a single line.
152 ; The list should be "," separated and on a single line.
153 ; Most common views to enable:
153 ; Most common views to enable:
154
154
155 # RepoCommitsView:repo_commit_download
155 # RepoCommitsView:repo_commit_download
156 # RepoCommitsView:repo_commit_patch
156 # RepoCommitsView:repo_commit_patch
157 # RepoCommitsView:repo_commit_raw
157 # RepoCommitsView:repo_commit_raw
158 # RepoCommitsView:repo_commit_raw@TOKEN
158 # RepoCommitsView:repo_commit_raw@TOKEN
159 # RepoFilesView:repo_files_diff
159 # RepoFilesView:repo_files_diff
160 # RepoFilesView:repo_archivefile
160 # RepoFilesView:repo_archivefile
161 # RepoFilesView:repo_file_raw
161 # RepoFilesView:repo_file_raw
162 # GistView:*
162 # GistView:*
163 api_access_controllers_whitelist =
163 api_access_controllers_whitelist =
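; Illustrative example only (not a default): to allow token-based access to raw
; files bound to a specific token, plus all gist views, one could set:
#api_access_controllers_whitelist = RepoFilesView:repo_file_raw@TOKEN, GistView:*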
164
164
165 ; Default encoding used to convert from and to unicode
165 ; Default encoding used to convert from and to unicode
166 ; can be also a comma separated list of encoding in case of mixed encodings
166 ; can be also a comma separated list of encoding in case of mixed encodings
167 default_encoding = UTF-8
167 default_encoding = UTF-8
168
168
169 ; instance-id prefix
169 ; instance-id prefix
170 ; a prefix key for this instance used for cache invalidation when running
170 ; a prefix key for this instance used for cache invalidation when running
171 ; multiple instances of RhodeCode, make sure it's globally unique for
171 ; multiple instances of RhodeCode, make sure it's globally unique for
172 ; all running RhodeCode instances. Leave empty if you don't use it
172 ; all running RhodeCode instances. Leave empty if you don't use it
173 instance_id =
173 instance_id =
174
174
175 ; Fallback authentication plugin. Set this to a plugin ID to force the usage
175 ; Fallback authentication plugin. Set this to a plugin ID to force the usage
176 ; of an authentication plugin even if it is disabled by its settings.
176 ; of an authentication plugin even if it is disabled by its settings.
177 ; This could be useful if you are unable to log in to the system due to broken
177 ; This could be useful if you are unable to log in to the system due to broken
178 ; authentication settings. Then you can enable e.g. the internal RhodeCode auth
178 ; authentication settings. Then you can enable e.g. the internal RhodeCode auth
179 ; module to log in again and fix the settings.
179 ; module to log in again and fix the settings.
180 ; Available builtin plugin IDs (hash is part of the ID):
180 ; Available builtin plugin IDs (hash is part of the ID):
181 ; egg:rhodecode-enterprise-ce#rhodecode
181 ; egg:rhodecode-enterprise-ce#rhodecode
182 ; egg:rhodecode-enterprise-ce#pam
182 ; egg:rhodecode-enterprise-ce#pam
183 ; egg:rhodecode-enterprise-ce#ldap
183 ; egg:rhodecode-enterprise-ce#ldap
184 ; egg:rhodecode-enterprise-ce#jasig_cas
184 ; egg:rhodecode-enterprise-ce#jasig_cas
185 ; egg:rhodecode-enterprise-ce#headers
185 ; egg:rhodecode-enterprise-ce#headers
186 ; egg:rhodecode-enterprise-ce#crowd
186 ; egg:rhodecode-enterprise-ce#crowd
187
187
188 #rhodecode.auth_plugin_fallback = egg:rhodecode-enterprise-ce#rhodecode
188 #rhodecode.auth_plugin_fallback = egg:rhodecode-enterprise-ce#rhodecode
189
189
190 ; Flag to control loading of legacy plugins in py:/path format
190 ; Flag to control loading of legacy plugins in py:/path format
191 auth_plugin.import_legacy_plugins = true
191 auth_plugin.import_legacy_plugins = true
192
192
193 ; Alternative HTTP response code to return on failed authentication. The default HTTP
193 ; Alternative HTTP response code to return on failed authentication. The default HTTP
194 ; response is 401 HTTPUnauthorized. Currently HG clients have trouble
194 ; response is 401 HTTPUnauthorized. Currently HG clients have trouble
195 ; handling that, causing a series of failed authentication calls.
195 ; handling that, causing a series of failed authentication calls.
196 ; Set this variable to 403 to return HTTPForbidden, or any other HTTP code
196 ; Set this variable to 403 to return HTTPForbidden, or any other HTTP code
197 ; This will be served instead of default 401 on bad authentication
197 ; This will be served instead of default 401 on bad authentication
198 auth_ret_code =
198 auth_ret_code =
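; For example, to serve HTTPForbidden instead of the default 401 (illustration only):
#auth_ret_code = 403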
199
199
200 ; use special detection method when serving auth_ret_code, instead of serving
200 ; use special detection method when serving auth_ret_code, instead of serving
201 ; ret_code directly, use 401 initially (Which triggers credentials prompt)
201 ; ret_code directly, use 401 initially (Which triggers credentials prompt)
202 ; and then serve auth_ret_code to clients
202 ; and then serve auth_ret_code to clients
203 auth_ret_code_detection = false
203 auth_ret_code_detection = false
204
204
205 ; locking return code. When repository is locked return this HTTP code. 2XX
205 ; locking return code. When repository is locked return this HTTP code. 2XX
206 ; codes don't break the transactions while 4XX codes do
206 ; codes don't break the transactions while 4XX codes do
207 lock_ret_code = 423
207 lock_ret_code = 423
208
208
209 ; allows to change the repository location in settings page
210 allow_repo_location_change = true
211
212 ; Allows setting up custom hooks in the settings page
209 ; Allows setting up custom hooks in the settings page
213 allow_custom_hooks_settings = true
210 allow_custom_hooks_settings = true
214
211
215 ; Generated license token required for EE edition license.
212 ; Generated license token required for EE edition license.
216 ; New generated token value can be found in Admin > settings > license page.
213 ; New generated token value can be found in Admin > settings > license page.
217 license_token = abra-cada-bra1-rce3
214 license_token = abra-cada-bra1-rce3
218
215
219 ; This flag hides sensitive information on the license page such as token, and license data
216 ; This flag hides sensitive information on the license page such as token, and license data
220 license.hide_license_info = false
217 license.hide_license_info = false
221
218
222 ; supervisor connection uri, for managing supervisor and logs.
219 ; supervisor connection uri, for managing supervisor and logs.
223 supervisor.uri =
220 supervisor.uri =
224
221
225 ; supervisord group name/id we only want this RC instance to handle
222 ; supervisord group name/id we only want this RC instance to handle
226 supervisor.group_id = dev
223 supervisor.group_id = dev
227
224
228 ; Display extended labs settings
225 ; Display extended labs settings
229 labs_settings_active = true
226 labs_settings_active = true
230
227
231 ; Custom exception store path, defaults to TMPDIR
228 ; Custom exception store path, defaults to TMPDIR
232 ; This is used to store exceptions from RhodeCode in a shared directory
229 ; This is used to store exceptions from RhodeCode in a shared directory
233 #exception_tracker.store_path =
230 #exception_tracker.store_path =
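; e.g. a shared directory next to this config file (illustrative value, not a default):
#exception_tracker.store_path = %(here)s/data/exceptions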
234
231
235 ; Send email with exception details when it happens
232 ; Send email with exception details when it happens
236 #exception_tracker.send_email = false
233 #exception_tracker.send_email = false
237
234
238 ; Comma separated list of recipients for exception emails,
235 ; Comma separated list of recipients for exception emails,
239 ; e.g admin@rhodecode.com,devops@rhodecode.com
236 ; e.g admin@rhodecode.com,devops@rhodecode.com
240 ; Can be left empty, then emails will be sent to ALL super-admins
237 ; Can be left empty, then emails will be sent to ALL super-admins
241 #exception_tracker.send_email_recipients =
238 #exception_tracker.send_email_recipients =
242
239
243 ; optional prefix to Add to email Subject
240 ; optional prefix to Add to email Subject
244 #exception_tracker.email_prefix = [RHODECODE ERROR]
241 #exception_tracker.email_prefix = [RHODECODE ERROR]
245
242
246 ; File store configuration. This is used to store and serve uploaded files
243 ; File store configuration. This is used to store and serve uploaded files
247 file_store.enabled = true
244 file_store.enabled = true
248
245
249 ; Storage backend, available options are: local
246 ; Storage backend, available options are: local
250 file_store.backend = local
247 file_store.backend = local
251
248
252 ; path to store the uploaded binaries
249 ; path to store the uploaded binaries
253 file_store.storage_path = %(here)s/data/file_store
250 file_store.storage_path = %(here)s/data/file_store
254
251
255 ; Uncomment and set this path to control settings for archive download cache.
252 ; Uncomment and set this path to control settings for archive download cache.
256 ; Generated repo archives will be cached at this location
253 ; Generated repo archives will be cached at this location
257 ; and served from the cache during subsequent requests for the same archive of
254 ; and served from the cache during subsequent requests for the same archive of
258 ; the repository. It is important that this path is shared across filesystems and
255 ; the repository. It is important that this path is shared across filesystems and
259 ; accessible by both RhodeCode and vcsserver
256 ; accessible by both RhodeCode and vcsserver
260
257
261 ; Default is $cache_dir/archive_cache if not set
258 ; Default is $cache_dir/archive_cache if not set
262 archive_cache.store_dir = /tmp/rc-test-data/archive_cache
259 archive_cache.store_dir = /tmp/rc-test-data/archive_cache
263
260
264 ; The limit in GB sets how much data we cache before recycling last used, defaults to 10 gb
261 ; The limit in GB sets how much data we cache before recycling last used, defaults to 10 gb
265 archive_cache.cache_size_gb = 10
262 archive_cache.cache_size_gb = 10
266
263
267 ; By default the cache uses a sharding technique; this specifies how many shards there are
264 ; By default the cache uses a sharding technique; this specifies how many shards there are
268 archive_cache.cache_shards = 10
265 archive_cache.cache_shards = 10
269
266
270 ; #############
267 ; #############
271 ; CELERY CONFIG
268 ; CELERY CONFIG
272 ; #############
269 ; #############
273
270
274 ; manually run celery: /path/to/celery worker --task-events --beat --app rhodecode.lib.celerylib.loader --scheduler rhodecode.lib.celerylib.scheduler.RcScheduler --loglevel DEBUG --ini /path/to/rhodecode.ini
271 ; manually run celery: /path/to/celery worker --task-events --beat --app rhodecode.lib.celerylib.loader --scheduler rhodecode.lib.celerylib.scheduler.RcScheduler --loglevel DEBUG --ini /path/to/rhodecode.ini
275
272
276 use_celery = false
273 use_celery = false
277
274
278 ; path to store schedule database
275 ; path to store schedule database
279 #celerybeat-schedule.path =
276 #celerybeat-schedule.path =
280
277
281 ; connection url to the message broker (default redis)
278 ; connection url to the message broker (default redis)
282 celery.broker_url = redis://localhost:6379/8
279 celery.broker_url = redis://localhost:6379/8
283
280
284 ; rabbitmq example
281 ; rabbitmq example
285 #celery.broker_url = amqp://rabbitmq:qweqwe@localhost:5672/rabbitmqhost
282 #celery.broker_url = amqp://rabbitmq:qweqwe@localhost:5672/rabbitmqhost
286
283
287 ; maximum tasks to execute before worker restart
284 ; maximum tasks to execute before worker restart
288 celery.max_tasks_per_child = 20
285 celery.max_tasks_per_child = 20
289
286
290 ; tasks will never be sent to the queue, but executed locally instead.
287 ; tasks will never be sent to the queue, but executed locally instead.
291 celery.task_always_eager = false
288 celery.task_always_eager = false
292
289
293 ; #############
290 ; #############
294 ; DOGPILE CACHE
291 ; DOGPILE CACHE
295 ; #############
292 ; #############
296
293
297 ; Default cache dir for caches. Putting this into a ramdisk can boost performance.
294 ; Default cache dir for caches. Putting this into a ramdisk can boost performance.
298 ; e.g. /tmpfs/data_ramdisk; however, this directory might require a large amount of space
295 ; e.g. /tmpfs/data_ramdisk; however, this directory might require a large amount of space
299 cache_dir = %(here)s/rc-test-data
296 cache_dir = %(here)s/rc-test-data
300
297
301 ; *********************************************
298 ; *********************************************
302 ; `sql_cache_short` cache for heavy SQL queries
299 ; `sql_cache_short` cache for heavy SQL queries
303 ; Only supported backend is `memory_lru`
300 ; Only supported backend is `memory_lru`
304 ; *********************************************
301 ; *********************************************
305 rc_cache.sql_cache_short.backend = dogpile.cache.rc.memory_lru
302 rc_cache.sql_cache_short.backend = dogpile.cache.rc.memory_lru
306 rc_cache.sql_cache_short.expiration_time = 0
303 rc_cache.sql_cache_short.expiration_time = 0
307
304
308
305
309 ; *****************************************************
306 ; *****************************************************
310 ; `cache_repo_longterm` cache for repo object instances
307 ; `cache_repo_longterm` cache for repo object instances
311 ; Only supported backend is `memory_lru`
308 ; Only supported backend is `memory_lru`
312 ; *****************************************************
309 ; *****************************************************
313 rc_cache.cache_repo_longterm.backend = dogpile.cache.rc.memory_lru
310 rc_cache.cache_repo_longterm.backend = dogpile.cache.rc.memory_lru
314 ; by default we use 30 Days, cache is still invalidated on push
311 ; by default we use 30 Days, cache is still invalidated on push
315 rc_cache.cache_repo_longterm.expiration_time = 2592000
312 rc_cache.cache_repo_longterm.expiration_time = 2592000
316 ; max items in LRU cache, set to smaller number to save memory, and expire last used caches
313 ; max items in LRU cache, set to smaller number to save memory, and expire last used caches
317 rc_cache.cache_repo_longterm.max_size = 10000
314 rc_cache.cache_repo_longterm.max_size = 10000
318
315
319
316
320 ; *********************************************
317 ; *********************************************
321 ; `cache_general` cache for general purpose use
318 ; `cache_general` cache for general purpose use
322 ; for simplicity use rc.file_namespace backend,
319 ; for simplicity use rc.file_namespace backend,
323 ; for performance and scale use rc.redis
320 ; for performance and scale use rc.redis
324 ; *********************************************
321 ; *********************************************
325 rc_cache.cache_general.backend = dogpile.cache.rc.file_namespace
322 rc_cache.cache_general.backend = dogpile.cache.rc.file_namespace
326 rc_cache.cache_general.expiration_time = 43200
323 rc_cache.cache_general.expiration_time = 43200
327 ; file cache store path. Defaults to `cache_dir =` value or tempdir if both values are not set
324 ; file cache store path. Defaults to `cache_dir =` value or tempdir if both values are not set
328 rc_cache.cache_general.arguments.filename = %(here)s/cache-backend/cache_general_db
325 rc_cache.cache_general.arguments.filename = %(here)s/cache-backend/cache_general_db
329
326
330 ; alternative `cache_general` redis backend with distributed lock
327 ; alternative `cache_general` redis backend with distributed lock
331 #rc_cache.cache_general.backend = dogpile.cache.rc.redis
328 #rc_cache.cache_general.backend = dogpile.cache.rc.redis
332 #rc_cache.cache_general.expiration_time = 300
329 #rc_cache.cache_general.expiration_time = 300
333
330
334 ; redis_expiration_time needs to be greater than expiration_time
331 ; redis_expiration_time needs to be greater than expiration_time
335 #rc_cache.cache_general.arguments.redis_expiration_time = 7200
332 #rc_cache.cache_general.arguments.redis_expiration_time = 7200
336
333
337 #rc_cache.cache_general.arguments.host = localhost
334 #rc_cache.cache_general.arguments.host = localhost
338 #rc_cache.cache_general.arguments.port = 6379
335 #rc_cache.cache_general.arguments.port = 6379
339 #rc_cache.cache_general.arguments.db = 0
336 #rc_cache.cache_general.arguments.db = 0
340 #rc_cache.cache_general.arguments.socket_timeout = 30
337 #rc_cache.cache_general.arguments.socket_timeout = 30
341 ; more Redis options: https://dogpilecache.sqlalchemy.org/en/latest/api.html#redis-backends
338 ; more Redis options: https://dogpilecache.sqlalchemy.org/en/latest/api.html#redis-backends
342 #rc_cache.cache_general.arguments.distributed_lock = true
339 #rc_cache.cache_general.arguments.distributed_lock = true
343
340
344 ; auto-renew lock to prevent stale locks, slower but safer. Use only if problems happen
341 ; auto-renew lock to prevent stale locks, slower but safer. Use only if problems happen
345 #rc_cache.cache_general.arguments.lock_auto_renewal = true
342 #rc_cache.cache_general.arguments.lock_auto_renewal = true
346
343
347 ; *************************************************
344 ; *************************************************
348 ; `cache_perms` cache for permission tree, auth TTL
345 ; `cache_perms` cache for permission tree, auth TTL
349 ; for simplicity use rc.file_namespace backend,
346 ; for simplicity use rc.file_namespace backend,
350 ; for performance and scale use rc.redis
347 ; for performance and scale use rc.redis
351 ; *************************************************
348 ; *************************************************
352 rc_cache.cache_perms.backend = dogpile.cache.rc.file_namespace
349 rc_cache.cache_perms.backend = dogpile.cache.rc.file_namespace
353 rc_cache.cache_perms.expiration_time = 0
350 rc_cache.cache_perms.expiration_time = 0
354 ; file cache store path. Defaults to `cache_dir =` value or tempdir if both values are not set
351 ; file cache store path. Defaults to `cache_dir =` value or tempdir if both values are not set
355 rc_cache.cache_perms.arguments.filename = %(here)s/cache-backend/cache_perms_db
352 rc_cache.cache_perms.arguments.filename = %(here)s/cache-backend/cache_perms_db
356
353
357 ; alternative `cache_perms` redis backend with distributed lock
354 ; alternative `cache_perms` redis backend with distributed lock
358 #rc_cache.cache_perms.backend = dogpile.cache.rc.redis
355 #rc_cache.cache_perms.backend = dogpile.cache.rc.redis
359 #rc_cache.cache_perms.expiration_time = 300
356 #rc_cache.cache_perms.expiration_time = 300
360
357
361 ; redis_expiration_time needs to be greater than expiration_time
358 ; redis_expiration_time needs to be greater than expiration_time
362 #rc_cache.cache_perms.arguments.redis_expiration_time = 7200
359 #rc_cache.cache_perms.arguments.redis_expiration_time = 7200
363
360
364 #rc_cache.cache_perms.arguments.host = localhost
361 #rc_cache.cache_perms.arguments.host = localhost
365 #rc_cache.cache_perms.arguments.port = 6379
362 #rc_cache.cache_perms.arguments.port = 6379
366 #rc_cache.cache_perms.arguments.db = 0
363 #rc_cache.cache_perms.arguments.db = 0
367 #rc_cache.cache_perms.arguments.socket_timeout = 30
364 #rc_cache.cache_perms.arguments.socket_timeout = 30
368 ; more Redis options: https://dogpilecache.sqlalchemy.org/en/latest/api.html#redis-backends
365 ; more Redis options: https://dogpilecache.sqlalchemy.org/en/latest/api.html#redis-backends
369 #rc_cache.cache_perms.arguments.distributed_lock = true
366 #rc_cache.cache_perms.arguments.distributed_lock = true
370
367
371 ; auto-renew lock to prevent stale locks, slower but safer. Use only if problems happen
368 ; auto-renew lock to prevent stale locks, slower but safer. Use only if problems happen
372 #rc_cache.cache_perms.arguments.lock_auto_renewal = true
369 #rc_cache.cache_perms.arguments.lock_auto_renewal = true
373
370
374 ; ***************************************************
371 ; ***************************************************
375 ; `cache_repo` cache for file tree, Readme, RSS FEEDS
372 ; `cache_repo` cache for file tree, Readme, RSS FEEDS
376 ; for simplicity use rc.file_namespace backend,
373 ; for simplicity use rc.file_namespace backend,
377 ; for performance and scale use rc.redis
374 ; for performance and scale use rc.redis
378 ; ***************************************************
375 ; ***************************************************
379 rc_cache.cache_repo.backend = dogpile.cache.rc.file_namespace
376 rc_cache.cache_repo.backend = dogpile.cache.rc.file_namespace
380 rc_cache.cache_repo.expiration_time = 2592000
377 rc_cache.cache_repo.expiration_time = 2592000
381 ; file cache store path. Defaults to `cache_dir =` value or tempdir if both values are not set
378 ; file cache store path. Defaults to `cache_dir =` value or tempdir if both values are not set
382 rc_cache.cache_repo.arguments.filename = %(here)s/cache-backend/cache_repo_db
379 rc_cache.cache_repo.arguments.filename = %(here)s/cache-backend/cache_repo_db
383
380
384 ; alternative `cache_repo` redis backend with distributed lock
381 ; alternative `cache_repo` redis backend with distributed lock
385 #rc_cache.cache_repo.backend = dogpile.cache.rc.redis
382 #rc_cache.cache_repo.backend = dogpile.cache.rc.redis
386 #rc_cache.cache_repo.expiration_time = 2592000
383 #rc_cache.cache_repo.expiration_time = 2592000
387
384
388 ; redis_expiration_time needs to be greater than expiration_time
385 ; redis_expiration_time needs to be greater than expiration_time
389 #rc_cache.cache_repo.arguments.redis_expiration_time = 2678400
386 #rc_cache.cache_repo.arguments.redis_expiration_time = 2678400
390
387
391 #rc_cache.cache_repo.arguments.host = localhost
388 #rc_cache.cache_repo.arguments.host = localhost
392 #rc_cache.cache_repo.arguments.port = 6379
389 #rc_cache.cache_repo.arguments.port = 6379
393 #rc_cache.cache_repo.arguments.db = 1
390 #rc_cache.cache_repo.arguments.db = 1
394 #rc_cache.cache_repo.arguments.socket_timeout = 30
391 #rc_cache.cache_repo.arguments.socket_timeout = 30
395 ; more Redis options: https://dogpilecache.sqlalchemy.org/en/latest/api.html#redis-backends
392 ; more Redis options: https://dogpilecache.sqlalchemy.org/en/latest/api.html#redis-backends
396 #rc_cache.cache_repo.arguments.distributed_lock = true
393 #rc_cache.cache_repo.arguments.distributed_lock = true
397
394
398 ; auto-renew lock to prevent stale locks, slower but safer. Use only if problems happen
395 ; auto-renew lock to prevent stale locks, slower but safer. Use only if problems happen
399 #rc_cache.cache_repo.arguments.lock_auto_renewal = true
396 #rc_cache.cache_repo.arguments.lock_auto_renewal = true
400
397
401 ; ##############
398 ; ##############
402 ; BEAKER SESSION
399 ; BEAKER SESSION
403 ; ##############
400 ; ##############
404
401
405 ; beaker.session.type is the type of storage used for logged-in users' sessions. Currently allowed
402 ; beaker.session.type is the type of storage used for logged-in users' sessions. Currently allowed
406 ; types are file, ext:redis, ext:database, ext:memcached, and memory (default if not specified).
403 ; types are file, ext:redis, ext:database, ext:memcached, and memory (default if not specified).
407 ; Fastest ones are Redis and ext:database
404 ; Fastest ones are Redis and ext:database
408 beaker.session.type = file
405 beaker.session.type = file
409 beaker.session.data_dir = %(here)s/rc-tests/data/sessions
406 beaker.session.data_dir = %(here)s/rc-tests/data/sessions
410
407
411 ; Redis based sessions
408 ; Redis based sessions
412 #beaker.session.type = ext:redis
409 #beaker.session.type = ext:redis
413 #beaker.session.url = redis://127.0.0.1:6379/2
410 #beaker.session.url = redis://127.0.0.1:6379/2
414
411
415 ; DB based session, fast, and allows easy management over logged in users
412 ; DB based session, fast, and allows easy management over logged in users
416 #beaker.session.type = ext:database
413 #beaker.session.type = ext:database
417 #beaker.session.table_name = db_session
414 #beaker.session.table_name = db_session
418 #beaker.session.sa.url = postgresql://postgres:secret@localhost/rhodecode
415 #beaker.session.sa.url = postgresql://postgres:secret@localhost/rhodecode
419 #beaker.session.sa.url = mysql://root:secret@127.0.0.1/rhodecode
416 #beaker.session.sa.url = mysql://root:secret@127.0.0.1/rhodecode
420 #beaker.session.sa.pool_recycle = 3600
417 #beaker.session.sa.pool_recycle = 3600
421 #beaker.session.sa.echo = false
418 #beaker.session.sa.echo = false
422
419
423 beaker.session.key = rhodecode
420 beaker.session.key = rhodecode
424 beaker.session.secret = test-rc-uytcxaz
421 beaker.session.secret = test-rc-uytcxaz
425 beaker.session.lock_dir = %(here)s/data/sessions/lock
422 beaker.session.lock_dir = %(here)s/data/sessions/lock
426
423
427 ; Secure encrypted cookie. Requires AES and AES python libraries
424 ; Secure encrypted cookie. Requires AES and AES python libraries
428 ; you must disable beaker.session.secret to use this
425 ; you must disable beaker.session.secret to use this
429 #beaker.session.encrypt_key = key_for_encryption
426 #beaker.session.encrypt_key = key_for_encryption
430 #beaker.session.validate_key = validation_key
427 #beaker.session.validate_key = validation_key
431
428
432 ; Sets the session as invalid (also logging out the user) if it has not been
429 ; Sets the session as invalid (also logging out the user) if it has not been
433 ; accessed for the given amount of time in seconds
430 ; accessed for the given amount of time in seconds
434 beaker.session.timeout = 2592000
431 beaker.session.timeout = 2592000
435 beaker.session.httponly = true
432 beaker.session.httponly = true
436
433
437 ; Path to use for the cookie. Set to prefix if you use prefix middleware
434 ; Path to use for the cookie. Set to prefix if you use prefix middleware
438 #beaker.session.cookie_path = /custom_prefix
435 #beaker.session.cookie_path = /custom_prefix
439
436
440 ; Set https secure cookie
437 ; Set https secure cookie
441 beaker.session.secure = false
438 beaker.session.secure = false
442
439
443 ; auto save the session so there is no need to call .save()
440 ; auto save the session so there is no need to call .save()
444 beaker.session.auto = false
441 beaker.session.auto = false
445
442
446 ; default cookie expiration time in seconds, set to `true` to expire
443 ; default cookie expiration time in seconds, set to `true` to expire
447 ; at browser close
444 ; at browser close
448 #beaker.session.cookie_expires = 3600
445 #beaker.session.cookie_expires = 3600
449
446
450 ; #############################
447 ; #############################
451 ; SEARCH INDEXING CONFIGURATION
448 ; SEARCH INDEXING CONFIGURATION
452 ; #############################
449 ; #############################
453
450
454 ; Full text search indexer is available in rhodecode-tools under
451 ; Full text search indexer is available in rhodecode-tools under
455 ; `rhodecode-tools index` command
452 ; `rhodecode-tools index` command
456
453
457 ; WHOOSH Backend, doesn't require additional services to run
454 ; WHOOSH Backend, doesn't require additional services to run
458 ; it works well with a few dozen repos
455 ; it works well with a few dozen repos
459 search.module = rhodecode.lib.index.whoosh
456 search.module = rhodecode.lib.index.whoosh
460 search.location = %(here)s/data/index
457 search.location = %(here)s/data/index
461
458
462 ; ####################
459 ; ####################
463 ; CHANNELSTREAM CONFIG
460 ; CHANNELSTREAM CONFIG
464 ; ####################
461 ; ####################
465
462
466 ; channelstream enables persistent connections and live notifications
463 ; channelstream enables persistent connections and live notifications
467 ; in the system. It's also used by the chat system
464 ; in the system. It's also used by the chat system
468
465
469 channelstream.enabled = false
466 channelstream.enabled = false
470
467
471 ; server address for channelstream server on the backend
468 ; server address for channelstream server on the backend
472 channelstream.server = 127.0.0.1:9800
469 channelstream.server = 127.0.0.1:9800
473
470
474 ; location of the channelstream server from outside world
471 ; location of the channelstream server from outside world
475 ; use ws:// for http or wss:// for https. This address needs to be handled
472 ; use ws:// for http or wss:// for https. This address needs to be handled
476 ; by external HTTP server such as Nginx or Apache
473 ; by external HTTP server such as Nginx or Apache
477 ; see Nginx/Apache configuration examples in our docs
474 ; see Nginx/Apache configuration examples in our docs
478 channelstream.ws_url = ws://rhodecode.yourserver.com/_channelstream
475 channelstream.ws_url = ws://rhodecode.yourserver.com/_channelstream
479 channelstream.secret = secret
476 channelstream.secret = secret
480 channelstream.history.location = %(here)s/channelstream_history
477 channelstream.history.location = %(here)s/channelstream_history
481
478
482 ; Internal application path that Javascript uses to connect into.
479 ; Internal application path that Javascript uses to connect into.
483 ; If you use proxy-prefix the prefix should be added before /_channelstream
480 ; If you use proxy-prefix the prefix should be added before /_channelstream
484 channelstream.proxy_path = /_channelstream
481 channelstream.proxy_path = /_channelstream
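; For example, if the proxy-prefix filter is enabled with `prefix = /custom_prefix`,
; the prefix goes in front of the internal path (illustration only):
#channelstream.proxy_path = /custom_prefix/_channelstream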
485
482
486
483
487 ; ##############################
484 ; ##############################
488 ; MAIN RHODECODE DATABASE CONFIG
485 ; MAIN RHODECODE DATABASE CONFIG
489 ; ##############################
486 ; ##############################
490
487
491 #sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode.db?timeout=30
488 #sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode.db?timeout=30
492 #sqlalchemy.db1.url = postgresql://postgres:qweqwe@localhost/rhodecode
489 #sqlalchemy.db1.url = postgresql://postgres:qweqwe@localhost/rhodecode
493 #sqlalchemy.db1.url = mysql://root:qweqwe@localhost/rhodecode?charset=utf8
490 #sqlalchemy.db1.url = mysql://root:qweqwe@localhost/rhodecode?charset=utf8
494 ; pymysql is an alternative driver for MySQL, use in case of problems with default one
491 ; pymysql is an alternative driver for MySQL, use in case of problems with default one
495 #sqlalchemy.db1.url = mysql+pymysql://root:qweqwe@localhost/rhodecode
492 #sqlalchemy.db1.url = mysql+pymysql://root:qweqwe@localhost/rhodecode
496
493
497 sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode_test.db?timeout=30
494 sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode_test.db?timeout=30
498
495
499 ; see sqlalchemy docs for other advanced settings
496 ; see sqlalchemy docs for other advanced settings
500 ; print the sql statements to output
497 ; print the sql statements to output
501 sqlalchemy.db1.echo = false
498 sqlalchemy.db1.echo = false
502
499
503 ; recycle the connections after this amount of seconds
500 ; recycle the connections after this amount of seconds
504 sqlalchemy.db1.pool_recycle = 3600
501 sqlalchemy.db1.pool_recycle = 3600
505
502
506 ; the number of connections to keep open inside the connection pool.
503 ; the number of connections to keep open inside the connection pool.
507 ; 0 indicates no limit
504 ; 0 indicates no limit
508 ; the general calculus with gevent is:
505 ; the general calculus with gevent is:
509 ; if your system allows 500 concurrent greenlets (max_connections) that all do database access,
506 ; if your system allows 500 concurrent greenlets (max_connections) that all do database access,
510 ; then increase pool size + max overflow so that they add up to 500.
507 ; then increase pool size + max overflow so that they add up to 500.
511 #sqlalchemy.db1.pool_size = 5
508 #sqlalchemy.db1.pool_size = 5
512
509
513 ; The number of connections to allow in connection pool "overflow", that is
510 ; The number of connections to allow in connection pool "overflow", that is
514 ; connections that can be opened above and beyond the pool_size setting,
511 ; connections that can be opened above and beyond the pool_size setting,
515 ; which defaults to five.
512 ; which defaults to five.
516 #sqlalchemy.db1.max_overflow = 10
513 #sqlalchemy.db1.max_overflow = 10
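; Worked example of the calculation above (illustrative values, not defaults):
; with 500 concurrent greenlets doing database access, pool_size + max_overflow
; should add up to 500, e.g.
#sqlalchemy.db1.pool_size = 200
#sqlalchemy.db1.max_overflow = 300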
517
514
518 ; Connection check ping, used to detect broken database connections
515 ; Connection check ping, used to detect broken database connections
519 ; could be enabled to better handle cases if MySQL has gone away errors
516 ; could be enabled to better handle cases if MySQL has gone away errors
520 #sqlalchemy.db1.ping_connection = true
517 #sqlalchemy.db1.ping_connection = true
521
518
522 ; ##########
519 ; ##########
523 ; VCS CONFIG
520 ; VCS CONFIG
524 ; ##########
521 ; ##########
525 vcs.server.enable = true
522 vcs.server.enable = true
526 vcs.server = vcsserver:10010
523 vcs.server = vcsserver:10010
527
524
528 ; Web server connectivity protocol, responsible for web based VCS operations
525 ; Web server connectivity protocol, responsible for web based VCS operations
529 ; Available protocols are:
526 ; Available protocols are:
530 ; `http` - use http-rpc backend (default)
527 ; `http` - use http-rpc backend (default)
531 vcs.server.protocol = http
528 vcs.server.protocol = http
532
529
533 ; Push/Pull operations protocol, available options are:
530 ; Push/Pull operations protocol, available options are:
534 ; `http` - use http-rpc backend (default)
531 ; `http` - use http-rpc backend (default)
535 vcs.scm_app_implementation = http
532 vcs.scm_app_implementation = http
536
533
537 ; Push/Pull operations hooks protocol, available options are:
534 ; Push/Pull operations hooks protocol, available options are:
538 ; `http` - use http-rpc backend (default)
535 ; `http` - use http-rpc backend (default)
539 vcs.hooks.protocol = http
536 vcs.hooks.protocol = http
540
537
541 ; Host on which this instance is listening for hooks. vcsserver will call this host to pull/push hooks so it should be
538 ; Host on which this instance is listening for hooks. vcsserver will call this host to pull/push hooks so it should be
542 ; accessible via network.
539 ; accessible via network.
543 ; Use vcs.hooks.host = "*" to bind to current hostname (for Docker)
540 ; Use vcs.hooks.host = "*" to bind to current hostname (for Docker)
544 vcs.hooks.host = *
541 vcs.hooks.host = *
545
542
546 ; Start VCSServer with this instance as a subprocess, useful for development
543 ; Start VCSServer with this instance as a subprocess, useful for development
547 vcs.start_server = false
544 vcs.start_server = false
548
545
549 ; List of enabled VCS backends, available options are:
546 ; List of enabled VCS backends, available options are:
550 ; `hg` - mercurial
547 ; `hg` - mercurial
551 ; `git` - git
548 ; `git` - git
552 ; `svn` - subversion
549 ; `svn` - subversion
553 vcs.backends = hg, git, svn
550 vcs.backends = hg, git, svn
554
551
555 ; Wait this number of seconds before killing connection to the vcsserver
552 ; Wait this number of seconds before killing connection to the vcsserver
556 vcs.connection_timeout = 3600
553 vcs.connection_timeout = 3600
557
554
558 ; Compatibility version when creating SVN repositories. Defaults to newest version when commented out.
555 ; Compatibility version when creating SVN repositories. Defaults to newest version when commented out.
559 ; Set a numeric version for your current SVN e.g 1.8, or 1.12
556 ; Set a numeric version for your current SVN e.g 1.8, or 1.12
560 ; Legacy available options are: pre-1.4-compatible, pre-1.5-compatible, pre-1.6-compatible, pre-1.8-compatible, pre-1.9-compatible
557 ; Legacy available options are: pre-1.4-compatible, pre-1.5-compatible, pre-1.6-compatible, pre-1.8-compatible, pre-1.9-compatible
561 #vcs.svn.compatible_version = 1.8
558 #vcs.svn.compatible_version = 1.8
562
559
563 ; Cache flag to cache vcsserver remote calls locally
560 ; Cache flag to cache vcsserver remote calls locally
564 ; It uses cache_region `cache_repo`
561 ; It uses cache_region `cache_repo`
565 vcs.methods.cache = false
562 vcs.methods.cache = false
566
563
567 ; ####################################################
564 ; ####################################################
568 ; Subversion proxy support (mod_dav_svn)
565 ; Subversion proxy support (mod_dav_svn)
569 ; Maps RhodeCode repo groups into SVN paths for Apache
566 ; Maps RhodeCode repo groups into SVN paths for Apache
570 ; ####################################################
567 ; ####################################################
571
568
572 ; Enable or disable the config file generation.
569 ; Enable or disable the config file generation.
573 svn.proxy.generate_config = false
570 svn.proxy.generate_config = false
574
571
575 ; Generate config file with `SVNListParentPath` set to `On`.
572 ; Generate config file with `SVNListParentPath` set to `On`.
576 svn.proxy.list_parent_path = true
573 svn.proxy.list_parent_path = true
577
574
578 ; Set location and file name of generated config file.
575 ; Set location and file name of generated config file.
579 svn.proxy.config_file_path = %(here)s/mod_dav_svn.conf
576 svn.proxy.config_file_path = %(here)s/mod_dav_svn.conf
580
577
581 ; alternative mod_dav config template. This needs to be a valid mako template
578 ; alternative mod_dav config template. This needs to be a valid mako template
582 ; Example template can be found in the source code:
579 ; Example template can be found in the source code:
583 ; rhodecode/apps/svn_support/templates/mod-dav-svn.conf.mako
580 ; rhodecode/apps/svn_support/templates/mod-dav-svn.conf.mako
584 #svn.proxy.config_template = ~/.rccontrol/enterprise-1/custom_svn_conf.mako
581 #svn.proxy.config_template = ~/.rccontrol/enterprise-1/custom_svn_conf.mako
585
582
586 ; Used as a prefix to the `Location` block in the generated config file.
583 ; Used as a prefix to the `Location` block in the generated config file.
587 ; In most cases it should be set to `/`.
584 ; In most cases it should be set to `/`.
588 svn.proxy.location_root = /
585 svn.proxy.location_root = /

; Command to reload the mod_dav_svn configuration on change.
; Example: `/etc/init.d/apache2 reload` or /home/USER/apache_reload.sh
; Make sure the user who runs the RhodeCode process is allowed to reload Apache
#svn.proxy.reload_cmd = /etc/init.d/apache2 reload

; If the timeout expires before the reload command finishes, the command will
; be killed. Setting it to zero means no timeout. Defaults to 10 seconds.
#svn.proxy.reload_timeout = 10
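
; One way to grant the reload permission (a sketch, assuming sudo is available
; and the service runs as a hypothetical `rhodecode` user) is a sudoers entry:
;   rhodecode ALL=(root) NOPASSWD: /etc/init.d/apache2 reload
; combined with:
;   svn.proxy.reload_cmd = sudo /etc/init.d/apache2 reload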

; ####################
; SSH Support Settings
; ####################

; Defines if a custom authorized_keys file should be created and written on
; any change of user SSH keys. Setting this to false also disables the
; possibility of adding SSH keys by users from the web interface. Super admins
; can still manage SSH keys.
ssh.generate_authorized_keyfile = true

; Options for ssh, default is `no-pty,no-port-forwarding,no-X11-forwarding,no-agent-forwarding`
# ssh.authorized_keys_ssh_opts =

; Path to the authorized_keys file where the generated entries are placed.
; It is possible to have multiple key files specified in `sshd_config`, e.g.
; AuthorizedKeysFile %h/.ssh/authorized_keys %h/.ssh/authorized_keys_rhodecode
ssh.authorized_keys_file_path = %(here)s/rc/authorized_keys_rhodecode
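
; For reference, each generated entry is a regular OpenSSH authorized_keys line
; that restricts the key to the wrapper command; roughly (illustrative only,
; the exact wrapper arguments are managed by RhodeCode):
;   no-pty,no-port-forwarding,no-X11-forwarding,no-agent-forwarding,command="~/.rccontrol/community-1/rc-ssh-wrapper ..." ssh-rsa AAAA... user@host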

; Command to execute the SSH wrapper. The binary is available in the
; RhodeCode installation directory.
; e.g. ~/.rccontrol/community-1/profile/bin/rc-ssh-wrapper
ssh.wrapper_cmd = ~/.rccontrol/community-1/rc-ssh-wrapper

; Allow shell when executing the ssh-wrapper command
ssh.wrapper_cmd_allow_shell = false

; Enables logging, and detailed output sent back to the client during SSH
; operations. Useful for debugging; shouldn't be used in production.
ssh.enable_debug_logging = false
628
625
629 ; Paths to binary executable, by default they are the names, but we can
626 ; Paths to binary executable, by default they are the names, but we can
630 ; override them if we want to use a custom one
627 ; override them if we want to use a custom one
631 ssh.executable.hg = ~/.rccontrol/vcsserver-1/profile/bin/hg
628 ssh.executable.hg = ~/.rccontrol/vcsserver-1/profile/bin/hg
632 ssh.executable.git = ~/.rccontrol/vcsserver-1/profile/bin/git
629 ssh.executable.git = ~/.rccontrol/vcsserver-1/profile/bin/git
633 ssh.executable.svn = ~/.rccontrol/vcsserver-1/profile/bin/svnserve
630 ssh.executable.svn = ~/.rccontrol/vcsserver-1/profile/bin/svnserve
634
631
635 ; Enables SSH key generator web interface. Disabling this still allows users
632 ; Enables SSH key generator web interface. Disabling this still allows users
636 ; to add their own keys.
633 ; to add their own keys.
637 ssh.enable_ui_key_generator = true
634 ssh.enable_ui_key_generator = true

; Statsd client config, this is used to send metrics to statsd
; We recommend setting up statsd_exporter and scraping the metrics with Prometheus
#statsd.enabled = false
#statsd.statsd_host = 0.0.0.0
#statsd.statsd_port = 8125
#statsd.statsd_prefix =
#statsd.statsd_ipv6 = false
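
; For example (a sketch assuming the stock prom/statsd-exporter defaults of
; statsd traffic on UDP 9125 and a Prometheus scrape endpoint on HTTP 9102):
;   statsd.enabled = true
;   statsd.statsd_host = 127.0.0.1
;   statsd.statsd_port = 9125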

; Configure logging automatically at server startup. Set to false
; to use the custom logging config below.
; The RC_LOGGING_FORMATTER and RC_LOGGING_LEVEL env variables can control
; the logging settings when autoconfigure is enabled.

logging.autoconfigure = false
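
; For example (illustrative values only; the accepted formatter names are
; assumed to correspond to the formatters defined further below, e.g. `json`):
;   export RC_LOGGING_LEVEL=INFO
;   export RC_LOGGING_FORMATTER=json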

; Specify your own custom logging config file to configure logging
#logging.logging_conf_file = /path/to/custom_logging.ini

; Dummy marker to add new entries after.
; Add any custom entries below. Please don't remove this marker.
custom.conf = 1


; #####################
; LOGGING CONFIGURATION
; #####################

[loggers]
keys = root, sqlalchemy, beaker, celery, rhodecode, ssh_wrapper, dogpile

[handlers]
keys = console, console_sql

[formatters]
keys = generic, json, color_formatter, color_formatter_sql

; #######
; LOGGERS
; #######
[logger_root]
level = NOTSET
handlers = console

[logger_routes]
level = DEBUG
handlers =
qualname = routes.middleware
## "level = DEBUG" logs the route matched and routing variables.
propagate = 1

[logger_sqlalchemy]
level = INFO
handlers = console_sql
qualname = sqlalchemy.engine
propagate = 0

[logger_beaker]
level = DEBUG
handlers =
qualname = beaker.container
propagate = 1

[logger_dogpile]
level = INFO
handlers = console
qualname = dogpile
propagate = 1

[logger_rhodecode]
level = DEBUG
handlers =
qualname = rhodecode
propagate = 1

[logger_ssh_wrapper]
level = DEBUG
handlers =
qualname = ssh_wrapper
propagate = 1

[logger_celery]
level = DEBUG
handlers =
qualname = celery


; ########
; HANDLERS
; ########

[handler_console]
class = StreamHandler
args = (sys.stderr, )
level = DEBUG
; To enable JSON formatted logs replace 'generic/color_formatter' with 'json'
; This allows sending properly formatted logs to Grafana Loki or Elasticsearch
formatter = generic
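
; For example, a JSON-emitting console handler differs only in the formatter
; line (a sketch; it reuses the `json` formatter defined further below):
;   [handler_console]
;   class = StreamHandler
;   args = (sys.stderr, )
;   level = DEBUG
;   formatter = json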

[handler_console_sql]
; "level = DEBUG" logs SQL queries and results.
; "level = INFO" logs SQL queries.
; "level = WARN" logs neither. (Recommended for production systems.)
class = StreamHandler
args = (sys.stderr, )
level = WARN
; To enable JSON formatted logs replace 'generic/color_formatter_sql' with 'json'
; This allows sending properly formatted logs to Grafana Loki or Elasticsearch
formatter = generic

; ##########
; FORMATTERS
; ##########

[formatter_generic]
class = rhodecode.lib.logging_formatter.ExceptionAwareFormatter
format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %Y-%m-%d %H:%M:%S

[formatter_color_formatter]
class = rhodecode.lib.logging_formatter.ColorFormatter
format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %Y-%m-%d %H:%M:%S

[formatter_color_formatter_sql]
class = rhodecode.lib.logging_formatter.ColorFormatterSql
format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %Y-%m-%d %H:%M:%S

[formatter_json]
format = %(timestamp)s %(levelname)s %(name)s %(message)s %(req_id)s
class = rhodecode.lib._vendor.jsonlogger.JsonFormatter