Whoosh logging is now controlled by the .ini files logging setup
marcink - r2102:04d26165 beta
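In short, the index daemon no longer configures its own log output; each shipped .ini instead declares a `whoosh_indexer` logger, so its level and handlers are governed by the same `[loggers]`/`[handlers]`/`[formatters]` sections that are already loaded for the application. A rough sketch of the pattern on the code side (the function and call site below are illustrative, not the actual daemon code):

```python
import logging

# The logger name matches the qualname declared in [logger_whoosh_indexer];
# its level and handlers are decided by the .ini, not by this module.
log = logging.getLogger('whoosh_indexer')

def index_changesets(repo_name, changesets):
    # Illustrative indexing step that reports progress through the named logger.
    for cs in changesets:
        log.debug('indexing %s@%s', repo_name, cs)
    log.info('indexed %d changesets of %s', len(changesets), repo_name)
```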
@@ -1,294 +1,301 b''
1 ################################################################################
1 ################################################################################
2 ################################################################################
2 ################################################################################
3 # RhodeCode - Pylons environment configuration #
3 # RhodeCode - Pylons environment configuration #
4 # #
4 # #
5 # The %(here)s variable will be replaced with the parent directory of this file#
5 # The %(here)s variable will be replaced with the parent directory of this file#
6 ################################################################################
6 ################################################################################
7
7
8 [DEFAULT]
8 [DEFAULT]
9 debug = true
9 debug = true
10 pdebug = false
10 pdebug = false
11 ################################################################################
11 ################################################################################
12 ## Uncomment and replace with the address which should receive ##
12 ## Uncomment and replace with the address which should receive ##
13 ## any error reports after application crash ##
13 ## any error reports after application crash ##
14 ## Additionally those settings will be used by RhodeCode mailing system ##
14 ## Additionally those settings will be used by RhodeCode mailing system ##
15 ################################################################################
15 ################################################################################
16 #email_to = admin@localhost
16 #email_to = admin@localhost
17 #error_email_from = paste_error@localhost
17 #error_email_from = paste_error@localhost
18 #app_email_from = rhodecode-noreply@localhost
18 #app_email_from = rhodecode-noreply@localhost
19 #error_message =
19 #error_message =
20 #email_prefix = [RhodeCode]
20 #email_prefix = [RhodeCode]
21
21
22 #smtp_server = mail.server.com
22 #smtp_server = mail.server.com
23 #smtp_username =
23 #smtp_username =
24 #smtp_password =
24 #smtp_password =
25 #smtp_port =
25 #smtp_port =
26 #smtp_use_tls = false
26 #smtp_use_tls = false
27 #smtp_use_ssl = true
27 #smtp_use_ssl = true
28 # Specify available auth parameters here (e.g. LOGIN PLAIN CRAM-MD5, etc.)
28 # Specify available auth parameters here (e.g. LOGIN PLAIN CRAM-MD5, etc.)
29 #smtp_auth =
29 #smtp_auth =
30
30
31 [server:main]
31 [server:main]
32 ##nr of threads to spawn
32 ##nr of threads to spawn
33 threadpool_workers = 5
33 threadpool_workers = 5
34
34
35 ##max requests before thread respawn
35 ##max requests before thread respawn
36 threadpool_max_requests = 10
36 threadpool_max_requests = 10
37
37
38 ##option to use threads instead of processes
38 ##option to use threads instead of processes
39 use_threadpool = true
39 use_threadpool = true
40
40
41 use = egg:Paste#http
41 use = egg:Paste#http
42 host = 0.0.0.0
42 host = 0.0.0.0
43 port = 5000
43 port = 5000
44
44
45 [app:main]
45 [app:main]
46 use = egg:rhodecode
46 use = egg:rhodecode
47 full_stack = true
47 full_stack = true
48 static_files = true
48 static_files = true
49 lang = en
49 lang = en
50 cache_dir = %(here)s/data
50 cache_dir = %(here)s/data
51 index_dir = %(here)s/data/index
51 index_dir = %(here)s/data/index
52 app_instance_uuid = rc-develop
52 app_instance_uuid = rc-develop
53 cut_off_limit = 256000
53 cut_off_limit = 256000
54 force_https = false
54 force_https = false
55 commit_parse_limit = 25
55 commit_parse_limit = 25
56 use_gravatar = true
56 use_gravatar = true
57 container_auth_enabled = false
57 container_auth_enabled = false
58 proxypass_auth_enabled = false
58 proxypass_auth_enabled = false
59 default_encoding = utf8
59 default_encoding = utf8
60
60
61 ## overwrite schema of clone url
61 ## overwrite schema of clone url
62 ## available vars:
62 ## available vars:
63 ## scheme - http/https
63 ## scheme - http/https
64 ## user - current user
64 ## user - current user
65 ## pass - password
65 ## pass - password
66 ## netloc - network location
66 ## netloc - network location
67 ## path - usually repo_name
67 ## path - usually repo_name
68
68
69 #clone_uri = {scheme}://{user}{pass}{netloc}{path}
69 #clone_uri = {scheme}://{user}{pass}{netloc}{path}
70
70
71 ## issue tracking mapping for commit messages
71 ## issue tracking mapping for commit messages
72 ## comment out issue_pat, issue_server, issue_prefix to disable
72 ## comment out issue_pat, issue_server, issue_prefix to disable
73
73
74 ## pattern to get the issues from commit messages
74 ## pattern to get the issues from commit messages
75 ## default one used here is #<numbers> with a regex passive group for `#`
75 ## default one used here is #<numbers> with a regex passive group for `#`
76 ## {id} will be all groups matched from this pattern
76 ## {id} will be all groups matched from this pattern
77
77
78 issue_pat = (?:\s*#)(\d+)
78 issue_pat = (?:\s*#)(\d+)
79
79
80 ## server url to the issue, each {id} will be replaced with match
80 ## server url to the issue, each {id} will be replaced with match
81 ## fetched from the regex and {repo} is replaced with repository name
81 ## fetched from the regex and {repo} is replaced with repository name
82
82
83 issue_server_link = https://myissueserver.com/{repo}/issue/{id}
83 issue_server_link = https://myissueserver.com/{repo}/issue/{id}
84
84
85 ## prefix to add to the link to indicate it's a URL
85 ## prefix to add to the link to indicate it's a URL
86 ## #314 will be replaced by <issue_prefix><id>
86 ## #314 will be replaced by <issue_prefix><id>
87
87
88 issue_prefix = #
88 issue_prefix = #
89
89
90 ## instance-id prefix
90 ## instance-id prefix
91 ## a prefix key for this instance used for cache invalidation when running
91 ## a prefix key for this instance used for cache invalidation when running
92 ## multiple instances of rhodecode, make sure it's globally unique for
92 ## multiple instances of rhodecode, make sure it's globally unique for
93 ## all running rhodecode instances. Leave empty if you don't use it
93 ## all running rhodecode instances. Leave empty if you don't use it
94 instance_id =
94 instance_id =
95
95
96 ####################################
96 ####################################
97 ### CELERY CONFIG ####
97 ### CELERY CONFIG ####
98 ####################################
98 ####################################
99 use_celery = false
99 use_celery = false
100 broker.host = localhost
100 broker.host = localhost
101 broker.vhost = rabbitmqhost
101 broker.vhost = rabbitmqhost
102 broker.port = 5672
102 broker.port = 5672
103 broker.user = rabbitmq
103 broker.user = rabbitmq
104 broker.password = qweqwe
104 broker.password = qweqwe
105
105
106 celery.imports = rhodecode.lib.celerylib.tasks
106 celery.imports = rhodecode.lib.celerylib.tasks
107
107
108 celery.result.backend = amqp
108 celery.result.backend = amqp
109 celery.result.dburi = amqp://
109 celery.result.dburi = amqp://
110 celery.result.serialier = json
110 celery.result.serialier = json
111
111
112 #celery.send.task.error.emails = true
112 #celery.send.task.error.emails = true
113 #celery.amqp.task.result.expires = 18000
113 #celery.amqp.task.result.expires = 18000
114
114
115 celeryd.concurrency = 2
115 celeryd.concurrency = 2
116 #celeryd.log.file = celeryd.log
116 #celeryd.log.file = celeryd.log
117 celeryd.log.level = debug
117 celeryd.log.level = debug
118 celeryd.max.tasks.per.child = 1
118 celeryd.max.tasks.per.child = 1
119
119
120 #tasks will never be sent to the queue, but executed locally instead.
120 #tasks will never be sent to the queue, but executed locally instead.
121 celery.always.eager = false
121 celery.always.eager = false
122
122
123 ####################################
123 ####################################
124 ### BEAKER CACHE ####
124 ### BEAKER CACHE ####
125 ####################################
125 ####################################
126 beaker.cache.data_dir=%(here)s/data/cache/data
126 beaker.cache.data_dir=%(here)s/data/cache/data
127 beaker.cache.lock_dir=%(here)s/data/cache/lock
127 beaker.cache.lock_dir=%(here)s/data/cache/lock
128
128
129 beaker.cache.regions=super_short_term,short_term,long_term,sql_cache_short,sql_cache_med,sql_cache_long
129 beaker.cache.regions=super_short_term,short_term,long_term,sql_cache_short,sql_cache_med,sql_cache_long
130
130
131 beaker.cache.super_short_term.type=memory
131 beaker.cache.super_short_term.type=memory
132 beaker.cache.super_short_term.expire=10
132 beaker.cache.super_short_term.expire=10
133 beaker.cache.super_short_term.key_length = 256
133 beaker.cache.super_short_term.key_length = 256
134
134
135 beaker.cache.short_term.type=memory
135 beaker.cache.short_term.type=memory
136 beaker.cache.short_term.expire=60
136 beaker.cache.short_term.expire=60
137 beaker.cache.short_term.key_length = 256
137 beaker.cache.short_term.key_length = 256
138
138
139 beaker.cache.long_term.type=memory
139 beaker.cache.long_term.type=memory
140 beaker.cache.long_term.expire=36000
140 beaker.cache.long_term.expire=36000
141 beaker.cache.long_term.key_length = 256
141 beaker.cache.long_term.key_length = 256
142
142
143 beaker.cache.sql_cache_short.type=memory
143 beaker.cache.sql_cache_short.type=memory
144 beaker.cache.sql_cache_short.expire=10
144 beaker.cache.sql_cache_short.expire=10
145 beaker.cache.sql_cache_short.key_length = 256
145 beaker.cache.sql_cache_short.key_length = 256
146
146
147 beaker.cache.sql_cache_med.type=memory
147 beaker.cache.sql_cache_med.type=memory
148 beaker.cache.sql_cache_med.expire=360
148 beaker.cache.sql_cache_med.expire=360
149 beaker.cache.sql_cache_med.key_length = 256
149 beaker.cache.sql_cache_med.key_length = 256
150
150
151 beaker.cache.sql_cache_long.type=file
151 beaker.cache.sql_cache_long.type=file
152 beaker.cache.sql_cache_long.expire=3600
152 beaker.cache.sql_cache_long.expire=3600
153 beaker.cache.sql_cache_long.key_length = 256
153 beaker.cache.sql_cache_long.key_length = 256
154
154
155 ####################################
155 ####################################
156 ### BEAKER SESSION ####
156 ### BEAKER SESSION ####
157 ####################################
157 ####################################
158 ## Type of storage used for the session, current types are
158 ## Type of storage used for the session, current types are
159 ## dbm, file, memcached, database, and memory.
159 ## dbm, file, memcached, database, and memory.
160 ## The storage uses the Container API
160 ## The storage uses the Container API
161 ## that is also used by the cache system.
161 ## that is also used by the cache system.
162
162
163 ## db session example
163 ## db session example
164
164
165 #beaker.session.type = ext:database
165 #beaker.session.type = ext:database
166 #beaker.session.sa.url = postgresql://postgres:qwe@localhost/rhodecode
166 #beaker.session.sa.url = postgresql://postgres:qwe@localhost/rhodecode
167 #beaker.session.table_name = db_session
167 #beaker.session.table_name = db_session
168
168
169 ## encrypted cookie session, good for many instances
169 ## encrypted cookie session, good for many instances
170 #beaker.session.type = cookie
170 #beaker.session.type = cookie
171
171
172 beaker.session.type = file
172 beaker.session.type = file
173 beaker.session.key = rhodecode
173 beaker.session.key = rhodecode
174 # secure cookie requires AES python libraries
174 #beaker.session.encrypt_key = g654dcno0-9873jhgfreyu
175 #beaker.session.encrypt_key = g654dcno0-9873jhgfreyu
175 #beaker.session.validate_key = 9712sds2212c--zxc123
176 #beaker.session.validate_key = 9712sds2212c--zxc123
176 beaker.session.timeout = 36000
177 beaker.session.timeout = 36000
177 beaker.session.httponly = true
178 beaker.session.httponly = true
178
179
179 ## uncomment for https secure cookie
180 ## uncomment for https secure cookie
180 beaker.session.secure = false
181 beaker.session.secure = false
181
182
182 ##auto save the session so you don't have to call .save()
183 ##auto save the session so you don't have to call .save()
183 beaker.session.auto = False
184 beaker.session.auto = False
184
185
185 ##true: expire at browser close
186 ##true: expire at browser close
186 #beaker.session.cookie_expires = 3600
187 #beaker.session.cookie_expires = 3600
187
188
188
189
189 ################################################################################
190 ################################################################################
190 ## WARNING: *THE LINE BELOW MUST BE UNCOMMENTED ON A PRODUCTION ENVIRONMENT* ##
191 ## WARNING: *THE LINE BELOW MUST BE UNCOMMENTED ON A PRODUCTION ENVIRONMENT* ##
191 ## Debug mode will enable the interactive debugging tool, allowing ANYONE to ##
192 ## Debug mode will enable the interactive debugging tool, allowing ANYONE to ##
192 ## execute malicious code after an exception is raised. ##
193 ## execute malicious code after an exception is raised. ##
193 ################################################################################
194 ################################################################################
194 #set debug = false
195 #set debug = false
195
196
196 ##################################
197 ##################################
197 ### LOGVIEW CONFIG ###
198 ### LOGVIEW CONFIG ###
198 ##################################
199 ##################################
199 logview.sqlalchemy = #faa
200 logview.sqlalchemy = #faa
200 logview.pylons.templating = #bfb
201 logview.pylons.templating = #bfb
201 logview.pylons.util = #eee
202 logview.pylons.util = #eee
202
203
203 #########################################################
204 #########################################################
204 ### DB CONFIGS - EACH DB WILL HAVE IT'S OWN CONFIG ###
205 ### DB CONFIGS - EACH DB WILL HAVE IT'S OWN CONFIG ###
205 #########################################################
206 #########################################################
206 #sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode.db
207 #sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode.db
207 sqlalchemy.db1.url = postgresql://postgres:qwe@localhost/rhodecode
208 sqlalchemy.db1.url = postgresql://postgres:qwe@localhost/rhodecode
208 sqlalchemy.db1.echo = false
209 sqlalchemy.db1.echo = false
209 sqlalchemy.db1.pool_recycle = 3600
210 sqlalchemy.db1.pool_recycle = 3600
210 sqlalchemy.convert_unicode = true
211 sqlalchemy.convert_unicode = true
211
212
212 ################################
213 ################################
213 ### LOGGING CONFIGURATION ####
214 ### LOGGING CONFIGURATION ####
214 ################################
215 ################################
215 [loggers]
216 [loggers]
216 keys = root, routes, rhodecode, sqlalchemy, beaker, templates
217 keys = root, routes, rhodecode, sqlalchemy, beaker, templates, whoosh_indexer
217
218
218 [handlers]
219 [handlers]
219 keys = console, console_sql
220 keys = console, console_sql
220
221
221 [formatters]
222 [formatters]
222 keys = generic, color_formatter, color_formatter_sql
223 keys = generic, color_formatter, color_formatter_sql
223
224
224 #############
225 #############
225 ## LOGGERS ##
226 ## LOGGERS ##
226 #############
227 #############
227 [logger_root]
228 [logger_root]
228 level = NOTSET
229 level = NOTSET
229 handlers = console
230 handlers = console
230
231
231 [logger_routes]
232 [logger_routes]
232 level = DEBUG
233 level = DEBUG
233 handlers =
234 handlers =
234 qualname = routes.middleware
235 qualname = routes.middleware
235 # "level = DEBUG" logs the route matched and routing variables.
236 # "level = DEBUG" logs the route matched and routing variables.
236 propagate = 1
237 propagate = 1
237
238
238 [logger_beaker]
239 [logger_beaker]
239 level = DEBUG
240 level = DEBUG
240 handlers =
241 handlers =
241 qualname = beaker.container
242 qualname = beaker.container
242 propagate = 1
243 propagate = 1
243
244
244 [logger_templates]
245 [logger_templates]
245 level = INFO
246 level = INFO
246 handlers =
247 handlers =
247 qualname = pylons.templating
248 qualname = pylons.templating
248 propagate = 1
249 propagate = 1
249
250
250 [logger_rhodecode]
251 [logger_rhodecode]
251 level = DEBUG
252 level = DEBUG
252 handlers =
253 handlers =
253 qualname = rhodecode
254 qualname = rhodecode
254 propagate = 1
255 propagate = 1
255
256
256 [logger_sqlalchemy]
257 [logger_sqlalchemy]
257 level = INFO
258 level = INFO
258 handlers = console_sql
259 handlers = console_sql
259 qualname = sqlalchemy.engine
260 qualname = sqlalchemy.engine
260 propagate = 0
261 propagate = 0
261
262
263 [logger_whoosh_indexer]
264 level = DEBUG
265 handlers =
266 qualname = whoosh_indexer
267 propagate = 1
268
262 ##############
269 ##############
263 ## HANDLERS ##
270 ## HANDLERS ##
264 ##############
271 ##############
265
272
266 [handler_console]
273 [handler_console]
267 class = StreamHandler
274 class = StreamHandler
268 args = (sys.stderr,)
275 args = (sys.stderr,)
269 level = DEBUG
276 level = DEBUG
270 formatter = color_formatter
277 formatter = color_formatter
271
278
272 [handler_console_sql]
279 [handler_console_sql]
273 class = StreamHandler
280 class = StreamHandler
274 args = (sys.stderr,)
281 args = (sys.stderr,)
275 level = DEBUG
282 level = DEBUG
276 formatter = color_formatter_sql
283 formatter = color_formatter_sql
277
284
278 ################
285 ################
279 ## FORMATTERS ##
286 ## FORMATTERS ##
280 ################
287 ################
281
288
282 [formatter_generic]
289 [formatter_generic]
283 format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
290 format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
284 datefmt = %Y-%m-%d %H:%M:%S
291 datefmt = %Y-%m-%d %H:%M:%S
285
292
286 [formatter_color_formatter]
293 [formatter_color_formatter]
287 class=rhodecode.lib.colored_formatter.ColorFormatter
294 class=rhodecode.lib.colored_formatter.ColorFormatter
288 format= %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
295 format= %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
289 datefmt = %Y-%m-%d %H:%M:%S
296 datefmt = %Y-%m-%d %H:%M:%S
290
297
291 [formatter_color_formatter_sql]
298 [formatter_color_formatter_sql]
292 class=rhodecode.lib.colored_formatter.ColorFormatterSql
299 class=rhodecode.lib.colored_formatter.ColorFormatterSql
293 format= %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
300 format= %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
294 datefmt = %Y-%m-%d %H:%M:%S
301 datefmt = %Y-%m-%d %H:%M:%S
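The block added above gives the indexer its own logger entry: `level = DEBUG`, no handlers of its own, and `propagate = 1`, so its records bubble up to the root logger's `console` handler. A self-contained sketch of how these sections behave, using only the standard library (the inline string stands in for the .ini that would normally be loaded for you):

```python
import io
import logging.config

# Trimmed-down copy of the logging sections from development.ini; in RhodeCode
# the real file is loaded when the application or the indexing daemon starts.
LOGGING_INI = """\
[loggers]
keys = root, whoosh_indexer

[handlers]
keys = console

[formatters]
keys = generic

[logger_root]
level = NOTSET
handlers = console

[logger_whoosh_indexer]
level = DEBUG
handlers =
qualname = whoosh_indexer
propagate = 1

[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = DEBUG
formatter = generic

[formatter_generic]
format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %Y-%m-%d %H:%M:%S
"""

logging.config.fileConfig(io.StringIO(LOGGING_INI))
log = logging.getLogger('whoosh_indexer')
log.debug('this DEBUG record reaches the console via the root handler')
```

Lowering `level` to INFO or WARNING in the ini is now all it takes to quiet the indexer.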
@@ -1,295 +1,301 b''
1 ################################################################################
1 ################################################################################
2 ################################################################################
2 ################################################################################
3 # RhodeCode - Pylons environment configuration #
3 # RhodeCode - Pylons environment configuration #
4 # #
4 # #
5 # The %(here)s variable will be replaced with the parent directory of this file#
5 # The %(here)s variable will be replaced with the parent directory of this file#
6 ################################################################################
6 ################################################################################
7
7
8 [DEFAULT]
8 [DEFAULT]
9 debug = true
9 debug = true
10 pdebug = false
10 pdebug = false
11 ################################################################################
11 ################################################################################
12 ## Uncomment and replace with the address which should receive ##
12 ## Uncomment and replace with the address which should receive ##
13 ## any error reports after application crash ##
13 ## any error reports after application crash ##
14 ## Additionally those settings will be used by RhodeCode mailing system ##
14 ## Additionally those settings will be used by RhodeCode mailing system ##
15 ################################################################################
15 ################################################################################
16 #email_to = admin@localhost
16 #email_to = admin@localhost
17 #error_email_from = paste_error@localhost
17 #error_email_from = paste_error@localhost
18 #app_email_from = rhodecode-noreply@localhost
18 #app_email_from = rhodecode-noreply@localhost
19 #error_message =
19 #error_message =
20 #email_prefix = [RhodeCode]
20 #email_prefix = [RhodeCode]
21
21
22 #smtp_server = mail.server.com
22 #smtp_server = mail.server.com
23 #smtp_username =
23 #smtp_username =
24 #smtp_password =
24 #smtp_password =
25 #smtp_port =
25 #smtp_port =
26 #smtp_use_tls = false
26 #smtp_use_tls = false
27 #smtp_use_ssl = true
27 #smtp_use_ssl = true
28 # Specify available auth parameters here (e.g. LOGIN PLAIN CRAM-MD5, etc.)
28 # Specify available auth parameters here (e.g. LOGIN PLAIN CRAM-MD5, etc.)
29 #smtp_auth =
29 #smtp_auth =
30
30
31 [server:main]
31 [server:main]
32 ##nr of threads to spawn
32 ##nr of threads to spawn
33 threadpool_workers = 5
33 threadpool_workers = 5
34
34
35 ##max requests before thread respawn
35 ##max requests before thread respawn
36 threadpool_max_requests = 10
36 threadpool_max_requests = 10
37
37
38 ##option to use threads instead of processes
38 ##option to use threads instead of processes
39 use_threadpool = true
39 use_threadpool = true
40
40
41 use = egg:Paste#http
41 use = egg:Paste#http
42 host = 127.0.0.1
42 host = 127.0.0.1
43 port = 8001
43 port = 8001
44
44
45 [app:main]
45 [app:main]
46 use = egg:rhodecode
46 use = egg:rhodecode
47 full_stack = true
47 full_stack = true
48 static_files = true
48 static_files = true
49 lang = en
49 lang = en
50 cache_dir = %(here)s/data
50 cache_dir = %(here)s/data
51 index_dir = %(here)s/data/index
51 index_dir = %(here)s/data/index
52 app_instance_uuid = rc-production
52 app_instance_uuid = rc-production
53 cut_off_limit = 256000
53 cut_off_limit = 256000
54 force_https = false
54 force_https = false
55 commit_parse_limit = 50
55 commit_parse_limit = 50
56 use_gravatar = true
56 use_gravatar = true
57 container_auth_enabled = false
57 container_auth_enabled = false
58 proxypass_auth_enabled = false
58 proxypass_auth_enabled = false
59 default_encoding = utf8
59 default_encoding = utf8
60
60
61 ## overwrite schema of clone url
61 ## overwrite schema of clone url
62 ## available vars:
62 ## available vars:
63 ## scheme - http/https
63 ## scheme - http/https
64 ## user - current user
64 ## user - current user
65 ## pass - password
65 ## pass - password
66 ## netloc - network location
66 ## netloc - network location
67 ## path - usually repo_name
67 ## path - usually repo_name
68
68
69 #clone_uri = {scheme}://{user}{pass}{netloc}{path}
69 #clone_uri = {scheme}://{user}{pass}{netloc}{path}
70
70
71 ## issue tracking mapping for commit messages
71 ## issue tracking mapping for commit messages
72 ## comment out issue_pat, issue_server, issue_prefix to disable
72 ## comment out issue_pat, issue_server, issue_prefix to disable
73
73
74 ## pattern to get the issues from commit messages
74 ## pattern to get the issues from commit messages
75 ## default one used here is #<numbers> with a regex passive group for `#`
75 ## default one used here is #<numbers> with a regex passive group for `#`
76 ## {id} will be all groups matched from this pattern
76 ## {id} will be all groups matched from this pattern
77
77
78 issue_pat = (?:\s*#)(\d+)
78 issue_pat = (?:\s*#)(\d+)
79
79
80 ## server url to the issue, each {id} will be replaced with match
80 ## server url to the issue, each {id} will be replaced with match
81 ## fetched from the regex and {repo} is replaced with repository name
81 ## fetched from the regex and {repo} is replaced with repository name
82
82
83 issue_server_link = https://myissueserver.com/{repo}/issue/{id}
83 issue_server_link = https://myissueserver.com/{repo}/issue/{id}
84
84
85 ## prefix to add to the link to indicate it's a URL
85 ## prefix to add to the link to indicate it's a URL
86 ## #314 will be replaced by <issue_prefix><id>
86 ## #314 will be replaced by <issue_prefix><id>
87
87
88 issue_prefix = #
88 issue_prefix = #
89
89
90 ## instance-id prefix
90 ## instance-id prefix
91 ## a prefix key for this instance used for cache invalidation when running
91 ## a prefix key for this instance used for cache invalidation when running
92 ## multiple instances of rhodecode, make sure it's globally unique for
92 ## multiple instances of rhodecode, make sure it's globally unique for
93 ## all running rhodecode instances. Leave empty if you don't use it
93 ## all running rhodecode instances. Leave empty if you don't use it
94 instance_id =
94 instance_id =
95
95
96 ####################################
96 ####################################
97 ### CELERY CONFIG ####
97 ### CELERY CONFIG ####
98 ####################################
98 ####################################
99 use_celery = false
99 use_celery = false
100 broker.host = localhost
100 broker.host = localhost
101 broker.vhost = rabbitmqhost
101 broker.vhost = rabbitmqhost
102 broker.port = 5672
102 broker.port = 5672
103 broker.user = rabbitmq
103 broker.user = rabbitmq
104 broker.password = qweqwe
104 broker.password = qweqwe
105
105
106 celery.imports = rhodecode.lib.celerylib.tasks
106 celery.imports = rhodecode.lib.celerylib.tasks
107
107
108 celery.result.backend = amqp
108 celery.result.backend = amqp
109 celery.result.dburi = amqp://
109 celery.result.dburi = amqp://
110 celery.result.serialier = json
110 celery.result.serialier = json
111
111
112 #celery.send.task.error.emails = true
112 #celery.send.task.error.emails = true
113 #celery.amqp.task.result.expires = 18000
113 #celery.amqp.task.result.expires = 18000
114
114
115 celeryd.concurrency = 2
115 celeryd.concurrency = 2
116 #celeryd.log.file = celeryd.log
116 #celeryd.log.file = celeryd.log
117 celeryd.log.level = debug
117 celeryd.log.level = debug
118 celeryd.max.tasks.per.child = 1
118 celeryd.max.tasks.per.child = 1
119
119
120 #tasks will never be sent to the queue, but executed locally instead.
120 #tasks will never be sent to the queue, but executed locally instead.
121 celery.always.eager = false
121 celery.always.eager = false
122
122
123 ####################################
123 ####################################
124 ### BEAKER CACHE ####
124 ### BEAKER CACHE ####
125 ####################################
125 ####################################
126 beaker.cache.data_dir=%(here)s/data/cache/data
126 beaker.cache.data_dir=%(here)s/data/cache/data
127 beaker.cache.lock_dir=%(here)s/data/cache/lock
127 beaker.cache.lock_dir=%(here)s/data/cache/lock
128
128
129 beaker.cache.regions=super_short_term,short_term,long_term,sql_cache_short,sql_cache_med,sql_cache_long
129 beaker.cache.regions=super_short_term,short_term,long_term,sql_cache_short,sql_cache_med,sql_cache_long
130
130
131 beaker.cache.super_short_term.type=memory
131 beaker.cache.super_short_term.type=memory
132 beaker.cache.super_short_term.expire=10
132 beaker.cache.super_short_term.expire=10
133 beaker.cache.super_short_term.key_length = 256
133 beaker.cache.super_short_term.key_length = 256
134
134
135 beaker.cache.short_term.type=memory
135 beaker.cache.short_term.type=memory
136 beaker.cache.short_term.expire=60
136 beaker.cache.short_term.expire=60
137 beaker.cache.short_term.key_length = 256
137 beaker.cache.short_term.key_length = 256
138
138
139 beaker.cache.long_term.type=memory
139 beaker.cache.long_term.type=memory
140 beaker.cache.long_term.expire=36000
140 beaker.cache.long_term.expire=36000
141 beaker.cache.long_term.key_length = 256
141 beaker.cache.long_term.key_length = 256
142
142
143 beaker.cache.sql_cache_short.type=memory
143 beaker.cache.sql_cache_short.type=memory
144 beaker.cache.sql_cache_short.expire=10
144 beaker.cache.sql_cache_short.expire=10
145 beaker.cache.sql_cache_short.key_length = 256
145 beaker.cache.sql_cache_short.key_length = 256
146
146
147 beaker.cache.sql_cache_med.type=memory
147 beaker.cache.sql_cache_med.type=memory
148 beaker.cache.sql_cache_med.expire=360
148 beaker.cache.sql_cache_med.expire=360
149 beaker.cache.sql_cache_med.key_length = 256
149 beaker.cache.sql_cache_med.key_length = 256
150
150
151 beaker.cache.sql_cache_long.type=file
151 beaker.cache.sql_cache_long.type=file
152 beaker.cache.sql_cache_long.expire=3600
152 beaker.cache.sql_cache_long.expire=3600
153 beaker.cache.sql_cache_long.key_length = 256
153 beaker.cache.sql_cache_long.key_length = 256
154
154
155 ####################################
155 ####################################
156 ### BEAKER SESSION ####
156 ### BEAKER SESSION ####
157 ####################################
157 ####################################
158 ## Type of storage used for the session, current types are
158 ## Type of storage used for the session, current types are
159 ## dbm, file, memcached, database, and memory.
159 ## dbm, file, memcached, database, and memory.
160 ## The storage uses the Container API
160 ## The storage uses the Container API
161 ## that is also used by the cache system.
161 ## that is also used by the cache system.
162
162
163 ## db session example
163 ## db session example
164
164
165 #beaker.session.type = ext:database
165 #beaker.session.type = ext:database
166 #beaker.session.sa.url = postgresql://postgres:qwe@localhost/rhodecode
166 #beaker.session.sa.url = postgresql://postgres:qwe@localhost/rhodecode
167 #beaker.session.table_name = db_session
167 #beaker.session.table_name = db_session
168
168
169 ## encrypted cookie session, good for many instances
169 ## encrypted cookie session, good for many instances
170 #beaker.session.type = cookie
170 #beaker.session.type = cookie
171
171
172 beaker.session.type = file
172 beaker.session.type = file
173 beaker.session.key = rhodecode
173 beaker.session.key = rhodecode
174 # secure cookie requires AES python libraries
174 # secure cookie requires AES python libraries
175 #beaker.session.encrypt_key = g654dcno0-9873jhgfreyu
175 #beaker.session.encrypt_key = g654dcno0-9873jhgfreyu
176 #beaker.session.validate_key = 9712sds2212c--zxc123
176 #beaker.session.validate_key = 9712sds2212c--zxc123
177 beaker.session.timeout = 36000
177 beaker.session.timeout = 36000
178 beaker.session.httponly = true
178 beaker.session.httponly = true
179
179
180 ## uncomment for https secure cookie
180 ## uncomment for https secure cookie
181 beaker.session.secure = false
181 beaker.session.secure = false
182
182
183 ##auto save the session so you don't have to call .save()
183 ##auto save the session so you don't have to call .save()
184 beaker.session.auto = False
184 beaker.session.auto = False
185
185
186 ##true: expire at browser close
186 ##true: expire at browser close
187 #beaker.session.cookie_expires = 3600
187 #beaker.session.cookie_expires = 3600
188
188
189
189
190 ################################################################################
190 ################################################################################
191 ## WARNING: *THE LINE BELOW MUST BE UNCOMMENTED ON A PRODUCTION ENVIRONMENT* ##
191 ## WARNING: *THE LINE BELOW MUST BE UNCOMMENTED ON A PRODUCTION ENVIRONMENT* ##
192 ## Debug mode will enable the interactive debugging tool, allowing ANYONE to ##
192 ## Debug mode will enable the interactive debugging tool, allowing ANYONE to ##
193 ## execute malicious code after an exception is raised. ##
193 ## execute malicious code after an exception is raised. ##
194 ################################################################################
194 ################################################################################
195 set debug = false
195 set debug = false
196
196
197 ##################################
197 ##################################
198 ### LOGVIEW CONFIG ###
198 ### LOGVIEW CONFIG ###
199 ##################################
199 ##################################
200 logview.sqlalchemy = #faa
200 logview.sqlalchemy = #faa
201 logview.pylons.templating = #bfb
201 logview.pylons.templating = #bfb
202 logview.pylons.util = #eee
202 logview.pylons.util = #eee
203
203
204 #########################################################
204 #########################################################
205 ### DB CONFIGS - EACH DB WILL HAVE IT'S OWN CONFIG ###
205 ### DB CONFIGS - EACH DB WILL HAVE IT'S OWN CONFIG ###
206 #########################################################
206 #########################################################
207 #sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode.db
207 #sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode.db
208 sqlalchemy.db1.url = postgresql://postgres:qwe@localhost/rhodecode
208 sqlalchemy.db1.url = postgresql://postgres:qwe@localhost/rhodecode
209 sqlalchemy.db1.echo = false
209 sqlalchemy.db1.echo = false
210 sqlalchemy.db1.pool_recycle = 3600
210 sqlalchemy.db1.pool_recycle = 3600
211 sqlalchemy.convert_unicode = true
211 sqlalchemy.convert_unicode = true
212
212
213 ################################
213 ################################
214 ### LOGGING CONFIGURATION ####
214 ### LOGGING CONFIGURATION ####
215 ################################
215 ################################
216 [loggers]
216 [loggers]
217 keys = root, routes, rhodecode, sqlalchemy, beaker, templates
217 keys = root, routes, rhodecode, sqlalchemy, beaker, templates, whoosh_indexer
218
218
219 [handlers]
219 [handlers]
220 keys = console, console_sql
220 keys = console, console_sql
221
221
222 [formatters]
222 [formatters]
223 keys = generic, color_formatter, color_formatter_sql
223 keys = generic, color_formatter, color_formatter_sql
224
224
225 #############
225 #############
226 ## LOGGERS ##
226 ## LOGGERS ##
227 #############
227 #############
228 [logger_root]
228 [logger_root]
229 level = NOTSET
229 level = NOTSET
230 handlers = console
230 handlers = console
231
231
232 [logger_routes]
232 [logger_routes]
233 level = DEBUG
233 level = DEBUG
234 handlers =
234 handlers =
235 qualname = routes.middleware
235 qualname = routes.middleware
236 # "level = DEBUG" logs the route matched and routing variables.
236 # "level = DEBUG" logs the route matched and routing variables.
237 propagate = 1
237 propagate = 1
238
238
239 [logger_beaker]
239 [logger_beaker]
240 level = DEBUG
240 level = DEBUG
241 handlers =
241 handlers =
242 qualname = beaker.container
242 qualname = beaker.container
243 propagate = 1
243 propagate = 1
244
244
245 [logger_templates]
245 [logger_templates]
246 level = INFO
246 level = INFO
247 handlers =
247 handlers =
248 qualname = pylons.templating
248 qualname = pylons.templating
249 propagate = 1
249 propagate = 1
250
250
251 [logger_rhodecode]
251 [logger_rhodecode]
252 level = DEBUG
252 level = DEBUG
253 handlers =
253 handlers =
254 qualname = rhodecode
254 qualname = rhodecode
255 propagate = 1
255 propagate = 1
256
256
257 [logger_sqlalchemy]
257 [logger_sqlalchemy]
258 level = INFO
258 level = INFO
259 handlers = console_sql
259 handlers = console_sql
260 qualname = sqlalchemy.engine
260 qualname = sqlalchemy.engine
261 propagate = 0
261 propagate = 0
262
262
263 [logger_whoosh_indexer]
264 level = DEBUG
265 handlers =
266 qualname = whoosh_indexer
267 propagate = 1
268
263 ##############
269 ##############
264 ## HANDLERS ##
270 ## HANDLERS ##
265 ##############
271 ##############
266
272
267 [handler_console]
273 [handler_console]
268 class = StreamHandler
274 class = StreamHandler
269 args = (sys.stderr,)
275 args = (sys.stderr,)
270 level = INFO
276 level = INFO
271 formatter = generic
277 formatter = generic
272
278
273 [handler_console_sql]
279 [handler_console_sql]
274 class = StreamHandler
280 class = StreamHandler
275 args = (sys.stderr,)
281 args = (sys.stderr,)
276 level = WARN
282 level = WARN
277 formatter = generic
283 formatter = generic
278
284
279 ################
285 ################
280 ## FORMATTERS ##
286 ## FORMATTERS ##
281 ################
287 ################
282
288
283 [formatter_generic]
289 [formatter_generic]
284 format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
290 format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
285 datefmt = %Y-%m-%d %H:%M:%S
291 datefmt = %Y-%m-%d %H:%M:%S
286
292
287 [formatter_color_formatter]
293 [formatter_color_formatter]
288 class=rhodecode.lib.colored_formatter.ColorFormatter
294 class=rhodecode.lib.colored_formatter.ColorFormatter
289 format= %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
295 format= %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
290 datefmt = %Y-%m-%d %H:%M:%S
296 datefmt = %Y-%m-%d %H:%M:%S
291
297
292 [formatter_color_formatter_sql]
298 [formatter_color_formatter_sql]
293 class=rhodecode.lib.colored_formatter.ColorFormatterSql
299 class=rhodecode.lib.colored_formatter.ColorFormatterSql
294 format= %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
300 format= %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
295 datefmt = %Y-%m-%d %H:%M:%S
301 datefmt = %Y-%m-%d %H:%M:%S
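Note that production.ini pins its `console` handler to `level = INFO` with the `generic` formatter, so even though `[logger_whoosh_indexer]` is declared at DEBUG, the indexer's DEBUG records are dropped at the handler and only INFO and above reach stderr. For a one-off change without editing the file, the runtime equivalent is a plain `logging` call (hypothetical maintenance snippet, not a RhodeCode API):

```python
import logging

# Same effect as setting "level = WARNING" under [logger_whoosh_indexer]:
# the indexer still runs, but only warnings and errors are emitted.
logging.getLogger('whoosh_indexer').setLevel(logging.WARNING)
```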
@@ -1,305 +1,311 b''
1 ################################################################################
1 ################################################################################
2 ################################################################################
2 ################################################################################
3 # RhodeCode - Pylons environment configuration #
3 # RhodeCode - Pylons environment configuration #
4 # #
4 # #
5 # The %(here)s variable will be replaced with the parent directory of this file#
5 # The %(here)s variable will be replaced with the parent directory of this file#
6 ################################################################################
6 ################################################################################
7
7
8 [DEFAULT]
8 [DEFAULT]
9 debug = true
9 debug = true
10 pdebug = false
10 pdebug = false
11 ################################################################################
11 ################################################################################
12 ## Uncomment and replace with the address which should receive ##
12 ## Uncomment and replace with the address which should receive ##
13 ## any error reports after application crash ##
13 ## any error reports after application crash ##
14 ## Additionally those settings will be used by RhodeCode mailing system ##
14 ## Additionally those settings will be used by RhodeCode mailing system ##
15 ################################################################################
15 ################################################################################
16 #email_to = admin@localhost
16 #email_to = admin@localhost
17 #error_email_from = paste_error@localhost
17 #error_email_from = paste_error@localhost
18 #app_email_from = rhodecode-noreply@localhost
18 #app_email_from = rhodecode-noreply@localhost
19 #error_message =
19 #error_message =
20 #email_prefix = [RhodeCode]
20 #email_prefix = [RhodeCode]
21
21
22 #smtp_server = mail.server.com
22 #smtp_server = mail.server.com
23 #smtp_username =
23 #smtp_username =
24 #smtp_password =
24 #smtp_password =
25 #smtp_port =
25 #smtp_port =
26 #smtp_use_tls = false
26 #smtp_use_tls = false
27 #smtp_use_ssl = true
27 #smtp_use_ssl = true
28 # Specify available auth parameters here (e.g. LOGIN PLAIN CRAM-MD5, etc.)
28 # Specify available auth parameters here (e.g. LOGIN PLAIN CRAM-MD5, etc.)
29 #smtp_auth =
29 #smtp_auth =
30
30
31 [server:main]
31 [server:main]
32 ##nr of threads to spawn
32 ##nr of threads to spawn
33 threadpool_workers = 5
33 threadpool_workers = 5
34
34
35 ##max requests before thread respawn
35 ##max requests before thread respawn
36 threadpool_max_requests = 10
36 threadpool_max_requests = 10
37
37
38 ##option to use threads instead of processes
38 ##option to use threads instead of processes
39 use_threadpool = true
39 use_threadpool = true
40
40
41 use = egg:Paste#http
41 use = egg:Paste#http
42 host = 127.0.0.1
42 host = 127.0.0.1
43 port = 5000
43 port = 5000
44
44
45 [app:main]
45 [app:main]
46 use = egg:rhodecode
46 use = egg:rhodecode
47 full_stack = true
47 full_stack = true
48 static_files = true
48 static_files = true
49 lang = en
49 lang = en
50 cache_dir = %(here)s/data
50 cache_dir = %(here)s/data
51 index_dir = %(here)s/data/index
51 index_dir = %(here)s/data/index
52 app_instance_uuid = ${app_instance_uuid}
52 app_instance_uuid = ${app_instance_uuid}
53 cut_off_limit = 256000
53 cut_off_limit = 256000
54 force_https = false
54 force_https = false
55 commit_parse_limit = 50
55 commit_parse_limit = 50
56 use_gravatar = true
56 use_gravatar = true
57 container_auth_enabled = false
57 container_auth_enabled = false
58 proxypass_auth_enabled = false
58 proxypass_auth_enabled = false
59 default_encoding = utf8
59 default_encoding = utf8
60
60
61 ## overwrite schema of clone url
61 ## overwrite schema of clone url
62 ## available vars:
62 ## available vars:
63 ## scheme - http/https
63 ## scheme - http/https
64 ## user - current user
64 ## user - current user
65 ## pass - password
65 ## pass - password
66 ## netloc - network location
66 ## netloc - network location
67 ## path - usually repo_name
67 ## path - usually repo_name
68
68
69 #clone_uri = {scheme}://{user}{pass}{netloc}{path}
69 #clone_uri = {scheme}://{user}{pass}{netloc}{path}
70
70
71 ## issue tracking mapping for commit messages
71 ## issue tracking mapping for commit messages
72 ## comment out issue_pat, issue_server, issue_prefix to disable
72 ## comment out issue_pat, issue_server, issue_prefix to disable
73
73
74 ## pattern to get the issues from commit messages
74 ## pattern to get the issues from commit messages
75 ## default one used here is #<numbers> with a regex passive group for `#`
75 ## default one used here is #<numbers> with a regex passive group for `#`
76 ## {id} will be all groups matched from this pattern
76 ## {id} will be all groups matched from this pattern
77
77
78 issue_pat = (?:\s*#)(\d+)
78 issue_pat = (?:\s*#)(\d+)
79
79
80 ## server url to the issue, each {id} will be replaced with match
80 ## server url to the issue, each {id} will be replaced with match
81 ## fetched from the regex and {repo} is replaced with repository name
81 ## fetched from the regex and {repo} is replaced with repository name
82
82
83 issue_server_link = https://myissueserver.com/{repo}/issue/{id}
83 issue_server_link = https://myissueserver.com/{repo}/issue/{id}
84
84
85 ## prefix to add to the link to indicate it's a URL
85 ## prefix to add to the link to indicate it's a URL
86 ## #314 will be replaced by <issue_prefix><id>
86 ## #314 will be replaced by <issue_prefix><id>
87
87
88 issue_prefix = #
88 issue_prefix = #
89
89
90 ## instance-id prefix
90 ## instance-id prefix
91 ## a prefix key for this instance used for cache invalidation when running
91 ## a prefix key for this instance used for cache invalidation when running
92 ## multiple instances of rhodecode, make sure it's globally unique for
92 ## multiple instances of rhodecode, make sure it's globally unique for
93 ## all running rhodecode instances. Leave empty if you don't use it
93 ## all running rhodecode instances. Leave empty if you don't use it
94 instance_id =
94 instance_id =
95
95
96 ####################################
96 ####################################
97 ### CELERY CONFIG ####
97 ### CELERY CONFIG ####
98 ####################################
98 ####################################
99 use_celery = false
99 use_celery = false
100 broker.host = localhost
100 broker.host = localhost
101 broker.vhost = rabbitmqhost
101 broker.vhost = rabbitmqhost
102 broker.port = 5672
102 broker.port = 5672
103 broker.user = rabbitmq
103 broker.user = rabbitmq
104 broker.password = qweqwe
104 broker.password = qweqwe
105
105
106 celery.imports = rhodecode.lib.celerylib.tasks
106 celery.imports = rhodecode.lib.celerylib.tasks
107
107
108 celery.result.backend = amqp
108 celery.result.backend = amqp
109 celery.result.dburi = amqp://
109 celery.result.dburi = amqp://
110 celery.result.serialier = json
110 celery.result.serialier = json
111
111
112 #celery.send.task.error.emails = true
112 #celery.send.task.error.emails = true
113 #celery.amqp.task.result.expires = 18000
113 #celery.amqp.task.result.expires = 18000
114
114
115 celeryd.concurrency = 2
115 celeryd.concurrency = 2
116 #celeryd.log.file = celeryd.log
116 #celeryd.log.file = celeryd.log
117 celeryd.log.level = debug
117 celeryd.log.level = debug
118 celeryd.max.tasks.per.child = 1
118 celeryd.max.tasks.per.child = 1
119
119
120 #tasks will never be sent to the queue, but executed locally instead.
120 #tasks will never be sent to the queue, but executed locally instead.
121 celery.always.eager = false
121 celery.always.eager = false
122
122
123 ####################################
123 ####################################
124 ### BEAKER CACHE ####
124 ### BEAKER CACHE ####
125 ####################################
125 ####################################
126 beaker.cache.data_dir=%(here)s/data/cache/data
126 beaker.cache.data_dir=%(here)s/data/cache/data
127 beaker.cache.lock_dir=%(here)s/data/cache/lock
127 beaker.cache.lock_dir=%(here)s/data/cache/lock
128
128
129 beaker.cache.regions=super_short_term,short_term,long_term,sql_cache_short,sql_cache_med,sql_cache_long
129 beaker.cache.regions=super_short_term,short_term,long_term,sql_cache_short,sql_cache_med,sql_cache_long
130
130
131 beaker.cache.super_short_term.type=memory
131 beaker.cache.super_short_term.type=memory
132 beaker.cache.super_short_term.expire=10
132 beaker.cache.super_short_term.expire=10
133 beaker.cache.super_short_term.key_length = 256
133 beaker.cache.super_short_term.key_length = 256
134
134
135 beaker.cache.short_term.type=memory
135 beaker.cache.short_term.type=memory
136 beaker.cache.short_term.expire=60
136 beaker.cache.short_term.expire=60
137 beaker.cache.short_term.key_length = 256
137 beaker.cache.short_term.key_length = 256
138
138
139 beaker.cache.long_term.type=memory
139 beaker.cache.long_term.type=memory
140 beaker.cache.long_term.expire=36000
140 beaker.cache.long_term.expire=36000
141 beaker.cache.long_term.key_length = 256
141 beaker.cache.long_term.key_length = 256
142
142
143 beaker.cache.sql_cache_short.type=memory
143 beaker.cache.sql_cache_short.type=memory
144 beaker.cache.sql_cache_short.expire=10
144 beaker.cache.sql_cache_short.expire=10
145 beaker.cache.sql_cache_short.key_length = 256
145 beaker.cache.sql_cache_short.key_length = 256
146
146
147 beaker.cache.sql_cache_med.type=memory
147 beaker.cache.sql_cache_med.type=memory
148 beaker.cache.sql_cache_med.expire=360
148 beaker.cache.sql_cache_med.expire=360
149 beaker.cache.sql_cache_med.key_length = 256
149 beaker.cache.sql_cache_med.key_length = 256
150
150
151 beaker.cache.sql_cache_long.type=file
151 beaker.cache.sql_cache_long.type=file
152 beaker.cache.sql_cache_long.expire=3600
152 beaker.cache.sql_cache_long.expire=3600
153 beaker.cache.sql_cache_long.key_length = 256
153 beaker.cache.sql_cache_long.key_length = 256
154
154
155 ####################################
155 ####################################
156 ### BEAKER SESSION ####
156 ### BEAKER SESSION ####
157 ####################################
157 ####################################
158 ## Type of storage used for the session, current types are
158 ## Type of storage used for the session, current types are
159 ## dbm, file, memcached, database, and memory.
159 ## dbm, file, memcached, database, and memory.
160 ## The storage uses the Container API
160 ## The storage uses the Container API
161 ## that is also used by the cache system.
161 ## that is also used by the cache system.
162
162
163 ## db session example
163 ## db session example
164
164
165 #beaker.session.type = ext:database
165 #beaker.session.type = ext:database
166 #beaker.session.sa.url = postgresql://postgres:qwe@localhost/rhodecode
166 #beaker.session.sa.url = postgresql://postgres:qwe@localhost/rhodecode
167 #beaker.session.table_name = db_session
167 #beaker.session.table_name = db_session
168
168
169 ## encrypted cookie session, good for many instances
169 ## encrypted cookie session, good for many instances
170 #beaker.session.type = cookie
170 #beaker.session.type = cookie
171
171
172 beaker.session.type = file
172 beaker.session.type = file
173 beaker.session.key = rhodecode
173 beaker.session.key = rhodecode
174 # secure cookie requires AES python libraries
174 # secure cookie requires AES python libraries
175 #beaker.session.encrypt_key = ${app_instance_secret}
175 #beaker.session.encrypt_key = ${app_instance_secret}
176 #beaker.session.validate_key = ${app_instance_secret}
176 #beaker.session.validate_key = ${app_instance_secret}
177 beaker.session.timeout = 36000
177 beaker.session.timeout = 36000
178 beaker.session.httponly = true
178 beaker.session.httponly = true
179
179
180 ## uncomment for https secure cookie
180 ## uncomment for https secure cookie
181 beaker.session.secure = false
181 beaker.session.secure = false
182
182
183 ##auto save the session so you don't have to call .save()
183 ##auto save the session so you don't have to call .save()
184 beaker.session.auto = False
184 beaker.session.auto = False
185
185
186 ##true: expire at browser close
186 ##true: expire at browser close
187 #beaker.session.cookie_expires = 3600
187 #beaker.session.cookie_expires = 3600
188
188
189
189
190 ################################################################################
190 ################################################################################
191 ## WARNING: *THE LINE BELOW MUST BE UNCOMMENTED ON A PRODUCTION ENVIRONMENT* ##
191 ## WARNING: *THE LINE BELOW MUST BE UNCOMMENTED ON A PRODUCTION ENVIRONMENT* ##
192 ## Debug mode will enable the interactive debugging tool, allowing ANYONE to ##
192 ## Debug mode will enable the interactive debugging tool, allowing ANYONE to ##
193 ## execute malicious code after an exception is raised. ##
193 ## execute malicious code after an exception is raised. ##
194 ################################################################################
194 ################################################################################
195 set debug = false
195 set debug = false
196
196
197 ##################################
197 ##################################
198 ### LOGVIEW CONFIG ###
198 ### LOGVIEW CONFIG ###
199 ##################################
199 ##################################
200 logview.sqlalchemy = #faa
200 logview.sqlalchemy = #faa
201 logview.pylons.templating = #bfb
201 logview.pylons.templating = #bfb
202 logview.pylons.util = #eee
202 logview.pylons.util = #eee
203
203
204 #########################################################
204 #########################################################
205 ### DB CONFIGS - EACH DB WILL HAVE IT'S OWN CONFIG ###
205 ### DB CONFIGS - EACH DB WILL HAVE IT'S OWN CONFIG ###
206 #########################################################
206 #########################################################
207
207
208 # SQLITE [default]
208 # SQLITE [default]
209 sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode.db
209 sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode.db
210
210
211 # POSTGRESQL
211 # POSTGRESQL
212 # sqlalchemy.db1.url = postgresql://user:pass@localhost/rhodecode
212 # sqlalchemy.db1.url = postgresql://user:pass@localhost/rhodecode
213
213
214 # MySQL
214 # MySQL
215 # sqlalchemy.db1.url = mysql://user:pass@localhost/rhodecode
215 # sqlalchemy.db1.url = mysql://user:pass@localhost/rhodecode
216
216
217 # see sqlalchemy docs for others
217 # see sqlalchemy docs for others
218
218
219 sqlalchemy.db1.echo = false
219 sqlalchemy.db1.echo = false
220 sqlalchemy.db1.pool_recycle = 3600
220 sqlalchemy.db1.pool_recycle = 3600
221 sqlalchemy.convert_unicode = true
221 sqlalchemy.convert_unicode = true
222
222
223 ################################
223 ################################
224 ### LOGGING CONFIGURATION ####
224 ### LOGGING CONFIGURATION ####
225 ################################
225 ################################
226 [loggers]
226 [loggers]
227 keys = root, routes, rhodecode, sqlalchemy, beaker, templates
227 keys = root, routes, rhodecode, sqlalchemy, beaker, templates, whoosh_indexer
228
228
229 [handlers]
229 [handlers]
230 keys = console, console_sql
230 keys = console, console_sql
231
231
232 [formatters]
232 [formatters]
233 keys = generic, color_formatter, color_formatter_sql
233 keys = generic, color_formatter, color_formatter_sql
234
234
235 #############
235 #############
236 ## LOGGERS ##
236 ## LOGGERS ##
237 #############
237 #############
238 [logger_root]
238 [logger_root]
239 level = NOTSET
239 level = NOTSET
240 handlers = console
240 handlers = console
241
241
242 [logger_routes]
242 [logger_routes]
243 level = DEBUG
243 level = DEBUG
244 handlers =
244 handlers =
245 qualname = routes.middleware
245 qualname = routes.middleware
246 # "level = DEBUG" logs the route matched and routing variables.
246 # "level = DEBUG" logs the route matched and routing variables.
247 propagate = 1
247 propagate = 1
248
248
249 [logger_beaker]
249 [logger_beaker]
250 level = DEBUG
250 level = DEBUG
251 handlers =
251 handlers =
252 qualname = beaker.container
252 qualname = beaker.container
253 propagate = 1
253 propagate = 1
254
254
255 [logger_templates]
255 [logger_templates]
256 level = INFO
256 level = INFO
257 handlers =
257 handlers =
258 qualname = pylons.templating
258 qualname = pylons.templating
259 propagate = 1
259 propagate = 1
260
260
261 [logger_rhodecode]
261 [logger_rhodecode]
262 level = DEBUG
262 level = DEBUG
263 handlers =
263 handlers =
264 qualname = rhodecode
264 qualname = rhodecode
265 propagate = 1
265 propagate = 1
266
266
267 [logger_sqlalchemy]
267 [logger_sqlalchemy]
268 level = INFO
268 level = INFO
269 handlers = console_sql
269 handlers = console_sql
270 qualname = sqlalchemy.engine
270 qualname = sqlalchemy.engine
271 propagate = 0
271 propagate = 0
272
272
273 [logger_whoosh_indexer]
274 level = DEBUG
275 handlers =
276 qualname = whoosh_indexer
277 propagate = 1
278
273 ##############
279 ##############
274 ## HANDLERS ##
280 ## HANDLERS ##
275 ##############
281 ##############
276
282
277 [handler_console]
283 [handler_console]
278 class = StreamHandler
284 class = StreamHandler
279 args = (sys.stderr,)
285 args = (sys.stderr,)
280 level = INFO
286 level = INFO
281 formatter = generic
287 formatter = generic
282
288
283 [handler_console_sql]
289 [handler_console_sql]
284 class = StreamHandler
290 class = StreamHandler
285 args = (sys.stderr,)
291 args = (sys.stderr,)
286 level = WARN
292 level = WARN
287 formatter = generic
293 formatter = generic
288
294
289 ################
295 ################
290 ## FORMATTERS ##
296 ## FORMATTERS ##
291 ################
297 ################
292
298
293 [formatter_generic]
299 [formatter_generic]
294 format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
300 format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
295 datefmt = %Y-%m-%d %H:%M:%S
301 datefmt = %Y-%m-%d %H:%M:%S
296
302
297 [formatter_color_formatter]
303 [formatter_color_formatter]
298 class=rhodecode.lib.colored_formatter.ColorFormatter
304 class=rhodecode.lib.colored_formatter.ColorFormatter
299 format= %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
305 format= %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
300 datefmt = %Y-%m-%d %H:%M:%S
306 datefmt = %Y-%m-%d %H:%M:%S
301
307
302 [formatter_color_formatter_sql]
308 [formatter_color_formatter_sql]
303 class=rhodecode.lib.colored_formatter.ColorFormatterSql
309 class=rhodecode.lib.colored_formatter.ColorFormatterSql
304 format= %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
310 format= %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
305 datefmt = %Y-%m-%d %H:%M:%S
311 datefmt = %Y-%m-%d %H:%M:%S
@@ -1,229 +1,230 b''
# -*- coding: utf-8 -*-
"""
    rhodecode.lib.indexers.__init__
    ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

    Whoosh indexing module for RhodeCode

    :created_on: Aug 17, 2010
    :author: marcink
    :copyright: (C) 2010-2012 Marcin Kuzminski <marcin@python-works.com>
    :license: GPLv3, see COPYING for more details.
"""
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import os
import sys
import traceback
import logging
from os.path import dirname as dn, join as jn

#to get the rhodecode import
sys.path.append(dn(dn(dn(os.path.realpath(__file__)))))

from string import strip
from shutil import rmtree

from whoosh.analysis import RegexTokenizer, LowercaseFilter, StopFilter
from whoosh.fields import TEXT, ID, STORED, Schema, FieldType
from whoosh.index import create_in, open_dir
from whoosh.formats import Characters
from whoosh.highlight import highlight, HtmlFormatter, ContextFragmenter

from webhelpers.html.builder import escape
from sqlalchemy import engine_from_config

from rhodecode.model import init_model
from rhodecode.model.scm import ScmModel
from rhodecode.model.repo import RepoModel
from rhodecode.config.environment import load_environment
from rhodecode.lib import LANGUAGES_EXTENSIONS_MAP, LazyProperty
from rhodecode.lib.utils import BasePasterCommand, Command, add_cache

# EXTENSIONS WE WANT TO INDEX CONTENT OFF
INDEX_EXTENSIONS = LANGUAGES_EXTENSIONS_MAP.keys()

# CUSTOM ANALYZER wordsplit + lowercase filter
ANALYZER = RegexTokenizer(expression=r"\w+") | LowercaseFilter()


#INDEX SCHEMA DEFINITION
SCHEMA = Schema(
    owner=TEXT(),
    repository=TEXT(stored=True),
    path=TEXT(stored=True),
    content=FieldType(format=Characters(), analyzer=ANALYZER,
                      scorable=True, stored=True),
    modtime=STORED(),
    extension=TEXT(stored=True)
)

IDX_NAME = 'HG_INDEX'
FORMATTER = HtmlFormatter('span', between='\n<span class="break">...</span>\n')
FRAGMENTER = ContextFragmenter(200)

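SCHEMA, ANALYZER and IDX_NAME above define the contract between the indexing daemon and the search views: which fields are stored, which are searchable, and how content is tokenized. A short sketch of how such a schema is exercised with the Whoosh API, using a throwaway index directory and made-up document values:

import os
from whoosh.index import create_in, open_dir
from whoosh.qparser import QueryParser

from rhodecode.lib.indexers import SCHEMA, IDX_NAME

index_location = '/tmp/rc-index-sketch'  # illustrative location
if not os.path.exists(index_location):
    os.makedirs(index_location)

# build a tiny index holding one document shaped like the schema above
idx = create_in(index_location, SCHEMA, indexname=IDX_NAME)
writer = idx.writer()
writer.add_document(owner=u'admin', repository=u'demo-repo',
                    path=u'demo-repo/setup.py',
                    content=u'from setuptools import setup',
                    modtime=1325376000, extension=u'py')
writer.commit()

# search the stored index on the indexed content field
idx = open_dir(index_location, indexname=IDX_NAME)
searcher = idx.searcher()
query = QueryParser('content', schema=SCHEMA).parse(u'setuptools')
for hit in searcher.search(query):
    print hit['repository'], hit['path']
searcher.close()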
class MakeIndex(BasePasterCommand):

    max_args = 1
    min_args = 1

    usage = "CONFIG_FILE"
    summary = "Creates index for full text search given configuration file"
    group_name = "RhodeCode"
    takes_config_file = -1
    parser = Command.standard_parser(verbose=True)

    def command(self):
        logging.config.fileConfig(self.path_to_ini_file)
        from pylons import config
        add_cache(config)
        engine = engine_from_config(config, 'sqlalchemy.db1.')
        init_model(engine)

        index_location = config['index_dir']
        repo_location = self.options.repo_location \
            if self.options.repo_location else RepoModel().repos_path
        repo_list = map(strip, self.options.repo_list.split(',')) \
            if self.options.repo_list else None

        #======================================================================
        # WHOOSH DAEMON
        #======================================================================
        from rhodecode.lib.pidlock import LockHeld, DaemonLock
        from rhodecode.lib.indexers.daemon import WhooshIndexingDaemon
        try:
            l = DaemonLock(file_=jn(dn(dn(index_location)), 'make_index.lock'))
            WhooshIndexingDaemon(index_location=index_location,
                                 repo_location=repo_location,
                                 repo_list=repo_list)\
                .run(full_index=self.options.full_index)
            l.release()
        except LockHeld:
            sys.exit(1)

    def update_parser(self):
        self.parser.add_option('--repo-location',
                               action='store',
                               dest='repo_location',
                               help="Specifies repositories location to index OPTIONAL",
                               )
        self.parser.add_option('--index-only',
                               action='store',
                               dest='repo_list',
                               help="Specifies a comma separated list of repositores "
                                    "to build index on OPTIONAL",
                               )
        self.parser.add_option('-f',
                               action='store_true',
                               dest='full_index',
                               help="Specifies that index should be made full i.e"
                                    " destroy old and build from scratch",
                               default=False)

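The change to this class is the new first line of command(): logging is no longer left unconfigured, it is read from the same .ini file the paster command was started with, so the [logger_whoosh_indexer] section above controls the indexer's verbosity. self.path_to_ini_file is remembered by BasePasterCommand.bootstrap_config in rhodecode.lib.utils further down. A stripped-down sketch of the pattern, with an illustrative ini path standing in for the real command-line argument:

import logging
import logging.config

def run_indexer(ini_path):
    # hand logging over to the ini exactly once per process
    logging.config.fileConfig(ini_path)

    # this logger is governed by [logger_whoosh_indexer] from the ini
    log = logging.getLogger('whoosh_indexer')
    log.info('indexing started')
    try:
        pass  # ... build or update the Whoosh index here ...
    except Exception:
        log.exception('indexing failed')

run_indexer('production.ini')  # illustrative path; the command uses self.path_to_ini_file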
class ResultWrapper(object):
    def __init__(self, search_type, searcher, matcher, highlight_items):
        self.search_type = search_type
        self.searcher = searcher
        self.matcher = matcher
        self.highlight_items = highlight_items
        self.fragment_size = 200

    @LazyProperty
    def doc_ids(self):
        docs_id = []
        while self.matcher.is_active():
            docnum = self.matcher.id()
            chunks = [offsets for offsets in self.get_chunks()]
            docs_id.append([docnum, chunks])
            self.matcher.next()
        return docs_id

    def __str__(self):
        return '<%s at %s>' % (self.__class__.__name__, len(self.doc_ids))

    def __repr__(self):
        return self.__str__()

    def __len__(self):
        return len(self.doc_ids)

    def __iter__(self):
        """
        Allows Iteration over results,and lazy generate content

        *Requires* implementation of ``__getitem__`` method.
        """
        for docid in self.doc_ids:
            yield self.get_full_content(docid)

    def __getitem__(self, key):
        """
        Slicing of resultWrapper
        """
        i, j = key.start, key.stop

        slices = []
        for docid in self.doc_ids[i:j]:
            slices.append(self.get_full_content(docid))
        return slices

    def get_full_content(self, docid):
        res = self.searcher.stored_fields(docid[0])
        f_path = res['path'][res['path'].find(res['repository']) \
                             + len(res['repository']):].lstrip('/')

        content_short = self.get_short_content(res, docid[1])
        res.update({'content_short': content_short,
                    'content_short_hl': self.highlight(content_short),
                    'f_path': f_path})

        return res

    def get_short_content(self, res, chunks):

        return ''.join([res['content'][chunk[0]:chunk[1]] for chunk in chunks])

    def get_chunks(self):
        """
        Smart function that implements chunking the content
        but not overlap chunks so it doesn't highlight the same
        close occurrences twice.

        :param matcher:
        :param size:
        """
        memory = [(0, 0)]
        for span in self.matcher.spans():
            start = span.startchar or 0
            end = span.endchar or 0
            start_offseted = max(0, start - self.fragment_size)
            end_offseted = end + self.fragment_size

            if start_offseted < memory[-1][1]:
                start_offseted = memory[-1][1]
            memory.append((start_offseted, end_offseted,))
            yield (start_offseted, end_offseted,)

    def highlight(self, content, top=5):
        if self.search_type != 'content':
            return ''
        hl = highlight(
            text=escape(content),
            terms=self.highlight_items,
            analyzer=ANALYZER,
            fragmenter=FRAGMENTER,
            formatter=FORMATTER,
            top=top
        )
        return hl
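get_chunks() walks the matcher's span offsets in order and clamps each excerpt's start to the end of the previous one, so two hits that sit close together never produce overlapping, double-highlighted fragments. The same arithmetic in a self-contained form, with made-up character offsets in place of a real Whoosh matcher:

def merge_chunks(spans, fragment_size=200):
    # same bookkeeping as ResultWrapper.get_chunks, over plain (start, end) tuples
    memory = [(0, 0)]
    for start, end in spans:
        start_offseted = max(0, start - fragment_size)
        end_offseted = end + fragment_size
        if start_offseted < memory[-1][1]:
            start_offseted = memory[-1][1]  # clamp to the previous chunk's end
        memory.append((start_offseted, end_offseted))
        yield (start_offseted, end_offseted)

# two hits 50 characters apart: the second fragment starts where the first ends
print list(merge_chunks([(300, 310), (350, 360)]))
# -> [(100, 510), (510, 560)]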
rhodecode/lib/utils.py
@@ -1,634 +1,634 @@
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2 """
2 """
3 rhodecode.lib.utils
3 rhodecode.lib.utils
4 ~~~~~~~~~~~~~~~~~~~
4 ~~~~~~~~~~~~~~~~~~~
5
5
6 Utilities library for RhodeCode
6 Utilities library for RhodeCode
7
7
8 :created_on: Apr 18, 2010
8 :created_on: Apr 18, 2010
9 :author: marcink
9 :author: marcink
10 :copyright: (C) 2010-2012 Marcin Kuzminski <marcin@python-works.com>
10 :copyright: (C) 2010-2012 Marcin Kuzminski <marcin@python-works.com>
11 :license: GPLv3, see COPYING for more details.
11 :license: GPLv3, see COPYING for more details.
12 """
12 """
13 # This program is free software: you can redistribute it and/or modify
13 # This program is free software: you can redistribute it and/or modify
14 # it under the terms of the GNU General Public License as published by
14 # it under the terms of the GNU General Public License as published by
15 # the Free Software Foundation, either version 3 of the License, or
15 # the Free Software Foundation, either version 3 of the License, or
16 # (at your option) any later version.
16 # (at your option) any later version.
17 #
17 #
18 # This program is distributed in the hope that it will be useful,
18 # This program is distributed in the hope that it will be useful,
19 # but WITHOUT ANY WARRANTY; without even the implied warranty of
19 # but WITHOUT ANY WARRANTY; without even the implied warranty of
20 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
20 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
21 # GNU General Public License for more details.
21 # GNU General Public License for more details.
22 #
22 #
23 # You should have received a copy of the GNU General Public License
23 # You should have received a copy of the GNU General Public License
24 # along with this program. If not, see <http://www.gnu.org/licenses/>.
24 # along with this program. If not, see <http://www.gnu.org/licenses/>.
25
25
26 import os
26 import os
27 import re
27 import re
28 import logging
28 import logging
29 import datetime
29 import datetime
30 import traceback
30 import traceback
31 import paste
31 import paste
32 import beaker
32 import beaker
33 import tarfile
33 import tarfile
34 import shutil
34 import shutil
35 from os.path import abspath
35 from os.path import abspath
36 from os.path import dirname as dn, join as jn
36 from os.path import dirname as dn, join as jn
37
37
38 from paste.script.command import Command, BadCommand
38 from paste.script.command import Command, BadCommand
39
39
40 from mercurial import ui, config
40 from mercurial import ui, config
41
41
42 from webhelpers.text import collapse, remove_formatting, strip_tags
42 from webhelpers.text import collapse, remove_formatting, strip_tags
43
43
44 from rhodecode.lib.vcs import get_backend
44 from rhodecode.lib.vcs import get_backend
45 from rhodecode.lib.vcs.backends.base import BaseChangeset
45 from rhodecode.lib.vcs.backends.base import BaseChangeset
46 from rhodecode.lib.vcs.utils.lazy import LazyProperty
46 from rhodecode.lib.vcs.utils.lazy import LazyProperty
47 from rhodecode.lib.vcs.utils.helpers import get_scm
47 from rhodecode.lib.vcs.utils.helpers import get_scm
48 from rhodecode.lib.vcs.exceptions import VCSError
48 from rhodecode.lib.vcs.exceptions import VCSError
49
49
50 from rhodecode.lib.caching_query import FromCache
50 from rhodecode.lib.caching_query import FromCache
51
51
52 from rhodecode.model import meta
52 from rhodecode.model import meta
53 from rhodecode.model.db import Repository, User, RhodeCodeUi, \
53 from rhodecode.model.db import Repository, User, RhodeCodeUi, \
54 UserLog, RepoGroup, RhodeCodeSetting, UserRepoGroupToPerm
54 UserLog, RepoGroup, RhodeCodeSetting, UserRepoGroupToPerm
55 from rhodecode.model.meta import Session
55 from rhodecode.model.meta import Session
56 from rhodecode.model.repos_group import ReposGroupModel
56 from rhodecode.model.repos_group import ReposGroupModel
57 from rhodecode.lib import safe_str, safe_unicode
57 from rhodecode.lib import safe_str, safe_unicode
58
58
59 log = logging.getLogger(__name__)
59 log = logging.getLogger(__name__)
60
60
61 REMOVED_REPO_PAT = re.compile(r'rm__\d{8}_\d{6}_\d{6}__.*')
61 REMOVED_REPO_PAT = re.compile(r'rm__\d{8}_\d{6}_\d{6}__.*')
62
62
63
63
64 def recursive_replace(str_, replace=' '):
64 def recursive_replace(str_, replace=' '):
65 """Recursive replace of given sign to just one instance
65 """Recursive replace of given sign to just one instance
66
66
67 :param str_: given string
67 :param str_: given string
68 :param replace: char to find and replace multiple instances
68 :param replace: char to find and replace multiple instances
69
69
70 Examples::
70 Examples::
71 >>> recursive_replace("Mighty---Mighty-Bo--sstones",'-')
71 >>> recursive_replace("Mighty---Mighty-Bo--sstones",'-')
72 'Mighty-Mighty-Bo-sstones'
72 'Mighty-Mighty-Bo-sstones'
73 """
73 """
74
74
75 if str_.find(replace * 2) == -1:
75 if str_.find(replace * 2) == -1:
76 return str_
76 return str_
77 else:
77 else:
78 str_ = str_.replace(replace * 2, replace)
78 str_ = str_.replace(replace * 2, replace)
79 return recursive_replace(str_, replace)
79 return recursive_replace(str_, replace)
80
80
81
81
82 def repo_name_slug(value):
82 def repo_name_slug(value):
83 """Return slug of name of repository
83 """Return slug of name of repository
84 This function is called on each creation/modification
84 This function is called on each creation/modification
85 of repository to prevent bad names in repo
85 of repository to prevent bad names in repo
86 """
86 """
87
87
88 slug = remove_formatting(value)
88 slug = remove_formatting(value)
89 slug = strip_tags(slug)
89 slug = strip_tags(slug)
90
90
91 for c in """=[]\;'"<>,/~!@#$%^&*()+{}|: """:
91 for c in """=[]\;'"<>,/~!@#$%^&*()+{}|: """:
92 slug = slug.replace(c, '-')
92 slug = slug.replace(c, '-')
93 slug = recursive_replace(slug, '-')
93 slug = recursive_replace(slug, '-')
94 slug = collapse(slug, '-')
94 slug = collapse(slug, '-')
95 return slug
95 return slug
96
96
97
97
98 def get_repo_slug(request):
98 def get_repo_slug(request):
99 _repo = request.environ['pylons.routes_dict'].get('repo_name')
99 _repo = request.environ['pylons.routes_dict'].get('repo_name')
100 if _repo:
100 if _repo:
101 _repo = _repo.rstrip('/')
101 _repo = _repo.rstrip('/')
102 return _repo
102 return _repo
103
103
104
104
105 def get_repos_group_slug(request):
105 def get_repos_group_slug(request):
106 _group = request.environ['pylons.routes_dict'].get('group_name')
106 _group = request.environ['pylons.routes_dict'].get('group_name')
107 if _group:
107 if _group:
108 _group = _group.rstrip('/')
108 _group = _group.rstrip('/')
109 return _group
109 return _group
110
110
111
111
112 def action_logger(user, action, repo, ipaddr='', sa=None, commit=False):
112 def action_logger(user, action, repo, ipaddr='', sa=None, commit=False):
113 """
113 """
114 Action logger for various actions made by users
114 Action logger for various actions made by users
115
115
116 :param user: user that made this action, can be a unique username string or
116 :param user: user that made this action, can be a unique username string or
117 object containing user_id attribute
117 object containing user_id attribute
118 :param action: action to log, should be on of predefined unique actions for
118 :param action: action to log, should be on of predefined unique actions for
119 easy translations
119 easy translations
120 :param repo: string name of repository or object containing repo_id,
120 :param repo: string name of repository or object containing repo_id,
121 that action was made on
121 that action was made on
122 :param ipaddr: optional ip address from what the action was made
122 :param ipaddr: optional ip address from what the action was made
123 :param sa: optional sqlalchemy session
123 :param sa: optional sqlalchemy session
124
124
125 """
125 """
126
126
127 if not sa:
127 if not sa:
128 sa = meta.Session
128 sa = meta.Session
129
129
130 try:
130 try:
131 if hasattr(user, 'user_id'):
131 if hasattr(user, 'user_id'):
132 user_obj = user
132 user_obj = user
133 elif isinstance(user, basestring):
133 elif isinstance(user, basestring):
134 user_obj = User.get_by_username(user)
134 user_obj = User.get_by_username(user)
135 else:
135 else:
136 raise Exception('You have to provide user object or username')
136 raise Exception('You have to provide user object or username')
137
137
138 if hasattr(repo, 'repo_id'):
138 if hasattr(repo, 'repo_id'):
139 repo_obj = Repository.get(repo.repo_id)
139 repo_obj = Repository.get(repo.repo_id)
140 repo_name = repo_obj.repo_name
140 repo_name = repo_obj.repo_name
141 elif isinstance(repo, basestring):
141 elif isinstance(repo, basestring):
142 repo_name = repo.lstrip('/')
142 repo_name = repo.lstrip('/')
143 repo_obj = Repository.get_by_repo_name(repo_name)
143 repo_obj = Repository.get_by_repo_name(repo_name)
144 else:
144 else:
145 raise Exception('You have to provide repository to action logger')
145 raise Exception('You have to provide repository to action logger')
146
146
147 user_log = UserLog()
147 user_log = UserLog()
148 user_log.user_id = user_obj.user_id
148 user_log.user_id = user_obj.user_id
149 user_log.action = action
149 user_log.action = action
150
150
151 user_log.repository_id = repo_obj.repo_id
151 user_log.repository_id = repo_obj.repo_id
152 user_log.repository_name = repo_name
152 user_log.repository_name = repo_name
153
153
154 user_log.action_date = datetime.datetime.now()
154 user_log.action_date = datetime.datetime.now()
155 user_log.user_ip = ipaddr
155 user_log.user_ip = ipaddr
156 sa.add(user_log)
156 sa.add(user_log)
157
157
158 log.info(
158 log.info(
159 'Adding user %s, action %s on %s' % (user_obj, action,
159 'Adding user %s, action %s on %s' % (user_obj, action,
160 safe_unicode(repo))
160 safe_unicode(repo))
161 )
161 )
162 if commit:
162 if commit:
163 sa.commit()
163 sa.commit()
164 except:
164 except:
165 log.error(traceback.format_exc())
165 log.error(traceback.format_exc())
166 raise
166 raise
167
167
168
168
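action_logger accepts either ORM objects or plain strings for both the user and the repository, resolves them to database rows, and appends a UserLog entry; with commit=True it also commits the session itself. A hedged usage sketch (the user, repository and action names are illustrative, and the action string has to be one of the predefined action keys the docstring refers to):

from rhodecode.lib.utils import action_logger

# record that user 'marcin' pushed to repository 'demo-repo' from localhost
action_logger(user='marcin', action='push', repo='demo-repo',
              ipaddr='127.0.0.1', commit=True)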
169 def get_repos(path, recursive=False):
169 def get_repos(path, recursive=False):
170 """
170 """
171 Scans given path for repos and return (name,(type,path)) tuple
171 Scans given path for repos and return (name,(type,path)) tuple
172
172
173 :param path: path to scan for repositories
173 :param path: path to scan for repositories
174 :param recursive: recursive search and return names with subdirs in front
174 :param recursive: recursive search and return names with subdirs in front
175 """
175 """
176
176
177 # remove ending slash for better results
177 # remove ending slash for better results
178 path = path.rstrip(os.sep)
178 path = path.rstrip(os.sep)
179
179
180 def _get_repos(p):
180 def _get_repos(p):
181 if not os.access(p, os.W_OK):
181 if not os.access(p, os.W_OK):
182 return
182 return
183 for dirpath in os.listdir(p):
183 for dirpath in os.listdir(p):
184 if os.path.isfile(os.path.join(p, dirpath)):
184 if os.path.isfile(os.path.join(p, dirpath)):
185 continue
185 continue
186 cur_path = os.path.join(p, dirpath)
186 cur_path = os.path.join(p, dirpath)
187 try:
187 try:
188 scm_info = get_scm(cur_path)
188 scm_info = get_scm(cur_path)
189 yield scm_info[1].split(path, 1)[-1].lstrip(os.sep), scm_info
189 yield scm_info[1].split(path, 1)[-1].lstrip(os.sep), scm_info
190 except VCSError:
190 except VCSError:
191 if not recursive:
191 if not recursive:
192 continue
192 continue
193 #check if this dir containts other repos for recursive scan
193 #check if this dir containts other repos for recursive scan
194 rec_path = os.path.join(p, dirpath)
194 rec_path = os.path.join(p, dirpath)
195 if os.path.isdir(rec_path):
195 if os.path.isdir(rec_path):
196 for inner_scm in _get_repos(rec_path):
196 for inner_scm in _get_repos(rec_path):
197 yield inner_scm
197 yield inner_scm
198
198
199 return _get_repos(path)
199 return _get_repos(path)
200
200
201
201
202 def is_valid_repo(repo_name, base_path):
202 def is_valid_repo(repo_name, base_path):
203 """
203 """
204 Returns True if given path is a valid repository False otherwise
204 Returns True if given path is a valid repository False otherwise
205
205
206 :param repo_name:
206 :param repo_name:
207 :param base_path:
207 :param base_path:
208
208
209 :return True: if given path is a valid repository
209 :return True: if given path is a valid repository
210 """
210 """
211 full_path = os.path.join(safe_str(base_path), safe_str(repo_name))
211 full_path = os.path.join(safe_str(base_path), safe_str(repo_name))
212
212
213 try:
213 try:
214 get_scm(full_path)
214 get_scm(full_path)
215 return True
215 return True
216 except VCSError:
216 except VCSError:
217 return False
217 return False
218
218
219
219
220 def is_valid_repos_group(repos_group_name, base_path):
220 def is_valid_repos_group(repos_group_name, base_path):
221 """
221 """
222 Returns True if given path is a repos group False otherwise
222 Returns True if given path is a repos group False otherwise
223
223
224 :param repo_name:
224 :param repo_name:
225 :param base_path:
225 :param base_path:
226 """
226 """
227 full_path = os.path.join(safe_str(base_path), safe_str(repos_group_name))
227 full_path = os.path.join(safe_str(base_path), safe_str(repos_group_name))
228
228
229 # check if it's not a repo
229 # check if it's not a repo
230 if is_valid_repo(repos_group_name, base_path):
230 if is_valid_repo(repos_group_name, base_path):
231 return False
231 return False
232
232
233 # check if it's a valid path
233 # check if it's a valid path
234 if os.path.isdir(full_path):
234 if os.path.isdir(full_path):
235 return True
235 return True
236
236
237 return False
237 return False
238
238
239
239
240 def ask_ok(prompt, retries=4, complaint='Yes or no, please!'):
240 def ask_ok(prompt, retries=4, complaint='Yes or no, please!'):
241 while True:
241 while True:
242 ok = raw_input(prompt)
242 ok = raw_input(prompt)
243 if ok in ('y', 'ye', 'yes'):
243 if ok in ('y', 'ye', 'yes'):
244 return True
244 return True
245 if ok in ('n', 'no', 'nop', 'nope'):
245 if ok in ('n', 'no', 'nop', 'nope'):
246 return False
246 return False
247 retries = retries - 1
247 retries = retries - 1
248 if retries < 0:
248 if retries < 0:
249 raise IOError
249 raise IOError
250 print complaint
250 print complaint
251
251
252 #propagated from mercurial documentation
252 #propagated from mercurial documentation
253 ui_sections = ['alias', 'auth',
253 ui_sections = ['alias', 'auth',
254 'decode/encode', 'defaults',
254 'decode/encode', 'defaults',
255 'diff', 'email',
255 'diff', 'email',
256 'extensions', 'format',
256 'extensions', 'format',
257 'merge-patterns', 'merge-tools',
257 'merge-patterns', 'merge-tools',
258 'hooks', 'http_proxy',
258 'hooks', 'http_proxy',
259 'smtp', 'patch',
259 'smtp', 'patch',
260 'paths', 'profiling',
260 'paths', 'profiling',
261 'server', 'trusted',
261 'server', 'trusted',
262 'ui', 'web', ]
262 'ui', 'web', ]
263
263
264
264
265 def make_ui(read_from='file', path=None, checkpaths=True):
265 def make_ui(read_from='file', path=None, checkpaths=True):
266 """A function that will read python rc files or database
266 """A function that will read python rc files or database
267 and make an mercurial ui object from read options
267 and make an mercurial ui object from read options
268
268
269 :param path: path to mercurial config file
269 :param path: path to mercurial config file
270 :param checkpaths: check the path
270 :param checkpaths: check the path
271 :param read_from: read from 'file' or 'db'
271 :param read_from: read from 'file' or 'db'
272 """
272 """
273
273
274 baseui = ui.ui()
274 baseui = ui.ui()
275
275
276 # clean the baseui object
276 # clean the baseui object
277 baseui._ocfg = config.config()
277 baseui._ocfg = config.config()
278 baseui._ucfg = config.config()
278 baseui._ucfg = config.config()
279 baseui._tcfg = config.config()
279 baseui._tcfg = config.config()
280
280
281 if read_from == 'file':
281 if read_from == 'file':
282 if not os.path.isfile(path):
282 if not os.path.isfile(path):
283 log.debug('hgrc file is not present at %s skipping...' % path)
283 log.debug('hgrc file is not present at %s skipping...' % path)
284 return False
284 return False
285 log.debug('reading hgrc from %s' % path)
285 log.debug('reading hgrc from %s' % path)
286 cfg = config.config()
286 cfg = config.config()
287 cfg.read(path)
287 cfg.read(path)
288 for section in ui_sections:
288 for section in ui_sections:
289 for k, v in cfg.items(section):
289 for k, v in cfg.items(section):
290 log.debug('settings ui from file[%s]%s:%s' % (section, k, v))
290 log.debug('settings ui from file[%s]%s:%s' % (section, k, v))
291 baseui.setconfig(section, k, v)
291 baseui.setconfig(section, k, v)
292
292
293 elif read_from == 'db':
293 elif read_from == 'db':
294 sa = meta.Session
294 sa = meta.Session
295 ret = sa.query(RhodeCodeUi)\
295 ret = sa.query(RhodeCodeUi)\
296 .options(FromCache("sql_cache_short", "get_hg_ui_settings"))\
296 .options(FromCache("sql_cache_short", "get_hg_ui_settings"))\
297 .all()
297 .all()
298
298
299 hg_ui = ret
299 hg_ui = ret
300 for ui_ in hg_ui:
300 for ui_ in hg_ui:
301 if ui_.ui_active:
301 if ui_.ui_active:
302 log.debug('settings ui from db[%s]%s:%s', ui_.ui_section,
302 log.debug('settings ui from db[%s]%s:%s', ui_.ui_section,
303 ui_.ui_key, ui_.ui_value)
303 ui_.ui_key, ui_.ui_value)
304 baseui.setconfig(ui_.ui_section, ui_.ui_key, ui_.ui_value)
304 baseui.setconfig(ui_.ui_section, ui_.ui_key, ui_.ui_value)
305
305
306 meta.Session.remove()
306 meta.Session.remove()
307 return baseui
307 return baseui
308
308
309
309
310 def set_rhodecode_config(config):
310 def set_rhodecode_config(config):
311 """
311 """
312 Updates pylons config with new settings from database
312 Updates pylons config with new settings from database
313
313
314 :param config:
314 :param config:
315 """
315 """
316 hgsettings = RhodeCodeSetting.get_app_settings()
316 hgsettings = RhodeCodeSetting.get_app_settings()
317
317
318 for k, v in hgsettings.items():
318 for k, v in hgsettings.items():
319 config[k] = v
319 config[k] = v
320
320
321
321
322 def invalidate_cache(cache_key, *args):
322 def invalidate_cache(cache_key, *args):
323 """
323 """
324 Puts cache invalidation task into db for
324 Puts cache invalidation task into db for
325 further global cache invalidation
325 further global cache invalidation
326 """
326 """
327
327
328 from rhodecode.model.scm import ScmModel
328 from rhodecode.model.scm import ScmModel
329
329
330 if cache_key.startswith('get_repo_cached_'):
330 if cache_key.startswith('get_repo_cached_'):
331 name = cache_key.split('get_repo_cached_')[-1]
331 name = cache_key.split('get_repo_cached_')[-1]
332 ScmModel().mark_for_invalidation(name)
332 ScmModel().mark_for_invalidation(name)
333
333
334
334
335 class EmptyChangeset(BaseChangeset):
335 class EmptyChangeset(BaseChangeset):
336 """
336 """
337 An dummy empty changeset. It's possible to pass hash when creating
337 An dummy empty changeset. It's possible to pass hash when creating
338 an EmptyChangeset
338 an EmptyChangeset
339 """
339 """
340
340
341 def __init__(self, cs='0' * 40, repo=None, requested_revision=None,
341 def __init__(self, cs='0' * 40, repo=None, requested_revision=None,
342 alias=None):
342 alias=None):
343 self._empty_cs = cs
343 self._empty_cs = cs
344 self.revision = -1
344 self.revision = -1
345 self.message = ''
345 self.message = ''
346 self.author = ''
346 self.author = ''
347 self.date = ''
347 self.date = ''
348 self.repository = repo
348 self.repository = repo
349 self.requested_revision = requested_revision
349 self.requested_revision = requested_revision
350 self.alias = alias
350 self.alias = alias
351
351
352 @LazyProperty
352 @LazyProperty
353 def raw_id(self):
353 def raw_id(self):
354 """
354 """
355 Returns raw string identifying this changeset, useful for web
355 Returns raw string identifying this changeset, useful for web
356 representation.
356 representation.
357 """
357 """
358
358
359 return self._empty_cs
359 return self._empty_cs
360
360
361 @LazyProperty
361 @LazyProperty
362 def branch(self):
362 def branch(self):
363 return get_backend(self.alias).DEFAULT_BRANCH_NAME
363 return get_backend(self.alias).DEFAULT_BRANCH_NAME
364
364
365 @LazyProperty
365 @LazyProperty
366 def short_id(self):
366 def short_id(self):
367 return self.raw_id[:12]
367 return self.raw_id[:12]
368
368
369 def get_file_changeset(self, path):
369 def get_file_changeset(self, path):
370 return self
370 return self
371
371
372 def get_file_content(self, path):
372 def get_file_content(self, path):
373 return u''
373 return u''
374
374
375 def get_file_size(self, path):
375 def get_file_size(self, path):
376 return 0
376 return 0
377
377
378
378
379 def map_groups(groups):
379 def map_groups(groups):
380 """
380 """
381 Checks for groups existence, and creates groups structures.
381 Checks for groups existence, and creates groups structures.
382 It returns last group in structure
382 It returns last group in structure
383
383
384 :param groups: list of groups structure
384 :param groups: list of groups structure
385 """
385 """
386 sa = meta.Session
386 sa = meta.Session
387
387
388 parent = None
388 parent = None
389 group = None
389 group = None
390
390
391 # last element is repo in nested groups structure
391 # last element is repo in nested groups structure
392 groups = groups[:-1]
392 groups = groups[:-1]
393 rgm = ReposGroupModel(sa)
393 rgm = ReposGroupModel(sa)
394 for lvl, group_name in enumerate(groups):
394 for lvl, group_name in enumerate(groups):
395 group_name = '/'.join(groups[:lvl] + [group_name])
395 group_name = '/'.join(groups[:lvl] + [group_name])
396 group = RepoGroup.get_by_group_name(group_name)
396 group = RepoGroup.get_by_group_name(group_name)
397 desc = '%s group' % group_name
397 desc = '%s group' % group_name
398
398
399 # # WTF that doesn't work !?
399 # # WTF that doesn't work !?
400 # if group is None:
400 # if group is None:
401 # group = rgm.create(group_name, desc, parent, just_db=True)
401 # group = rgm.create(group_name, desc, parent, just_db=True)
402 # sa.commit()
402 # sa.commit()
403
403
404 # skip folders that are now removed repos
404 # skip folders that are now removed repos
405 if REMOVED_REPO_PAT.match(group_name):
405 if REMOVED_REPO_PAT.match(group_name):
406 break
406 break
407
407
408 if group is None:
408 if group is None:
409 log.debug('creating group level: %s group_name: %s' % (lvl, group_name))
409 log.debug('creating group level: %s group_name: %s' % (lvl, group_name))
410 group = RepoGroup(group_name, parent)
410 group = RepoGroup(group_name, parent)
411 group.group_description = desc
411 group.group_description = desc
412 sa.add(group)
412 sa.add(group)
413 rgm._create_default_perms(group)
413 rgm._create_default_perms(group)
414 sa.commit()
414 sa.commit()
415 parent = group
415 parent = group
416 return group
416 return group
417
417
418
418
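map_groups receives a repository path already split into its components, makes sure every intermediate group exists (creating missing ones with default permissions), and returns the innermost group; the last element is the repository itself and is deliberately dropped. A sketch of a direct call, mirroring how repo2db_mapper below drives it (the path is illustrative and an initialized database session is assumed):

from rhodecode.model.db import Repository
from rhodecode.lib.utils import map_groups

# ensures the 'projects' and 'projects/web' groups exist and returns the latter;
# the trailing 'site-repo' element is the repository and is skipped
group = map_groups('projects/web/site-repo'.split(Repository.url_sep()))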
419 def repo2db_mapper(initial_repo_list, remove_obsolete=False):
419 def repo2db_mapper(initial_repo_list, remove_obsolete=False):
420 """
420 """
421 maps all repos given in initial_repo_list, non existing repositories
421 maps all repos given in initial_repo_list, non existing repositories
422 are created, if remove_obsolete is True it also check for db entries
422 are created, if remove_obsolete is True it also check for db entries
423 that are not in initial_repo_list and removes them.
423 that are not in initial_repo_list and removes them.
424
424
425 :param initial_repo_list: list of repositories found by scanning methods
425 :param initial_repo_list: list of repositories found by scanning methods
426 :param remove_obsolete: check for obsolete entries in database
426 :param remove_obsolete: check for obsolete entries in database
427 """
427 """
428 from rhodecode.model.repo import RepoModel
428 from rhodecode.model.repo import RepoModel
429 sa = meta.Session
429 sa = meta.Session
430 rm = RepoModel()
430 rm = RepoModel()
431 user = sa.query(User).filter(User.admin == True).first()
431 user = sa.query(User).filter(User.admin == True).first()
432 if user is None:
432 if user is None:
433 raise Exception('Missing administrative account !')
433 raise Exception('Missing administrative account !')
434 added = []
434 added = []
435
435
436 for name, repo in initial_repo_list.items():
436 for name, repo in initial_repo_list.items():
437 group = map_groups(name.split(Repository.url_sep()))
437 group = map_groups(name.split(Repository.url_sep()))
438 if not rm.get_by_repo_name(name, cache=False):
438 if not rm.get_by_repo_name(name, cache=False):
439 log.info('repository %s not found creating default' % name)
439 log.info('repository %s not found creating default' % name)
440 added.append(name)
440 added.append(name)
441 form_data = {
441 form_data = {
442 'repo_name': name,
442 'repo_name': name,
443 'repo_name_full': name,
443 'repo_name_full': name,
444 'repo_type': repo.alias,
444 'repo_type': repo.alias,
445 'description': repo.description \
445 'description': repo.description \
446 if repo.description != 'unknown' else '%s repository' % name,
446 if repo.description != 'unknown' else '%s repository' % name,
447 'private': False,
447 'private': False,
448 'group_id': getattr(group, 'group_id', None)
448 'group_id': getattr(group, 'group_id', None)
449 }
449 }
450 rm.create(form_data, user, just_db=True)
450 rm.create(form_data, user, just_db=True)
451 sa.commit()
451 sa.commit()
452 removed = []
452 removed = []
453 if remove_obsolete:
453 if remove_obsolete:
454 #remove from database those repositories that are not in the filesystem
454 #remove from database those repositories that are not in the filesystem
455 for repo in sa.query(Repository).all():
455 for repo in sa.query(Repository).all():
456 if repo.repo_name not in initial_repo_list.keys():
456 if repo.repo_name not in initial_repo_list.keys():
457 removed.append(repo.repo_name)
457 removed.append(repo.repo_name)
458 sa.delete(repo)
458 sa.delete(repo)
459 sa.commit()
459 sa.commit()
460
460
461 return added, removed
461 return added, removed
462
462
463
463
464 # set cache regions for beaker so celery can utilise it
464 # set cache regions for beaker so celery can utilise it
465 def add_cache(settings):
465 def add_cache(settings):
466 cache_settings = {'regions': None}
466 cache_settings = {'regions': None}
467 for key in settings.keys():
467 for key in settings.keys():
468 for prefix in ['beaker.cache.', 'cache.']:
468 for prefix in ['beaker.cache.', 'cache.']:
469 if key.startswith(prefix):
469 if key.startswith(prefix):
470 name = key.split(prefix)[1].strip()
470 name = key.split(prefix)[1].strip()
471 cache_settings[name] = settings[key].strip()
471 cache_settings[name] = settings[key].strip()
472 if cache_settings['regions']:
472 if cache_settings['regions']:
473 for region in cache_settings['regions'].split(','):
473 for region in cache_settings['regions'].split(','):
474 region = region.strip()
474 region = region.strip()
475 region_settings = {}
475 region_settings = {}
476 for key, value in cache_settings.items():
476 for key, value in cache_settings.items():
477 if key.startswith(region):
477 if key.startswith(region):
478 region_settings[key.split('.')[1]] = value
478 region_settings[key.split('.')[1]] = value
479 region_settings['expire'] = int(region_settings.get('expire',
479 region_settings['expire'] = int(region_settings.get('expire',
480 60))
480 60))
481 region_settings.setdefault('lock_dir',
481 region_settings.setdefault('lock_dir',
482 cache_settings.get('lock_dir'))
482 cache_settings.get('lock_dir'))
483 region_settings.setdefault('data_dir',
483 region_settings.setdefault('data_dir',
484 cache_settings.get('data_dir'))
484 cache_settings.get('data_dir'))
485
485
486 if 'type' not in region_settings:
486 if 'type' not in region_settings:
487 region_settings['type'] = cache_settings.get('type',
487 region_settings['type'] = cache_settings.get('type',
488 'memory')
488 'memory')
489 beaker.cache.cache_regions[region] = region_settings
489 beaker.cache.cache_regions[region] = region_settings
490
490
491
491
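add_cache flattens every beaker.cache.* (or cache.*) key from the parsed .ini into one dict, then builds a per-region configuration for each name listed under regions, filling in expire, lock_dir, data_dir and type defaults where they are not set explicitly. A sketch with a hand-built settings dict standing in for the parsed ini, reusing the sql_cache_short/sql_cache_long region names referenced elsewhere in this module (paths are illustrative):

import beaker.cache
from rhodecode.lib.utils import add_cache

settings = {
    'beaker.cache.data_dir': '/tmp/rc-cache/data',
    'beaker.cache.lock_dir': '/tmp/rc-cache/lock',
    'beaker.cache.regions': 'sql_cache_short, sql_cache_long',
    'beaker.cache.sql_cache_short.expire': '30',
    'beaker.cache.sql_cache_long.expire': '3600',
}
add_cache(settings)

# each region now carries a complete configuration dict, e.g.
# {'expire': 30, 'type': 'memory', 'lock_dir': '/tmp/rc-cache/lock',
#  'data_dir': '/tmp/rc-cache/data'}
print beaker.cache.cache_regions['sql_cache_short']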
492 #==============================================================================
492 #==============================================================================
493 # TEST FUNCTIONS AND CREATORS
493 # TEST FUNCTIONS AND CREATORS
494 #==============================================================================
494 #==============================================================================
495 def create_test_index(repo_location, config, full_index):
495 def create_test_index(repo_location, config, full_index):
496 """
496 """
497 Makes default test index
497 Makes default test index
498
498
499 :param config: test config
499 :param config: test config
500 :param full_index:
500 :param full_index:
501 """
501 """
502
502
503 from rhodecode.lib.indexers.daemon import WhooshIndexingDaemon
503 from rhodecode.lib.indexers.daemon import WhooshIndexingDaemon
504 from rhodecode.lib.pidlock import DaemonLock, LockHeld
504 from rhodecode.lib.pidlock import DaemonLock, LockHeld
505
505
506 repo_location = repo_location
506 repo_location = repo_location
507
507
508 index_location = os.path.join(config['app_conf']['index_dir'])
508 index_location = os.path.join(config['app_conf']['index_dir'])
509 if not os.path.exists(index_location):
509 if not os.path.exists(index_location):
510 os.makedirs(index_location)
510 os.makedirs(index_location)
511
511
512 try:
512 try:
513 l = DaemonLock(file_=jn(dn(index_location), 'make_index.lock'))
513 l = DaemonLock(file_=jn(dn(index_location), 'make_index.lock'))
514 WhooshIndexingDaemon(index_location=index_location,
514 WhooshIndexingDaemon(index_location=index_location,
515 repo_location=repo_location)\
515 repo_location=repo_location)\
516 .run(full_index=full_index)
516 .run(full_index=full_index)
517 l.release()
517 l.release()
518 except LockHeld:
518 except LockHeld:
519 pass
519 pass
520
520
521
521
522 def create_test_env(repos_test_path, config):
522 def create_test_env(repos_test_path, config):
523 """
523 """
524 Makes a fresh database and
524 Makes a fresh database and
525 install test repository into tmp dir
525 install test repository into tmp dir
526 """
526 """
527 from rhodecode.lib.db_manage import DbManage
527 from rhodecode.lib.db_manage import DbManage
528 from rhodecode.tests import HG_REPO, TESTS_TMP_PATH
528 from rhodecode.tests import HG_REPO, TESTS_TMP_PATH
529
529
530 # PART ONE create db
530 # PART ONE create db
531 dbconf = config['sqlalchemy.db1.url']
531 dbconf = config['sqlalchemy.db1.url']
532 log.debug('making test db %s' % dbconf)
532 log.debug('making test db %s' % dbconf)
533
533
534 # create test dir if it doesn't exist
534 # create test dir if it doesn't exist
535 if not os.path.isdir(repos_test_path):
535 if not os.path.isdir(repos_test_path):
536 log.debug('Creating testdir %s' % repos_test_path)
536 log.debug('Creating testdir %s' % repos_test_path)
537 os.makedirs(repos_test_path)
537 os.makedirs(repos_test_path)
538
538
539 dbmanage = DbManage(log_sql=True, dbconf=dbconf, root=config['here'],
539 dbmanage = DbManage(log_sql=True, dbconf=dbconf, root=config['here'],
540 tests=True)
540 tests=True)
541 dbmanage.create_tables(override=True)
541 dbmanage.create_tables(override=True)
542 dbmanage.create_settings(dbmanage.config_prompt(repos_test_path))
542 dbmanage.create_settings(dbmanage.config_prompt(repos_test_path))
543 dbmanage.create_default_user()
543 dbmanage.create_default_user()
544 dbmanage.admin_prompt()
544 dbmanage.admin_prompt()
545 dbmanage.create_permissions()
545 dbmanage.create_permissions()
546 dbmanage.populate_default_permissions()
546 dbmanage.populate_default_permissions()
547 Session.commit()
547 Session.commit()
548 # PART TWO make test repo
548 # PART TWO make test repo
549 log.debug('making test vcs repositories')
549 log.debug('making test vcs repositories')
550
550
551 idx_path = config['app_conf']['index_dir']
551 idx_path = config['app_conf']['index_dir']
552 data_path = config['app_conf']['cache_dir']
552 data_path = config['app_conf']['cache_dir']
553
553
554 #clean index and data
554 #clean index and data
555 if idx_path and os.path.exists(idx_path):
555 if idx_path and os.path.exists(idx_path):
556 log.debug('remove %s' % idx_path)
556 log.debug('remove %s' % idx_path)
557 shutil.rmtree(idx_path)
557 shutil.rmtree(idx_path)
558
558
559 if data_path and os.path.exists(data_path):
559 if data_path and os.path.exists(data_path):
560 log.debug('remove %s' % data_path)
560 log.debug('remove %s' % data_path)
561 shutil.rmtree(data_path)
561 shutil.rmtree(data_path)
562
562
563 #CREATE DEFAULT HG REPOSITORY
563 #CREATE DEFAULT HG REPOSITORY
564 cur_dir = dn(dn(abspath(__file__)))
564 cur_dir = dn(dn(abspath(__file__)))
565 tar = tarfile.open(jn(cur_dir, 'tests', "vcs_test_hg.tar.gz"))
565 tar = tarfile.open(jn(cur_dir, 'tests', "vcs_test_hg.tar.gz"))
566 tar.extractall(jn(TESTS_TMP_PATH, HG_REPO))
566 tar.extractall(jn(TESTS_TMP_PATH, HG_REPO))
567 tar.close()
567 tar.close()
568
568
569
569
570 #==============================================================================
570 #==============================================================================
571 # PASTER COMMANDS
571 # PASTER COMMANDS
572 #==============================================================================
572 #==============================================================================
573 class BasePasterCommand(Command):
573 class BasePasterCommand(Command):
574 """
574 """
575 Abstract Base Class for paster commands.
575 Abstract Base Class for paster commands.
576
576
577 The celery commands are somewhat aggressive about loading
577 The celery commands are somewhat aggressive about loading
578 celery.conf, and since our module sets the `CELERY_LOADER`
578 celery.conf, and since our module sets the `CELERY_LOADER`
579 environment variable to our loader, we have to bootstrap a bit and
579 environment variable to our loader, we have to bootstrap a bit and
580 make sure we've had a chance to load the pylons config off of the
580 make sure we've had a chance to load the pylons config off of the
581 command line, otherwise everything fails.
581 command line, otherwise everything fails.
582 """
582 """
583 min_args = 1
583 min_args = 1
584 min_args_error = "Please provide a paster config file as an argument."
584 min_args_error = "Please provide a paster config file as an argument."
585 takes_config_file = 1
585 takes_config_file = 1
586 requires_config_file = True
586 requires_config_file = True
587
587
588 def notify_msg(self, msg, log=False):
588 def notify_msg(self, msg, log=False):
589 """Make a notification to user, additionally if logger is passed
589 """Make a notification to user, additionally if logger is passed
590 it logs this action using given logger
590 it logs this action using given logger
591
591
592 :param msg: message that will be printed to user
592 :param msg: message that will be printed to user
593 :param log: logging instance, to use to additionally log this message
593 :param log: logging instance, to use to additionally log this message
594
594
595 """
595 """
596 if log and isinstance(log, logging):
596 if log and isinstance(log, logging):
597 log(msg)
597 log(msg)
598
598
599 def run(self, args):
599 def run(self, args):
600 """
600 """
601 Overrides Command.run
601 Overrides Command.run
602
602
603 Checks for a config file argument and loads it.
603 Checks for a config file argument and loads it.
604 """
604 """
605 if len(args) < self.min_args:
605 if len(args) < self.min_args:
606 raise BadCommand(
606 raise BadCommand(
607 self.min_args_error % {'min_args': self.min_args,
607 self.min_args_error % {'min_args': self.min_args,
608 'actual_args': len(args)})
608 'actual_args': len(args)})
609
609
610 # Decrement because we're going to lob off the first argument.
610 # Decrement because we're going to lob off the first argument.
611 # @@ This is hacky
611 # @@ This is hacky
612 self.min_args -= 1
612 self.min_args -= 1
613 self.bootstrap_config(args[0])
613 self.bootstrap_config(args[0])
614 self.update_parser()
614 self.update_parser()
615 return super(BasePasterCommand, self).run(args[1:])
615 return super(BasePasterCommand, self).run(args[1:])
616
616
617 def update_parser(self):
617 def update_parser(self):
618 """
618 """
619 Abstract method. Allows for the class's parser to be updated
619 Abstract method. Allows for the class's parser to be updated
620 before the superclass's `run` method is called. Necessary to
620 before the superclass's `run` method is called. Necessary to
621 allow options/arguments to be passed through to the underlying
621 allow options/arguments to be passed through to the underlying
622 celery command.
622 celery command.
623 """
623 """
624 raise NotImplementedError("Abstract Method.")
624 raise NotImplementedError("Abstract Method.")
625
625
626 def bootstrap_config(self, conf):
626 def bootstrap_config(self, conf):
627 """
627 """
628 Loads the pylons configuration.
628 Loads the pylons configuration.
629 """
629 """
630 from pylons import config as pylonsconfig
630 from pylons import config as pylonsconfig
631
631
632 path_to_ini_file = os.path.realpath(conf)
632 self.path_to_ini_file = os.path.realpath(conf)
633 conf = paste.deploy.appconfig('config:' + path_to_ini_file)
633 conf = paste.deploy.appconfig('config:' + self.path_to_ini_file)
634 pylonsconfig.init_app(conf.global_conf, conf.local_conf)
634 pylonsconfig.init_app(conf.global_conf, conf.local_conf)