##// END OF EJS Templates
Whoosh logging is now controlled by the .ini files' logging setup
marcink -
r2102:04d26165 beta
parent child Browse files
Show More
@@ -1,294 +1,301 b''
1 1 ################################################################################
2 2 ################################################################################
3 3 # RhodeCode - Pylons environment configuration #
4 4 # #
5 5 # The %(here)s variable will be replaced with the parent directory of this file#
6 6 ################################################################################
7 7
8 8 [DEFAULT]
9 9 debug = true
10 10 pdebug = false
11 11 ################################################################################
12 12 ## Uncomment and replace with the address which should receive ##
13 13 ## any error reports after application crash ##
14 14 ## Additionally those settings will be used by RhodeCode mailing system ##
15 15 ################################################################################
16 16 #email_to = admin@localhost
17 17 #error_email_from = paste_error@localhost
18 18 #app_email_from = rhodecode-noreply@localhost
19 19 #error_message =
20 20 #email_prefix = [RhodeCode]
21 21
22 22 #smtp_server = mail.server.com
23 23 #smtp_username =
24 24 #smtp_password =
25 25 #smtp_port =
26 26 #smtp_use_tls = false
27 27 #smtp_use_ssl = true
28 28 # Specify available auth parameters here (e.g. LOGIN PLAIN CRAM-MD5, etc.)
29 29 #smtp_auth =
30 30
31 31 [server:main]
32 32 ##nr of threads to spawn
33 33 threadpool_workers = 5
34 34
35 35 ##max requests before thread respawn
36 36 threadpool_max_requests = 10
37 37
38 38 ##option to use threads instead of processes
39 39 use_threadpool = true
40 40
41 41 use = egg:Paste#http
42 42 host = 0.0.0.0
43 43 port = 5000
44 44
45 45 [app:main]
46 46 use = egg:rhodecode
47 47 full_stack = true
48 48 static_files = true
49 49 lang = en
50 50 cache_dir = %(here)s/data
51 51 index_dir = %(here)s/data/index
52 52 app_instance_uuid = rc-develop
53 53 cut_off_limit = 256000
54 54 force_https = false
55 55 commit_parse_limit = 25
56 56 use_gravatar = true
57 57 container_auth_enabled = false
58 58 proxypass_auth_enabled = false
59 59 default_encoding = utf8
60 60
61 61 ## overwrite schema of clone url
62 62 ## available vars:
63 63 ## scheme - http/https
64 64 ## user - current user
65 65 ## pass - password
66 66 ## netloc - network location
67 67 ## path - usually repo_name
68 68
69 69 #clone_uri = {scheme}://{user}{pass}{netloc}{path}
70 70
71 71 ## issue tracking mapping for commits messages
72 72 ## comment out issue_pat, issue_server, issue_prefix to enable
73 73
74 74 ## pattern to get the issues from commit messages
75 75 ## default one used here is #<numbers> with a regex passive group for `#`
76 76 ## {id} will be all groups matched from this pattern
77 77
78 78 issue_pat = (?:\s*#)(\d+)
79 79
80 80 ## server url to the issue, each {id} will be replaced with match
81 81 ## fetched from the regex and {repo} is replaced with repository name
82 82
83 83 issue_server_link = https://myissueserver.com/{repo}/issue/{id}
84 84
85 85 ## prefix to add to the link to indicate it's a URL
86 86 ## #314 will be replaced by <issue_prefix><id>
87 87
88 88 issue_prefix = #
89 89
90 90 ## instance-id prefix
91 91 ## a prefix key for this instance used for cache invalidation when running
92 92 ## multiple instances of rhodecode, make sure it's globally unique for
93 93 ## all running rhodecode instances. Leave empty if you don't use it
94 94 instance_id =
95 95
96 96 ####################################
97 97 ### CELERY CONFIG ####
98 98 ####################################
99 99 use_celery = false
100 100 broker.host = localhost
101 101 broker.vhost = rabbitmqhost
102 102 broker.port = 5672
103 103 broker.user = rabbitmq
104 104 broker.password = qweqwe
105 105
106 106 celery.imports = rhodecode.lib.celerylib.tasks
107 107
108 108 celery.result.backend = amqp
109 109 celery.result.dburi = amqp://
110 110 celery.result.serialier = json
111 111
112 112 #celery.send.task.error.emails = true
113 113 #celery.amqp.task.result.expires = 18000
114 114
115 115 celeryd.concurrency = 2
116 116 #celeryd.log.file = celeryd.log
117 117 celeryd.log.level = debug
118 118 celeryd.max.tasks.per.child = 1
119 119
120 120 #tasks will never be sent to the queue, but executed locally instead.
121 121 celery.always.eager = false
122 122
123 123 ####################################
124 124 ### BEAKER CACHE ####
125 125 ####################################
126 126 beaker.cache.data_dir=%(here)s/data/cache/data
127 127 beaker.cache.lock_dir=%(here)s/data/cache/lock
128 128
129 129 beaker.cache.regions=super_short_term,short_term,long_term,sql_cache_short,sql_cache_med,sql_cache_long
130 130
131 131 beaker.cache.super_short_term.type=memory
132 132 beaker.cache.super_short_term.expire=10
133 133 beaker.cache.super_short_term.key_length = 256
134 134
135 135 beaker.cache.short_term.type=memory
136 136 beaker.cache.short_term.expire=60
137 137 beaker.cache.short_term.key_length = 256
138 138
139 139 beaker.cache.long_term.type=memory
140 140 beaker.cache.long_term.expire=36000
141 141 beaker.cache.long_term.key_length = 256
142 142
143 143 beaker.cache.sql_cache_short.type=memory
144 144 beaker.cache.sql_cache_short.expire=10
145 145 beaker.cache.sql_cache_short.key_length = 256
146 146
147 147 beaker.cache.sql_cache_med.type=memory
148 148 beaker.cache.sql_cache_med.expire=360
149 149 beaker.cache.sql_cache_med.key_length = 256
150 150
151 151 beaker.cache.sql_cache_long.type=file
152 152 beaker.cache.sql_cache_long.expire=3600
153 153 beaker.cache.sql_cache_long.key_length = 256
154 154
155 155 ####################################
156 156 ### BEAKER SESSION ####
157 157 ####################################
158 158 ## Type of storage used for the session, current types are
159 159 ## dbm, file, memcached, database, and memory.
160 160 ## The storage uses the Container API
161 161 ## that is also used by the cache system.
162 162
163 163 ## db session example
164 164
165 165 #beaker.session.type = ext:database
166 166 #beaker.session.sa.url = postgresql://postgres:qwe@localhost/rhodecode
167 167 #beaker.session.table_name = db_session
168 168
169 169 ## encrypted cookie session, good for many instances
170 170 #beaker.session.type = cookie
171 171
172 172 beaker.session.type = file
173 173 beaker.session.key = rhodecode
174 # secure cookie requires AES python libraries
174 175 #beaker.session.encrypt_key = g654dcno0-9873jhgfreyu
175 176 #beaker.session.validate_key = 9712sds2212c--zxc123
176 177 beaker.session.timeout = 36000
177 178 beaker.session.httponly = true
178 179
179 180 ## uncomment for https secure cookie
180 181 beaker.session.secure = false
181 182
182 183 ##auto save the session to avoid calling .save()
183 184 beaker.session.auto = False
184 185
185 186 ##true: expire at browser close
186 187 #beaker.session.cookie_expires = 3600
187 188
188 189
189 190 ################################################################################
190 191 ## WARNING: *THE LINE BELOW MUST BE UNCOMMENTED ON A PRODUCTION ENVIRONMENT* ##
191 192 ## Debug mode will enable the interactive debugging tool, allowing ANYONE to ##
192 193 ## execute malicious code after an exception is raised. ##
193 194 ################################################################################
194 195 #set debug = false
195 196
196 197 ##################################
197 198 ### LOGVIEW CONFIG ###
198 199 ##################################
199 200 logview.sqlalchemy = #faa
200 201 logview.pylons.templating = #bfb
201 202 logview.pylons.util = #eee
202 203
203 204 #########################################################
204 205 ### DB CONFIGS - EACH DB WILL HAVE IT'S OWN CONFIG ###
205 206 #########################################################
206 207 #sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode.db
207 208 sqlalchemy.db1.url = postgresql://postgres:qwe@localhost/rhodecode
208 209 sqlalchemy.db1.echo = false
209 210 sqlalchemy.db1.pool_recycle = 3600
210 211 sqlalchemy.convert_unicode = true
211 212
212 213 ################################
213 214 ### LOGGING CONFIGURATION ####
214 215 ################################
215 216 [loggers]
216 keys = root, routes, rhodecode, sqlalchemy, beaker, templates
217 keys = root, routes, rhodecode, sqlalchemy, beaker, templates, whoosh_indexer
217 218
218 219 [handlers]
219 220 keys = console, console_sql
220 221
221 222 [formatters]
222 223 keys = generic, color_formatter, color_formatter_sql
223 224
224 225 #############
225 226 ## LOGGERS ##
226 227 #############
227 228 [logger_root]
228 229 level = NOTSET
229 230 handlers = console
230 231
231 232 [logger_routes]
232 233 level = DEBUG
233 234 handlers =
234 235 qualname = routes.middleware
235 236 # "level = DEBUG" logs the route matched and routing variables.
236 237 propagate = 1
237 238
238 239 [logger_beaker]
239 240 level = DEBUG
240 241 handlers =
241 242 qualname = beaker.container
242 243 propagate = 1
243 244
244 245 [logger_templates]
245 246 level = INFO
246 247 handlers =
247 248 qualname = pylons.templating
248 249 propagate = 1
249 250
250 251 [logger_rhodecode]
251 252 level = DEBUG
252 253 handlers =
253 254 qualname = rhodecode
254 255 propagate = 1
255 256
256 257 [logger_sqlalchemy]
257 258 level = INFO
258 259 handlers = console_sql
259 260 qualname = sqlalchemy.engine
260 261 propagate = 0
261 262
263 [logger_whoosh_indexer]
264 level = DEBUG
265 handlers =
266 qualname = whoosh_indexer
267 propagate = 1
268
262 269 ##############
263 270 ## HANDLERS ##
264 271 ##############
265 272
266 273 [handler_console]
267 274 class = StreamHandler
268 275 args = (sys.stderr,)
269 276 level = DEBUG
270 277 formatter = color_formatter
271 278
272 279 [handler_console_sql]
273 280 class = StreamHandler
274 281 args = (sys.stderr,)
275 282 level = DEBUG
276 283 formatter = color_formatter_sql
277 284
278 285 ################
279 286 ## FORMATTERS ##
280 287 ################
281 288
282 289 [formatter_generic]
283 290 format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
284 291 datefmt = %Y-%m-%d %H:%M:%S
285 292
286 293 [formatter_color_formatter]
287 294 class=rhodecode.lib.colored_formatter.ColorFormatter
288 295 format= %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
289 296 datefmt = %Y-%m-%d %H:%M:%S
290 297
291 298 [formatter_color_formatter_sql]
292 299 class=rhodecode.lib.colored_formatter.ColorFormatterSql
293 300 format= %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
294 301 datefmt = %Y-%m-%d %H:%M:%S
@@ -1,295 +1,301 b''
1 1 ################################################################################
2 2 ################################################################################
3 3 # RhodeCode - Pylons environment configuration #
4 4 # #
5 5 # The %(here)s variable will be replaced with the parent directory of this file#
6 6 ################################################################################
7 7
8 8 [DEFAULT]
9 9 debug = true
10 10 pdebug = false
11 11 ################################################################################
12 12 ## Uncomment and replace with the address which should receive ##
13 13 ## any error reports after application crash ##
14 14 ## Additionally those settings will be used by RhodeCode mailing system ##
15 15 ################################################################################
16 16 #email_to = admin@localhost
17 17 #error_email_from = paste_error@localhost
18 18 #app_email_from = rhodecode-noreply@localhost
19 19 #error_message =
20 20 #email_prefix = [RhodeCode]
21 21
22 22 #smtp_server = mail.server.com
23 23 #smtp_username =
24 24 #smtp_password =
25 25 #smtp_port =
26 26 #smtp_use_tls = false
27 27 #smtp_use_ssl = true
28 28 # Specify available auth parameters here (e.g. LOGIN PLAIN CRAM-MD5, etc.)
29 29 #smtp_auth =
30 30
31 31 [server:main]
32 32 ##nr of threads to spawn
33 33 threadpool_workers = 5
34 34
35 35 ##max requests before thread respawn
36 36 threadpool_max_requests = 10
37 37
38 38 ##option to use threads instead of processes
39 39 use_threadpool = true
40 40
41 41 use = egg:Paste#http
42 42 host = 127.0.0.1
43 43 port = 8001
44 44
45 45 [app:main]
46 46 use = egg:rhodecode
47 47 full_stack = true
48 48 static_files = true
49 49 lang = en
50 50 cache_dir = %(here)s/data
51 51 index_dir = %(here)s/data/index
52 52 app_instance_uuid = rc-production
53 53 cut_off_limit = 256000
54 54 force_https = false
55 55 commit_parse_limit = 50
56 56 use_gravatar = true
57 57 container_auth_enabled = false
58 58 proxypass_auth_enabled = false
59 59 default_encoding = utf8
60 60
61 61 ## overwrite schema of clone url
62 62 ## available vars:
63 63 ## scheme - http/https
64 64 ## user - current user
65 65 ## pass - password
66 66 ## netloc - network location
67 67 ## path - usually repo_name
68 68
69 69 #clone_uri = {scheme}://{user}{pass}{netloc}{path}
70 70
71 71 ## issue tracking mapping for commits messages
72 72 ## comment out issue_pat, issue_server, issue_prefix to enable
73 73
74 74 ## pattern to get the issues from commit messages
75 75 ## default one used here is #<numbers> with a regex passive group for `#`
76 76 ## {id} will be all groups matched from this pattern
77 77
78 78 issue_pat = (?:\s*#)(\d+)
79 79
80 80 ## server url to the issue, each {id} will be replaced with match
81 81 ## fetched from the regex and {repo} is replaced with repository name
82 82
83 83 issue_server_link = https://myissueserver.com/{repo}/issue/{id}
84 84
85 85 ## prefix to add to the link to indicate it's a URL
86 86 ## #314 will be replaced by <issue_prefix><id>
87 87
88 88 issue_prefix = #
89 89
90 90 ## instance-id prefix
91 91 ## a prefix key for this instance used for cache invalidation when running
92 92 ## multiple instances of rhodecode, make sure it's globally unique for
93 93 ## all running rhodecode instances. Leave empty if you don't use it
94 94 instance_id =
95 95
96 96 ####################################
97 97 ### CELERY CONFIG ####
98 98 ####################################
99 99 use_celery = false
100 100 broker.host = localhost
101 101 broker.vhost = rabbitmqhost
102 102 broker.port = 5672
103 103 broker.user = rabbitmq
104 104 broker.password = qweqwe
105 105
106 106 celery.imports = rhodecode.lib.celerylib.tasks
107 107
108 108 celery.result.backend = amqp
109 109 celery.result.dburi = amqp://
110 110 celery.result.serialier = json
111 111
112 112 #celery.send.task.error.emails = true
113 113 #celery.amqp.task.result.expires = 18000
114 114
115 115 celeryd.concurrency = 2
116 116 #celeryd.log.file = celeryd.log
117 117 celeryd.log.level = debug
118 118 celeryd.max.tasks.per.child = 1
119 119
120 120 #tasks will never be sent to the queue, but executed locally instead.
121 121 celery.always.eager = false
122 122
123 123 ####################################
124 124 ### BEAKER CACHE ####
125 125 ####################################
126 126 beaker.cache.data_dir=%(here)s/data/cache/data
127 127 beaker.cache.lock_dir=%(here)s/data/cache/lock
128 128
129 129 beaker.cache.regions=super_short_term,short_term,long_term,sql_cache_short,sql_cache_med,sql_cache_long
130 130
131 131 beaker.cache.super_short_term.type=memory
132 132 beaker.cache.super_short_term.expire=10
133 133 beaker.cache.super_short_term.key_length = 256
134 134
135 135 beaker.cache.short_term.type=memory
136 136 beaker.cache.short_term.expire=60
137 137 beaker.cache.short_term.key_length = 256
138 138
139 139 beaker.cache.long_term.type=memory
140 140 beaker.cache.long_term.expire=36000
141 141 beaker.cache.long_term.key_length = 256
142 142
143 143 beaker.cache.sql_cache_short.type=memory
144 144 beaker.cache.sql_cache_short.expire=10
145 145 beaker.cache.sql_cache_short.key_length = 256
146 146
147 147 beaker.cache.sql_cache_med.type=memory
148 148 beaker.cache.sql_cache_med.expire=360
149 149 beaker.cache.sql_cache_med.key_length = 256
150 150
151 151 beaker.cache.sql_cache_long.type=file
152 152 beaker.cache.sql_cache_long.expire=3600
153 153 beaker.cache.sql_cache_long.key_length = 256
154 154
155 155 ####################################
156 156 ### BEAKER SESSION ####
157 157 ####################################
158 158 ## Type of storage used for the session, current types are
159 159 ## dbm, file, memcached, database, and memory.
160 160 ## The storage uses the Container API
161 161 ## that is also used by the cache system.
162 162
163 163 ## db session example
164 164
165 165 #beaker.session.type = ext:database
166 166 #beaker.session.sa.url = postgresql://postgres:qwe@localhost/rhodecode
167 167 #beaker.session.table_name = db_session
168 168
169 169 ## encrypted cookie session, good for many instances
170 170 #beaker.session.type = cookie
171 171
172 172 beaker.session.type = file
173 173 beaker.session.key = rhodecode
174 174 # secure cookie requires AES python libraries
175 175 #beaker.session.encrypt_key = g654dcno0-9873jhgfreyu
176 176 #beaker.session.validate_key = 9712sds2212c--zxc123
177 177 beaker.session.timeout = 36000
178 178 beaker.session.httponly = true
179 179
180 180 ## uncomment for https secure cookie
181 181 beaker.session.secure = false
182 182
183 183 ##auto save the session to avoid calling .save()
184 184 beaker.session.auto = False
185 185
186 186 ##true: expire at browser close
187 187 #beaker.session.cookie_expires = 3600
188 188
189 189
190 190 ################################################################################
191 191 ## WARNING: *THE LINE BELOW MUST BE UNCOMMENTED ON A PRODUCTION ENVIRONMENT* ##
192 192 ## Debug mode will enable the interactive debugging tool, allowing ANYONE to ##
193 193 ## execute malicious code after an exception is raised. ##
194 194 ################################################################################
195 195 set debug = false
196 196
197 197 ##################################
198 198 ### LOGVIEW CONFIG ###
199 199 ##################################
200 200 logview.sqlalchemy = #faa
201 201 logview.pylons.templating = #bfb
202 202 logview.pylons.util = #eee
203 203
204 204 #########################################################
205 205 ### DB CONFIGS - EACH DB WILL HAVE IT'S OWN CONFIG ###
206 206 #########################################################
207 207 #sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode.db
208 208 sqlalchemy.db1.url = postgresql://postgres:qwe@localhost/rhodecode
209 209 sqlalchemy.db1.echo = false
210 210 sqlalchemy.db1.pool_recycle = 3600
211 211 sqlalchemy.convert_unicode = true
212 212
213 213 ################################
214 214 ### LOGGING CONFIGURATION ####
215 215 ################################
216 216 [loggers]
217 keys = root, routes, rhodecode, sqlalchemy, beaker, templates
217 keys = root, routes, rhodecode, sqlalchemy, beaker, templates, whoosh_indexer
218 218
219 219 [handlers]
220 220 keys = console, console_sql
221 221
222 222 [formatters]
223 223 keys = generic, color_formatter, color_formatter_sql
224 224
225 225 #############
226 226 ## LOGGERS ##
227 227 #############
228 228 [logger_root]
229 229 level = NOTSET
230 230 handlers = console
231 231
232 232 [logger_routes]
233 233 level = DEBUG
234 234 handlers =
235 235 qualname = routes.middleware
236 236 # "level = DEBUG" logs the route matched and routing variables.
237 237 propagate = 1
238 238
239 239 [logger_beaker]
240 240 level = DEBUG
241 241 handlers =
242 242 qualname = beaker.container
243 243 propagate = 1
244 244
245 245 [logger_templates]
246 246 level = INFO
247 247 handlers =
248 248 qualname = pylons.templating
249 249 propagate = 1
250 250
251 251 [logger_rhodecode]
252 252 level = DEBUG
253 253 handlers =
254 254 qualname = rhodecode
255 255 propagate = 1
256 256
257 257 [logger_sqlalchemy]
258 258 level = INFO
259 259 handlers = console_sql
260 260 qualname = sqlalchemy.engine
261 261 propagate = 0
262 262
263 [logger_whoosh_indexer]
264 level = DEBUG
265 handlers =
266 qualname = whoosh_indexer
267 propagate = 1
268
263 269 ##############
264 270 ## HANDLERS ##
265 271 ##############
266 272
267 273 [handler_console]
268 274 class = StreamHandler
269 275 args = (sys.stderr,)
270 276 level = INFO
271 277 formatter = generic
272 278
273 279 [handler_console_sql]
274 280 class = StreamHandler
275 281 args = (sys.stderr,)
276 282 level = WARN
277 283 formatter = generic
278 284
279 285 ################
280 286 ## FORMATTERS ##
281 287 ################
282 288
283 289 [formatter_generic]
284 290 format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
285 291 datefmt = %Y-%m-%d %H:%M:%S
286 292
287 293 [formatter_color_formatter]
288 294 class=rhodecode.lib.colored_formatter.ColorFormatter
289 295 format= %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
290 296 datefmt = %Y-%m-%d %H:%M:%S
291 297
292 298 [formatter_color_formatter_sql]
293 299 class=rhodecode.lib.colored_formatter.ColorFormatterSql
294 300 format= %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
295 301 datefmt = %Y-%m-%d %H:%M:%S
@@ -1,305 +1,311 b''
1 1 ################################################################################
2 2 ################################################################################
3 3 # RhodeCode - Pylons environment configuration #
4 4 # #
5 5 # The %(here)s variable will be replaced with the parent directory of this file#
6 6 ################################################################################
7 7
8 8 [DEFAULT]
9 9 debug = true
10 10 pdebug = false
11 11 ################################################################################
12 12 ## Uncomment and replace with the address which should receive ##
13 13 ## any error reports after application crash ##
14 14 ## Additionally those settings will be used by RhodeCode mailing system ##
15 15 ################################################################################
16 16 #email_to = admin@localhost
17 17 #error_email_from = paste_error@localhost
18 18 #app_email_from = rhodecode-noreply@localhost
19 19 #error_message =
20 20 #email_prefix = [RhodeCode]
21 21
22 22 #smtp_server = mail.server.com
23 23 #smtp_username =
24 24 #smtp_password =
25 25 #smtp_port =
26 26 #smtp_use_tls = false
27 27 #smtp_use_ssl = true
28 28 # Specify available auth parameters here (e.g. LOGIN PLAIN CRAM-MD5, etc.)
29 29 #smtp_auth =
30 30
31 31 [server:main]
32 32 ##nr of threads to spawn
33 33 threadpool_workers = 5
34 34
35 35 ##max requests before thread respawn
36 36 threadpool_max_requests = 10
37 37
38 38 ##option to use threads instead of processes
39 39 use_threadpool = true
40 40
41 41 use = egg:Paste#http
42 42 host = 127.0.0.1
43 43 port = 5000
44 44
45 45 [app:main]
46 46 use = egg:rhodecode
47 47 full_stack = true
48 48 static_files = true
49 49 lang = en
50 50 cache_dir = %(here)s/data
51 51 index_dir = %(here)s/data/index
52 52 app_instance_uuid = ${app_instance_uuid}
53 53 cut_off_limit = 256000
54 54 force_https = false
55 55 commit_parse_limit = 50
56 56 use_gravatar = true
57 57 container_auth_enabled = false
58 58 proxypass_auth_enabled = false
59 59 default_encoding = utf8
60 60
61 61 ## overwrite schema of clone url
62 62 ## available vars:
63 63 ## scheme - http/https
64 64 ## user - current user
65 65 ## pass - password
66 66 ## netloc - network location
67 67 ## path - usually repo_name
68 68
69 69 #clone_uri = {scheme}://{user}{pass}{netloc}{path}
70 70
71 71 ## issue tracking mapping for commits messages
72 72 ## comment out issue_pat, issue_server, issue_prefix to enable
73 73
74 74 ## pattern to get the issues from commit messages
75 75 ## default one used here is #<numbers> with a regex passive group for `#`
76 76 ## {id} will be all groups matched from this pattern
77 77
78 78 issue_pat = (?:\s*#)(\d+)
79 79
80 80 ## server url to the issue, each {id} will be replaced with match
81 81 ## fetched from the regex and {repo} is replaced with repository name
82 82
83 83 issue_server_link = https://myissueserver.com/{repo}/issue/{id}
84 84
85 85 ## prefix to add to the link to indicate it's a URL
86 86 ## #314 will be replaced by <issue_prefix><id>
87 87
88 88 issue_prefix = #
89 89
90 90 ## instance-id prefix
91 91 ## a prefix key for this instance used for cache invalidation when running
92 92 ## multiple instances of rhodecode, make sure it's globally unique for
93 93 ## all running rhodecode instances. Leave empty if you don't use it
94 94 instance_id =
95 95
96 96 ####################################
97 97 ### CELERY CONFIG ####
98 98 ####################################
99 99 use_celery = false
100 100 broker.host = localhost
101 101 broker.vhost = rabbitmqhost
102 102 broker.port = 5672
103 103 broker.user = rabbitmq
104 104 broker.password = qweqwe
105 105
106 106 celery.imports = rhodecode.lib.celerylib.tasks
107 107
108 108 celery.result.backend = amqp
109 109 celery.result.dburi = amqp://
110 110 celery.result.serialier = json
111 111
112 112 #celery.send.task.error.emails = true
113 113 #celery.amqp.task.result.expires = 18000
114 114
115 115 celeryd.concurrency = 2
116 116 #celeryd.log.file = celeryd.log
117 117 celeryd.log.level = debug
118 118 celeryd.max.tasks.per.child = 1
119 119
120 120 #tasks will never be sent to the queue, but executed locally instead.
121 121 celery.always.eager = false
122 122
123 123 ####################################
124 124 ### BEAKER CACHE ####
125 125 ####################################
126 126 beaker.cache.data_dir=%(here)s/data/cache/data
127 127 beaker.cache.lock_dir=%(here)s/data/cache/lock
128 128
129 129 beaker.cache.regions=super_short_term,short_term,long_term,sql_cache_short,sql_cache_med,sql_cache_long
130 130
131 131 beaker.cache.super_short_term.type=memory
132 132 beaker.cache.super_short_term.expire=10
133 133 beaker.cache.super_short_term.key_length = 256
134 134
135 135 beaker.cache.short_term.type=memory
136 136 beaker.cache.short_term.expire=60
137 137 beaker.cache.short_term.key_length = 256
138 138
139 139 beaker.cache.long_term.type=memory
140 140 beaker.cache.long_term.expire=36000
141 141 beaker.cache.long_term.key_length = 256
142 142
143 143 beaker.cache.sql_cache_short.type=memory
144 144 beaker.cache.sql_cache_short.expire=10
145 145 beaker.cache.sql_cache_short.key_length = 256
146 146
147 147 beaker.cache.sql_cache_med.type=memory
148 148 beaker.cache.sql_cache_med.expire=360
149 149 beaker.cache.sql_cache_med.key_length = 256
150 150
151 151 beaker.cache.sql_cache_long.type=file
152 152 beaker.cache.sql_cache_long.expire=3600
153 153 beaker.cache.sql_cache_long.key_length = 256
154 154
155 155 ####################################
156 156 ### BEAKER SESSION ####
157 157 ####################################
158 158 ## Type of storage used for the session, current types are
159 159 ## dbm, file, memcached, database, and memory.
160 160 ## The storage uses the Container API
161 161 ## that is also used by the cache system.
162 162
163 163 ## db session example
164 164
165 165 #beaker.session.type = ext:database
166 166 #beaker.session.sa.url = postgresql://postgres:qwe@localhost/rhodecode
167 167 #beaker.session.table_name = db_session
168 168
169 169 ## encrypted cookie session, good for many instances
170 170 #beaker.session.type = cookie
171 171
172 172 beaker.session.type = file
173 173 beaker.session.key = rhodecode
174 174 # secure cookie requires AES python libraries
175 175 #beaker.session.encrypt_key = ${app_instance_secret}
176 176 #beaker.session.validate_key = ${app_instance_secret}
177 177 beaker.session.timeout = 36000
178 178 beaker.session.httponly = true
179 179
180 180 ## uncomment for https secure cookie
181 181 beaker.session.secure = false
182 182
183 183 ##auto save the session to avoid calling .save()
184 184 beaker.session.auto = False
185 185
186 186 ##true: expire at browser close
187 187 #beaker.session.cookie_expires = 3600
188 188
189 189
190 190 ################################################################################
191 191 ## WARNING: *THE LINE BELOW MUST BE UNCOMMENTED ON A PRODUCTION ENVIRONMENT* ##
192 192 ## Debug mode will enable the interactive debugging tool, allowing ANYONE to ##
193 193 ## execute malicious code after an exception is raised. ##
194 194 ################################################################################
195 195 set debug = false
196 196
197 197 ##################################
198 198 ### LOGVIEW CONFIG ###
199 199 ##################################
200 200 logview.sqlalchemy = #faa
201 201 logview.pylons.templating = #bfb
202 202 logview.pylons.util = #eee
203 203
204 204 #########################################################
205 205 ### DB CONFIGS - EACH DB WILL HAVE IT'S OWN CONFIG ###
206 206 #########################################################
207 207
208 208 # SQLITE [default]
209 209 sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode.db
210 210
211 211 # POSTGRESQL
212 212 # sqlalchemy.db1.url = postgresql://user:pass@localhost/rhodecode
213 213
214 214 # MySQL
215 215 # sqlalchemy.db1.url = mysql://user:pass@localhost/rhodecode
216 216
217 217 # see sqlalchemy docs for others
218 218
219 219 sqlalchemy.db1.echo = false
220 220 sqlalchemy.db1.pool_recycle = 3600
221 221 sqlalchemy.convert_unicode = true
222 222
223 223 ################################
224 224 ### LOGGING CONFIGURATION ####
225 225 ################################
226 226 [loggers]
227 keys = root, routes, rhodecode, sqlalchemy, beaker, templates
227 keys = root, routes, rhodecode, sqlalchemy, beaker, templates, whoosh_indexer
228 228
229 229 [handlers]
230 230 keys = console, console_sql
231 231
232 232 [formatters]
233 233 keys = generic, color_formatter, color_formatter_sql
234 234
235 235 #############
236 236 ## LOGGERS ##
237 237 #############
238 238 [logger_root]
239 239 level = NOTSET
240 240 handlers = console
241 241
242 242 [logger_routes]
243 243 level = DEBUG
244 244 handlers =
245 245 qualname = routes.middleware
246 246 # "level = DEBUG" logs the route matched and routing variables.
247 247 propagate = 1
248 248
249 249 [logger_beaker]
250 250 level = DEBUG
251 251 handlers =
252 252 qualname = beaker.container
253 253 propagate = 1
254 254
255 255 [logger_templates]
256 256 level = INFO
257 257 handlers =
258 258 qualname = pylons.templating
259 259 propagate = 1
260 260
261 261 [logger_rhodecode]
262 262 level = DEBUG
263 263 handlers =
264 264 qualname = rhodecode
265 265 propagate = 1
266 266
267 267 [logger_sqlalchemy]
268 268 level = INFO
269 269 handlers = console_sql
270 270 qualname = sqlalchemy.engine
271 271 propagate = 0
272 272
273 [logger_whoosh_indexer]
274 level = DEBUG
275 handlers =
276 qualname = whoosh_indexer
277 propagate = 1
278
273 279 ##############
274 280 ## HANDLERS ##
275 281 ##############
276 282
277 283 [handler_console]
278 284 class = StreamHandler
279 285 args = (sys.stderr,)
280 286 level = INFO
281 287 formatter = generic
282 288
283 289 [handler_console_sql]
284 290 class = StreamHandler
285 291 args = (sys.stderr,)
286 292 level = WARN
287 293 formatter = generic
288 294
289 295 ################
290 296 ## FORMATTERS ##
291 297 ################
292 298
293 299 [formatter_generic]
294 300 format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
295 301 datefmt = %Y-%m-%d %H:%M:%S
296 302
297 303 [formatter_color_formatter]
298 304 class=rhodecode.lib.colored_formatter.ColorFormatter
299 305 format= %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
300 306 datefmt = %Y-%m-%d %H:%M:%S
301 307
302 308 [formatter_color_formatter_sql]
303 309 class=rhodecode.lib.colored_formatter.ColorFormatterSql
304 310 format= %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
305 311 datefmt = %Y-%m-%d %H:%M:%S
@@ -1,229 +1,230 b''
1 1 # -*- coding: utf-8 -*-
2 2 """
3 3 rhodecode.lib.indexers.__init__
4 4 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
5 5
6 6 Whoosh indexing module for RhodeCode
7 7
8 8 :created_on: Aug 17, 2010
9 9 :author: marcink
10 10 :copyright: (C) 2010-2012 Marcin Kuzminski <marcin@python-works.com>
11 11 :license: GPLv3, see COPYING for more details.
12 12 """
13 13 # This program is free software: you can redistribute it and/or modify
14 14 # it under the terms of the GNU General Public License as published by
15 15 # the Free Software Foundation, either version 3 of the License, or
16 16 # (at your option) any later version.
17 17 #
18 18 # This program is distributed in the hope that it will be useful,
19 19 # but WITHOUT ANY WARRANTY; without even the implied warranty of
20 20 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
21 21 # GNU General Public License for more details.
22 22 #
23 23 # You should have received a copy of the GNU General Public License
24 24 # along with this program. If not, see <http://www.gnu.org/licenses/>.
25 25 import os
26 26 import sys
27 27 import traceback
28 import logging
28 29 from os.path import dirname as dn, join as jn
29 30
30 31 #to get the rhodecode import
31 32 sys.path.append(dn(dn(dn(os.path.realpath(__file__)))))
32 33
33 34 from string import strip
34 35 from shutil import rmtree
35 36
36 37 from whoosh.analysis import RegexTokenizer, LowercaseFilter, StopFilter
37 38 from whoosh.fields import TEXT, ID, STORED, Schema, FieldType
38 39 from whoosh.index import create_in, open_dir
39 40 from whoosh.formats import Characters
40 41 from whoosh.highlight import highlight, HtmlFormatter, ContextFragmenter
41 42
42 43 from webhelpers.html.builder import escape
43 44 from sqlalchemy import engine_from_config
44 45
45 46 from rhodecode.model import init_model
46 47 from rhodecode.model.scm import ScmModel
47 48 from rhodecode.model.repo import RepoModel
48 49 from rhodecode.config.environment import load_environment
49 50 from rhodecode.lib import LANGUAGES_EXTENSIONS_MAP, LazyProperty
50 51 from rhodecode.lib.utils import BasePasterCommand, Command, add_cache
51 52
52 53 # EXTENSIONS WE WANT TO INDEX CONTENT OFF
53 54 INDEX_EXTENSIONS = LANGUAGES_EXTENSIONS_MAP.keys()
54 55
55 56 # CUSTOM ANALYZER wordsplit + lowercase filter
56 57 ANALYZER = RegexTokenizer(expression=r"\w+") | LowercaseFilter()
57 58
58 59
59 60 #INDEX SCHEMA DEFINITION
60 61 SCHEMA = Schema(
61 62 owner=TEXT(),
62 63 repository=TEXT(stored=True),
63 64 path=TEXT(stored=True),
64 65 content=FieldType(format=Characters(), analyzer=ANALYZER,
65 66 scorable=True, stored=True),
66 67 modtime=STORED(),
67 68 extension=TEXT(stored=True)
68 69 )
69 70
70 71 IDX_NAME = 'HG_INDEX'
71 72 FORMATTER = HtmlFormatter('span', between='\n<span class="break">...</span>\n')
72 73 FRAGMENTER = ContextFragmenter(200)
73 74
74 75
75 76 class MakeIndex(BasePasterCommand):
76 77
77 78 max_args = 1
78 79 min_args = 1
79 80
80 81 usage = "CONFIG_FILE"
81 82 summary = "Creates index for full text search given configuration file"
82 83 group_name = "RhodeCode"
83 84 takes_config_file = -1
84 85 parser = Command.standard_parser(verbose=True)
85 86
86 87 def command(self):
87
88 logging.config.fileConfig(self.path_to_ini_file)
88 89 from pylons import config
89 90 add_cache(config)
90 91 engine = engine_from_config(config, 'sqlalchemy.db1.')
91 92 init_model(engine)
92 93
93 94 index_location = config['index_dir']
94 95 repo_location = self.options.repo_location \
95 96 if self.options.repo_location else RepoModel().repos_path
96 97 repo_list = map(strip, self.options.repo_list.split(',')) \
97 98 if self.options.repo_list else None
98 99
99 100 #======================================================================
100 101 # WHOOSH DAEMON
101 102 #======================================================================
102 103 from rhodecode.lib.pidlock import LockHeld, DaemonLock
103 104 from rhodecode.lib.indexers.daemon import WhooshIndexingDaemon
104 105 try:
105 106 l = DaemonLock(file_=jn(dn(dn(index_location)), 'make_index.lock'))
106 107 WhooshIndexingDaemon(index_location=index_location,
107 108 repo_location=repo_location,
108 109 repo_list=repo_list)\
109 110 .run(full_index=self.options.full_index)
110 111 l.release()
111 112 except LockHeld:
112 113 sys.exit(1)
113 114
114 115 def update_parser(self):
115 116 self.parser.add_option('--repo-location',
116 117 action='store',
117 118 dest='repo_location',
118 119 help="Specifies repositories location to index OPTIONAL",
119 120 )
120 121 self.parser.add_option('--index-only',
121 122 action='store',
122 123 dest='repo_list',
123 124 help="Specifies a comma separated list of repositores "
124 125 "to build index on OPTIONAL",
125 126 )
126 127 self.parser.add_option('-f',
127 128 action='store_true',
128 129 dest='full_index',
129 130 help="Specifies that index should be made full i.e"
130 131 " destroy old and build from scratch",
131 132 default=False)
132 133
133 134
134 135 class ResultWrapper(object):
135 136 def __init__(self, search_type, searcher, matcher, highlight_items):
136 137 self.search_type = search_type
137 138 self.searcher = searcher
138 139 self.matcher = matcher
139 140 self.highlight_items = highlight_items
140 141 self.fragment_size = 200
141 142
142 143 @LazyProperty
143 144 def doc_ids(self):
144 145 docs_id = []
145 146 while self.matcher.is_active():
146 147 docnum = self.matcher.id()
147 148 chunks = [offsets for offsets in self.get_chunks()]
148 149 docs_id.append([docnum, chunks])
149 150 self.matcher.next()
150 151 return docs_id
151 152
152 153 def __str__(self):
153 154 return '<%s at %s>' % (self.__class__.__name__, len(self.doc_ids))
154 155
155 156 def __repr__(self):
156 157 return self.__str__()
157 158
158 159 def __len__(self):
159 160 return len(self.doc_ids)
160 161
161 162 def __iter__(self):
162 163 """
163 164 Allows iteration over results, and lazily generates content
164 165
165 166 *Requires* implementation of ``__getitem__`` method.
166 167 """
167 168 for docid in self.doc_ids:
168 169 yield self.get_full_content(docid)
169 170
170 171 def __getitem__(self, key):
171 172 """
172 173 Slicing of resultWrapper
173 174 """
174 175 i, j = key.start, key.stop
175 176
176 177 slices = []
177 178 for docid in self.doc_ids[i:j]:
178 179 slices.append(self.get_full_content(docid))
179 180 return slices
180 181
181 182 def get_full_content(self, docid):
182 183 res = self.searcher.stored_fields(docid[0])
183 184 f_path = res['path'][res['path'].find(res['repository']) \
184 185 + len(res['repository']):].lstrip('/')
185 186
186 187 content_short = self.get_short_content(res, docid[1])
187 188 res.update({'content_short': content_short,
188 189 'content_short_hl': self.highlight(content_short),
189 190 'f_path': f_path})
190 191
191 192 return res
192 193
193 194 def get_short_content(self, res, chunks):
194 195
195 196 return ''.join([res['content'][chunk[0]:chunk[1]] for chunk in chunks])
196 197
197 198 def get_chunks(self):
198 199 """
199 200 Smart function that implements chunking of the content
200 201 without overlapping chunks, so it doesn't highlight the same
201 202 close occurrences twice.
202 203
203 204 :param matcher:
204 205 :param size:
205 206 """
206 207 memory = [(0, 0)]
207 208 for span in self.matcher.spans():
208 209 start = span.startchar or 0
209 210 end = span.endchar or 0
210 211 start_offseted = max(0, start - self.fragment_size)
211 212 end_offseted = end + self.fragment_size
212 213
213 214 if start_offseted < memory[-1][1]:
214 215 start_offseted = memory[-1][1]
215 216 memory.append((start_offseted, end_offseted,))
216 217 yield (start_offseted, end_offseted,)
217 218
218 219 def highlight(self, content, top=5):
219 220 if self.search_type != 'content':
220 221 return ''
221 222 hl = highlight(
222 223 text=escape(content),
223 224 terms=self.highlight_items,
224 225 analyzer=ANALYZER,
225 226 fragmenter=FRAGMENTER,
226 227 formatter=FORMATTER,
227 228 top=top
228 229 )
229 230 return hl
@@ -1,634 +1,634 b''
1 1 # -*- coding: utf-8 -*-
2 2 """
3 3 rhodecode.lib.utils
4 4 ~~~~~~~~~~~~~~~~~~~
5 5
6 6 Utilities library for RhodeCode
7 7
8 8 :created_on: Apr 18, 2010
9 9 :author: marcink
10 10 :copyright: (C) 2010-2012 Marcin Kuzminski <marcin@python-works.com>
11 11 :license: GPLv3, see COPYING for more details.
12 12 """
13 13 # This program is free software: you can redistribute it and/or modify
14 14 # it under the terms of the GNU General Public License as published by
15 15 # the Free Software Foundation, either version 3 of the License, or
16 16 # (at your option) any later version.
17 17 #
18 18 # This program is distributed in the hope that it will be useful,
19 19 # but WITHOUT ANY WARRANTY; without even the implied warranty of
20 20 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
21 21 # GNU General Public License for more details.
22 22 #
23 23 # You should have received a copy of the GNU General Public License
24 24 # along with this program. If not, see <http://www.gnu.org/licenses/>.
25 25
26 26 import os
27 27 import re
28 28 import logging
29 29 import datetime
30 30 import traceback
31 31 import paste
32 32 import beaker
33 33 import tarfile
34 34 import shutil
35 35 from os.path import abspath
36 36 from os.path import dirname as dn, join as jn
37 37
38 38 from paste.script.command import Command, BadCommand
39 39
40 40 from mercurial import ui, config
41 41
42 42 from webhelpers.text import collapse, remove_formatting, strip_tags
43 43
44 44 from rhodecode.lib.vcs import get_backend
45 45 from rhodecode.lib.vcs.backends.base import BaseChangeset
46 46 from rhodecode.lib.vcs.utils.lazy import LazyProperty
47 47 from rhodecode.lib.vcs.utils.helpers import get_scm
48 48 from rhodecode.lib.vcs.exceptions import VCSError
49 49
50 50 from rhodecode.lib.caching_query import FromCache
51 51
52 52 from rhodecode.model import meta
53 53 from rhodecode.model.db import Repository, User, RhodeCodeUi, \
54 54 UserLog, RepoGroup, RhodeCodeSetting, UserRepoGroupToPerm
55 55 from rhodecode.model.meta import Session
56 56 from rhodecode.model.repos_group import ReposGroupModel
57 57 from rhodecode.lib import safe_str, safe_unicode
58 58
59 59 log = logging.getLogger(__name__)
60 60
61 61 REMOVED_REPO_PAT = re.compile(r'rm__\d{8}_\d{6}_\d{6}__.*')
62 62
63 63
64 64 def recursive_replace(str_, replace=' '):
65 65 """Recursive replace of given sign to just one instance
66 66
67 67 :param str_: given string
68 68 :param replace: char to find and replace multiple instances
69 69
70 70 Examples::
71 71 >>> recursive_replace("Mighty---Mighty-Bo--sstones",'-')
72 72 'Mighty-Mighty-Bo-sstones'
73 73 """
74 74
75 75 if str_.find(replace * 2) == -1:
76 76 return str_
77 77 else:
78 78 str_ = str_.replace(replace * 2, replace)
79 79 return recursive_replace(str_, replace)
80 80
81 81
82 82 def repo_name_slug(value):
83 83 """Return slug of name of repository
84 84 This function is called on each creation/modification
85 85 of repository to prevent bad names in repo
86 86 """
87 87
88 88 slug = remove_formatting(value)
89 89 slug = strip_tags(slug)
90 90
91 91 for c in """=[]\;'"<>,/~!@#$%^&*()+{}|: """:
92 92 slug = slug.replace(c, '-')
93 93 slug = recursive_replace(slug, '-')
94 94 slug = collapse(slug, '-')
95 95 return slug
96 96
97 97
98 98 def get_repo_slug(request):
99 99 _repo = request.environ['pylons.routes_dict'].get('repo_name')
100 100 if _repo:
101 101 _repo = _repo.rstrip('/')
102 102 return _repo
103 103
104 104
105 105 def get_repos_group_slug(request):
106 106 _group = request.environ['pylons.routes_dict'].get('group_name')
107 107 if _group:
108 108 _group = _group.rstrip('/')
109 109 return _group
110 110
111 111
112 112 def action_logger(user, action, repo, ipaddr='', sa=None, commit=False):
113 113 """
114 114 Action logger for various actions made by users
115 115
116 116 :param user: user that made this action, can be a unique username string or
117 117 object containing user_id attribute
118 118 :param action: action to log, should be one of predefined unique actions for
119 119 easy translations
120 120 :param repo: string name of repository or object containing repo_id,
121 121 that action was made on
122 122 :param ipaddr: optional ip address from what the action was made
123 123 :param sa: optional sqlalchemy session
124 124
125 125 """
126 126
127 127 if not sa:
128 128 sa = meta.Session
129 129
130 130 try:
131 131 if hasattr(user, 'user_id'):
132 132 user_obj = user
133 133 elif isinstance(user, basestring):
134 134 user_obj = User.get_by_username(user)
135 135 else:
136 136 raise Exception('You have to provide user object or username')
137 137
138 138 if hasattr(repo, 'repo_id'):
139 139 repo_obj = Repository.get(repo.repo_id)
140 140 repo_name = repo_obj.repo_name
141 141 elif isinstance(repo, basestring):
142 142 repo_name = repo.lstrip('/')
143 143 repo_obj = Repository.get_by_repo_name(repo_name)
144 144 else:
145 145 raise Exception('You have to provide repository to action logger')
146 146
147 147 user_log = UserLog()
148 148 user_log.user_id = user_obj.user_id
149 149 user_log.action = action
150 150
151 151 user_log.repository_id = repo_obj.repo_id
152 152 user_log.repository_name = repo_name
153 153
154 154 user_log.action_date = datetime.datetime.now()
155 155 user_log.user_ip = ipaddr
156 156 sa.add(user_log)
157 157
158 158 log.info(
159 159 'Adding user %s, action %s on %s' % (user_obj, action,
160 160 safe_unicode(repo))
161 161 )
162 162 if commit:
163 163 sa.commit()
164 164 except:
165 165 log.error(traceback.format_exc())
166 166 raise
167 167
168 168
169 169 def get_repos(path, recursive=False):
170 170 """
171 171 Scans the given path for repos and returns (name, (type, path)) tuples
172 172
173 173 :param path: path to scan for repositories
174 174 :param recursive: recursive search and return names with subdirs in front
175 175 """
176 176
177 177 # remove ending slash for better results
178 178 path = path.rstrip(os.sep)
179 179
180 180 def _get_repos(p):
181 181 if not os.access(p, os.W_OK):
182 182 return
183 183 for dirpath in os.listdir(p):
184 184 if os.path.isfile(os.path.join(p, dirpath)):
185 185 continue
186 186 cur_path = os.path.join(p, dirpath)
187 187 try:
188 188 scm_info = get_scm(cur_path)
189 189 yield scm_info[1].split(path, 1)[-1].lstrip(os.sep), scm_info
190 190 except VCSError:
191 191 if not recursive:
192 192 continue
193 193 #check if this dir contains other repos for recursive scan
194 194 rec_path = os.path.join(p, dirpath)
195 195 if os.path.isdir(rec_path):
196 196 for inner_scm in _get_repos(rec_path):
197 197 yield inner_scm
198 198
199 199 return _get_repos(path)
200 200
201 201
202 202 def is_valid_repo(repo_name, base_path):
203 203 """
204 204 Returns True if the given path is a valid repository, False otherwise
205 205
206 206 :param repo_name:
207 207 :param base_path:
208 208
209 209 :return True: if given path is a valid repository
210 210 """
211 211 full_path = os.path.join(safe_str(base_path), safe_str(repo_name))
212 212
213 213 try:
214 214 get_scm(full_path)
215 215 return True
216 216 except VCSError:
217 217 return False
218 218
219 219
220 220 def is_valid_repos_group(repos_group_name, base_path):
221 221 """
222 222 Returns True if the given path is a repos group, False otherwise
223 223
224 224 :param repo_name:
225 225 :param base_path:
226 226 """
227 227 full_path = os.path.join(safe_str(base_path), safe_str(repos_group_name))
228 228
229 229 # check if it's not a repo
230 230 if is_valid_repo(repos_group_name, base_path):
231 231 return False
232 232
233 233 # check if it's a valid path
234 234 if os.path.isdir(full_path):
235 235 return True
236 236
237 237 return False
238 238
239 239
240 240 def ask_ok(prompt, retries=4, complaint='Yes or no, please!'):
241 241 while True:
242 242 ok = raw_input(prompt)
243 243 if ok in ('y', 'ye', 'yes'):
244 244 return True
245 245 if ok in ('n', 'no', 'nop', 'nope'):
246 246 return False
247 247 retries = retries - 1
248 248 if retries < 0:
249 249 raise IOError
250 250 print complaint
251 251
252 252 #propagated from mercurial documentation
253 253 ui_sections = ['alias', 'auth',
254 254 'decode/encode', 'defaults',
255 255 'diff', 'email',
256 256 'extensions', 'format',
257 257 'merge-patterns', 'merge-tools',
258 258 'hooks', 'http_proxy',
259 259 'smtp', 'patch',
260 260 'paths', 'profiling',
261 261 'server', 'trusted',
262 262 'ui', 'web', ]
263 263
264 264
265 265 def make_ui(read_from='file', path=None, checkpaths=True):
266 266 """A function that will read python rc files or database
267 267 and make a mercurial ui object from the read options
268 268
269 269 :param path: path to mercurial config file
270 270 :param checkpaths: check the path
271 271 :param read_from: read from 'file' or 'db'
272 272 """
273 273
274 274 baseui = ui.ui()
275 275
276 276 # clean the baseui object
277 277 baseui._ocfg = config.config()
278 278 baseui._ucfg = config.config()
279 279 baseui._tcfg = config.config()
280 280
281 281 if read_from == 'file':
282 282 if not os.path.isfile(path):
283 283 log.debug('hgrc file is not present at %s skipping...' % path)
284 284 return False
285 285 log.debug('reading hgrc from %s' % path)
286 286 cfg = config.config()
287 287 cfg.read(path)
288 288 for section in ui_sections:
289 289 for k, v in cfg.items(section):
290 290 log.debug('settings ui from file[%s]%s:%s' % (section, k, v))
291 291 baseui.setconfig(section, k, v)
292 292
293 293 elif read_from == 'db':
294 294 sa = meta.Session
295 295 ret = sa.query(RhodeCodeUi)\
296 296 .options(FromCache("sql_cache_short", "get_hg_ui_settings"))\
297 297 .all()
298 298
299 299 hg_ui = ret
300 300 for ui_ in hg_ui:
301 301 if ui_.ui_active:
302 302 log.debug('settings ui from db[%s]%s:%s', ui_.ui_section,
303 303 ui_.ui_key, ui_.ui_value)
304 304 baseui.setconfig(ui_.ui_section, ui_.ui_key, ui_.ui_value)
305 305
306 306 meta.Session.remove()
307 307 return baseui
308 308
309 309
310 310 def set_rhodecode_config(config):
311 311 """
312 312 Updates pylons config with new settings from database
313 313
314 314 :param config:
315 315 """
316 316 hgsettings = RhodeCodeSetting.get_app_settings()
317 317
318 318 for k, v in hgsettings.items():
319 319 config[k] = v
320 320
321 321
322 322 def invalidate_cache(cache_key, *args):
323 323 """
324 324 Puts cache invalidation task into db for
325 325 further global cache invalidation
326 326 """
327 327
328 328 from rhodecode.model.scm import ScmModel
329 329
330 330 if cache_key.startswith('get_repo_cached_'):
331 331 name = cache_key.split('get_repo_cached_')[-1]
332 332 ScmModel().mark_for_invalidation(name)
333 333
334 334
335 335 class EmptyChangeset(BaseChangeset):
336 336 """
337 337 A dummy empty changeset. It's possible to pass a hash when creating
338 338 an EmptyChangeset
339 339 """
340 340
341 341 def __init__(self, cs='0' * 40, repo=None, requested_revision=None,
342 342 alias=None):
343 343 self._empty_cs = cs
344 344 self.revision = -1
345 345 self.message = ''
346 346 self.author = ''
347 347 self.date = ''
348 348 self.repository = repo
349 349 self.requested_revision = requested_revision
350 350 self.alias = alias
351 351
352 352 @LazyProperty
353 353 def raw_id(self):
354 354 """
355 355 Returns raw string identifying this changeset, useful for web
356 356 representation.
357 357 """
358 358
359 359 return self._empty_cs
360 360
361 361 @LazyProperty
362 362 def branch(self):
363 363 return get_backend(self.alias).DEFAULT_BRANCH_NAME
364 364
365 365 @LazyProperty
366 366 def short_id(self):
367 367 return self.raw_id[:12]
368 368
369 369 def get_file_changeset(self, path):
370 370 return self
371 371
372 372 def get_file_content(self, path):
373 373 return u''
374 374
375 375 def get_file_size(self, path):
376 376 return 0
377 377
378 378
379 379 def map_groups(groups):
380 380 """
381 381 Checks for group existence, and creates group structures.
382 382 It returns the last group in the structure
383 383
384 384 :param groups: list of groups structure
385 385 """
386 386 sa = meta.Session
387 387
388 388 parent = None
389 389 group = None
390 390
391 391 # last element is repo in nested groups structure
392 392 groups = groups[:-1]
393 393 rgm = ReposGroupModel(sa)
394 394 for lvl, group_name in enumerate(groups):
395 395 group_name = '/'.join(groups[:lvl] + [group_name])
396 396 group = RepoGroup.get_by_group_name(group_name)
397 397 desc = '%s group' % group_name
398 398
399 399 # # WTF that doesn't work !?
400 400 # if group is None:
401 401 # group = rgm.create(group_name, desc, parent, just_db=True)
402 402 # sa.commit()
403 403
404 404 # skip folders that are now removed repos
405 405 if REMOVED_REPO_PAT.match(group_name):
406 406 break
407 407
408 408 if group is None:
409 409 log.debug('creating group level: %s group_name: %s' % (lvl, group_name))
410 410 group = RepoGroup(group_name, parent)
411 411 group.group_description = desc
412 412 sa.add(group)
413 413 rgm._create_default_perms(group)
414 414 sa.commit()
415 415 parent = group
416 416 return group
417 417
418 418
419 419 def repo2db_mapper(initial_repo_list, remove_obsolete=False):
420 420 """
421 421 maps all repos given in initial_repo_list, non existing repositories
422 422 are created; if remove_obsolete is True it also checks for db entries
423 423 that are not in initial_repo_list and removes them.
424 424
425 425 :param initial_repo_list: list of repositories found by scanning methods
426 426 :param remove_obsolete: check for obsolete entries in database
427 427 """
428 428 from rhodecode.model.repo import RepoModel
429 429 sa = meta.Session
430 430 rm = RepoModel()
431 431 user = sa.query(User).filter(User.admin == True).first()
432 432 if user is None:
433 433 raise Exception('Missing administrative account !')
434 434 added = []
435 435
436 436 for name, repo in initial_repo_list.items():
437 437 group = map_groups(name.split(Repository.url_sep()))
438 438 if not rm.get_by_repo_name(name, cache=False):
439 439 log.info('repository %s not found creating default' % name)
440 440 added.append(name)
441 441 form_data = {
442 442 'repo_name': name,
443 443 'repo_name_full': name,
444 444 'repo_type': repo.alias,
445 445 'description': repo.description \
446 446 if repo.description != 'unknown' else '%s repository' % name,
447 447 'private': False,
448 448 'group_id': getattr(group, 'group_id', None)
449 449 }
450 450 rm.create(form_data, user, just_db=True)
451 451 sa.commit()
452 452 removed = []
453 453 if remove_obsolete:
454 454 #remove from database those repositories that are not in the filesystem
455 455 for repo in sa.query(Repository).all():
456 456 if repo.repo_name not in initial_repo_list.keys():
457 457 removed.append(repo.repo_name)
458 458 sa.delete(repo)
459 459 sa.commit()
460 460
461 461 return added, removed
462 462
463 463
464 464 # set cache regions for beaker so celery can utilise it
465 465 def add_cache(settings):
466 466 cache_settings = {'regions': None}
467 467 for key in settings.keys():
468 468 for prefix in ['beaker.cache.', 'cache.']:
469 469 if key.startswith(prefix):
470 470 name = key.split(prefix)[1].strip()
471 471 cache_settings[name] = settings[key].strip()
472 472 if cache_settings['regions']:
473 473 for region in cache_settings['regions'].split(','):
474 474 region = region.strip()
475 475 region_settings = {}
476 476 for key, value in cache_settings.items():
477 477 if key.startswith(region):
478 478 region_settings[key.split('.')[1]] = value
479 479 region_settings['expire'] = int(region_settings.get('expire',
480 480 60))
481 481 region_settings.setdefault('lock_dir',
482 482 cache_settings.get('lock_dir'))
483 483 region_settings.setdefault('data_dir',
484 484 cache_settings.get('data_dir'))
485 485
486 486 if 'type' not in region_settings:
487 487 region_settings['type'] = cache_settings.get('type',
488 488 'memory')
489 489 beaker.cache.cache_regions[region] = region_settings
490 490
491 491
492 492 #==============================================================================
493 493 # TEST FUNCTIONS AND CREATORS
494 494 #==============================================================================
495 495 def create_test_index(repo_location, config, full_index):
496 496 """
497 497 Makes default test index
498 498
499 499 :param config: test config
500 500 :param full_index:
501 501 """
502 502
503 503 from rhodecode.lib.indexers.daemon import WhooshIndexingDaemon
504 504 from rhodecode.lib.pidlock import DaemonLock, LockHeld
505 505
506 506 repo_location = repo_location
507 507
508 508 index_location = os.path.join(config['app_conf']['index_dir'])
509 509 if not os.path.exists(index_location):
510 510 os.makedirs(index_location)
511 511
512 512 try:
513 513 l = DaemonLock(file_=jn(dn(index_location), 'make_index.lock'))
514 514 WhooshIndexingDaemon(index_location=index_location,
515 515 repo_location=repo_location)\
516 516 .run(full_index=full_index)
517 517 l.release()
518 518 except LockHeld:
519 519 pass
520 520
521 521
522 522 def create_test_env(repos_test_path, config):
523 523 """
524 524 Makes a fresh database and
525 525 installs a test repository into a tmp dir
526 526 """
527 527 from rhodecode.lib.db_manage import DbManage
528 528 from rhodecode.tests import HG_REPO, TESTS_TMP_PATH
529 529
530 530 # PART ONE create db
531 531 dbconf = config['sqlalchemy.db1.url']
532 532 log.debug('making test db %s' % dbconf)
533 533
534 534 # create test dir if it doesn't exist
535 535 if not os.path.isdir(repos_test_path):
536 536 log.debug('Creating testdir %s' % repos_test_path)
537 537 os.makedirs(repos_test_path)
538 538
539 539 dbmanage = DbManage(log_sql=True, dbconf=dbconf, root=config['here'],
540 540 tests=True)
541 541 dbmanage.create_tables(override=True)
542 542 dbmanage.create_settings(dbmanage.config_prompt(repos_test_path))
543 543 dbmanage.create_default_user()
544 544 dbmanage.admin_prompt()
545 545 dbmanage.create_permissions()
546 546 dbmanage.populate_default_permissions()
547 547 Session.commit()
548 548 # PART TWO make test repo
549 549 log.debug('making test vcs repositories')
550 550
551 551 idx_path = config['app_conf']['index_dir']
552 552 data_path = config['app_conf']['cache_dir']
553 553
554 554 #clean index and data
555 555 if idx_path and os.path.exists(idx_path):
556 556 log.debug('remove %s' % idx_path)
557 557 shutil.rmtree(idx_path)
558 558
559 559 if data_path and os.path.exists(data_path):
560 560 log.debug('remove %s' % data_path)
561 561 shutil.rmtree(data_path)
562 562
563 563 #CREATE DEFAULT HG REPOSITORY
564 564 cur_dir = dn(dn(abspath(__file__)))
565 565 tar = tarfile.open(jn(cur_dir, 'tests', "vcs_test_hg.tar.gz"))
566 566 tar.extractall(jn(TESTS_TMP_PATH, HG_REPO))
567 567 tar.close()
568 568
569 569
570 570 #==============================================================================
571 571 # PASTER COMMANDS
572 572 #==============================================================================
573 573 class BasePasterCommand(Command):
574 574 """
575 575 Abstract Base Class for paster commands.
576 576
577 577 The celery commands are somewhat aggressive about loading
578 578 celery.conf, and since our module sets the `CELERY_LOADER`
579 579 environment variable to our loader, we have to bootstrap a bit and
580 580 make sure we've had a chance to load the pylons config off of the
581 581 command line, otherwise everything fails.
582 582 """
583 583 min_args = 1
584 584 min_args_error = "Please provide a paster config file as an argument."
585 585 takes_config_file = 1
586 586 requires_config_file = True
587 587
588 588 def notify_msg(self, msg, log=False):
589 589 """Make a notification to user, additionally if logger is passed
590 590 it logs this action using given logger
591 591
592 592 :param msg: message that will be printed to user
593 593 :param log: logging instance, to use to additionally log this message
594 594
595 595 """
596 596 if log and isinstance(log, logging):
597 597 log(msg)
598 598
599 599 def run(self, args):
600 600 """
601 601 Overrides Command.run
602 602
603 603 Checks for a config file argument and loads it.
604 604 """
605 605 if len(args) < self.min_args:
606 606 raise BadCommand(
607 607 self.min_args_error % {'min_args': self.min_args,
608 608 'actual_args': len(args)})
609 609
610 610 # Decrement because we're going to lob off the first argument.
611 611 # @@ This is hacky
612 612 self.min_args -= 1
613 613 self.bootstrap_config(args[0])
614 614 self.update_parser()
615 615 return super(BasePasterCommand, self).run(args[1:])
616 616
617 617 def update_parser(self):
618 618 """
619 619 Abstract method. Allows for the class's parser to be updated
620 620 before the superclass's `run` method is called. Necessary to
621 621 allow options/arguments to be passed through to the underlying
622 622 celery command.
623 623 """
624 624 raise NotImplementedError("Abstract Method.")
625 625
626 626 def bootstrap_config(self, conf):
627 627 """
628 628 Loads the pylons configuration.
629 629 """
630 630 from pylons import config as pylonsconfig
631 631
632 path_to_ini_file = os.path.realpath(conf)
633 conf = paste.deploy.appconfig('config:' + path_to_ini_file)
632 self.path_to_ini_file = os.path.realpath(conf)
633 conf = paste.deploy.appconfig('config:' + self.path_to_ini_file)
634 634 pylonsconfig.init_app(conf.global_conf, conf.local_conf)
General Comments 0
You need to be logged in to leave comments. Login now