Celery is configured by the .ini files and run from paster now...
marcink
r776:f6c613fb beta
@@ -0,0 +1,16 b''
1 """
2 Automatically sets the environment variable `CELERY_LOADER` to
3 `rhodecode.lib.celerypylons.loader.PylonsLoader`. This ensures the loader is
4 specified when accessing the rest of this package, and allows celery
5 to be installed in a webapp just by importing rhodecode.lib.celerypylons::
6
7 import rhodecode.lib.celerypylons
8
9 """
10 import os
11 import warnings
12
13 CELERYPYLONS_LOADER = 'rhodecode.lib.celerypylons.loader.PylonsLoader'
14 if os.environ.get('CELERY_LOADER', CELERYPYLONS_LOADER) != CELERYPYLONS_LOADER:
15 warnings.warn("'CELERY_LOADER' environment variable will be overridden by celery-pylons.")
16 os.environ['CELERY_LOADER'] = CELERYPYLONS_LOADER
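
How this takes effect: the module works purely by import side effect, so it has to be imported before anything from celery. A minimal sketch of the intended ordering (the expected value is the constant defined above):

    import os

    # Importing the package sets CELERY_LOADER as a side effect.
    import rhodecode.lib.celerypylons

    assert os.environ['CELERY_LOADER'] == \
        'rhodecode.lib.celerypylons.loader.PylonsLoader'

    # Only import celery afterwards; its loader machinery reads
    # CELERY_LOADER at configuration time and picks up PylonsLoader.
    import celery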
@@ -0,0 +1,143 b''
1 import os
2 from paste.script.command import Command, BadCommand
3 import paste.deploy
4 from pylons import config
5
6
7 __all__ = ['CeleryDaemonCommand', 'CeleryBeatCommand',
8 'CAMQPAdminCommand', 'CeleryEventCommand']
9
10
11 class CeleryCommand(Command):
12 """
13 Abstract Base Class for celery commands.
14
15 The celery commands are somewhat aggressive about loading
16 celery.conf, and since our module sets the `CELERY_LOADER`
17 environment variable to our loader, we have to bootstrap a bit and
18 make sure we've had a chance to load the pylons config off of the
19 command line, otherwise everything fails.
20 """
21 min_args = 1
22 min_args_error = "Please provide a paster config file as an argument."
23 takes_config_file = 1
24 requires_config_file = True
25
26 def run(self, args):
27 """
28 Overrides Command.run
29
30 Checks for a config file argument and loads it.
31 """
32 if len(args) < self.min_args:
33 raise BadCommand(
34 self.min_args_error % {'min_args': self.min_args,
35 'actual_args': len(args)})
36 # Decrement because we're going to lob off the first argument.
37 # @@ This is hacky
38 self.min_args -= 1
39 self.bootstrap_config(args[0])
40 self.update_parser()
41 return super(CeleryCommand, self).run(args[1:])
42
43 def update_parser(self):
44 """
45 Abstract method. Allows for the class's parser to be updated
46 before the superclass's `run` method is called. Necessary to
47 allow options/arguments to be passed through to the underlying
48 celery command.
49 """
50 raise NotImplementedError("Abstract Method.")
51
52 def bootstrap_config(self, conf):
53 """
54 Loads the pylons configuration.
55 """
56 path_to_ini_file = os.path.realpath(conf)
57 conf = paste.deploy.appconfig('config:' + path_to_ini_file)
58 config.init_app(conf.global_conf, conf.local_conf)
59
60
61 class CeleryDaemonCommand(CeleryCommand):
62 """Start the celery worker
63
64 Starts the celery worker that uses a paste.deploy configuration
65 file.
66 """
67 usage = 'CONFIG_FILE [celeryd options...]'
68 summary = __doc__.splitlines()[0]
69 description = "".join(__doc__.splitlines()[2:])
70
71 parser = Command.standard_parser(quiet=True)
72
73 def update_parser(self):
74 from celery.bin import celeryd
75 for x in celeryd.WorkerCommand().get_options():
76 self.parser.add_option(x)
77
78 def command(self):
79 from celery.bin import celeryd
80 return celeryd.WorkerCommand().run(**vars(self.options))
81
82
83 class CeleryBeatCommand(CeleryCommand):
84 """Start the celery beat server
85
86 Starts the celery beat server using a paste.deploy configuration
87 file.
88 """
89 usage = 'CONFIG_FILE [celerybeat options...]'
90 summary = __doc__.splitlines()[0]
91 description = "".join(__doc__.splitlines()[2:])
92
93 parser = Command.standard_parser(quiet=True)
94
95 def update_parser(self):
96 from celery.bin import celerybeat
97 for x in celerybeat.BeatCommand().get_options():
98 self.parser.add_option(x)
99
100 def command(self):
101 from celery.bin import celerybeat
102 return celerybeat.BeatCommand().run(**vars(self.options))
103
104 class CAMQPAdminCommand(CeleryCommand):
105 """CAMQP Admin
106
107 CAMQP celery admin tool.
108 """
109 usage = 'CONFIG_FILE [camqadm options...]'
110 summary = __doc__.splitlines()[0]
111 description = "".join(__doc__.splitlines()[2:])
112
113 parser = Command.standard_parser(quiet=True)
114
115 def update_parser(self):
116 from celery.bin import camqadm
117 for x in camqadm.OPTION_LIST:
118 self.parser.add_option(x)
119
120 def command(self):
121 from celery.bin import camqadm
122 return camqadm.camqadm(*self.args, **vars(self.options))
123
124
125 class CeleryEventCommand(CeleryCommand):
126 """Celery event commandd.
127
128 Capture celery events.
129 """
130 usage = 'CONFIG_FILE [celeryev options...]'
131 summary = __doc__.splitlines()[0]
132 description = "".join(__doc__.splitlines()[2:])
133
134 parser = Command.standard_parser(quiet=True)
135
136 def update_parser(self):
137 from celery.bin import celeryev
138 for x in celeryev.OPTION_LIST:
139 self.parser.add_option(x)
140
141 def command(self):
142 from celery.bin import celeryev
143 return celeryev.run_celeryev(**vars(self.options))
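
These classes only become paster subcommands once they are registered as setuptools entry points; that registration is not part of this changeset, so the snippet below is an assumed sketch (the entry-point names and the trimmed-down setup() call are illustrative only):

    from setuptools import setup

    setup(
        name='rhodecode',
        # ...other arguments omitted...
        entry_points="""
        [paste.paster_command]
        celeryd = rhodecode.lib.celerypylons.commands:CeleryDaemonCommand
        celerybeat = rhodecode.lib.celerypylons.commands:CeleryBeatCommand
        camqadm = rhodecode.lib.celerypylons.commands:CAMQPAdminCommand
        celeryev = rhodecode.lib.celerypylons.commands:CeleryEventCommand
        """,
    )

Once registered, the worker is started against an .ini file, e.g. "paster celeryd development.ini", which is exactly the bootstrap path CeleryCommand.run() expects: the config file first, celery options after it.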
@@ -0,0 +1,55 b''
1 from celery.loaders.base import BaseLoader
2 from pylons import config
3
4 to_pylons = lambda x: x.replace('_', '.').lower()
5 to_celery = lambda x: x.replace('.', '_').upper()
6
7 LIST_PARAMS = """CELERY_IMPORTS ADMINS ROUTES""".split()
8
9
10 class PylonsSettingsProxy(object):
11 """Pylons Settings Proxy
12
13 Proxies settings from pylons.config
14
15 """
16 def __getattr__(self, key):
17 pylons_key = to_pylons(key)
18 try:
19 value = config[pylons_key]
20 if key in LIST_PARAMS: return value.split()
21 return self.type_converter(value)
22 except KeyError:
23 raise AttributeError(pylons_key)
24
25 def __setattr__(self, key, value):
26 pylons_key = to_pylons(key)
27 config[pylons_key] = value
28
29
30 def type_converter(self, value):
31 #cast to int
32 if value.isdigit():
33 return int(value)
34
35 #cast to bool
36 if value.lower() in ['true', 'false']:
37 return value.lower() == 'true'
38
39 return value
40
41 class PylonsLoader(BaseLoader):
42 """Pylons celery loader
43
44 Maps the celery config onto pylons.config
45
46 """
47 def read_configuration(self):
48 self.configured = True
49 return PylonsSettingsProxy()
50
51 def on_worker_init(self):
52 """
53 Import task modules.
54 """
55 self.import_default_modules()
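
The proxy resolves Celery-style names by renaming them on the fly: upper-case names with underscores become lower-case dotted keys, so BROKER_HOST is looked up as broker.host in the flat Pylons config. A standalone sketch of that mapping, using a plain dict in place of pylons.config:

    to_pylons = lambda x: x.replace('_', '.').lower()

    # Stand-in for pylons.config, as populated from an [app:main] section.
    ini_settings = {
        'broker.host': 'localhost',
        'celeryd.concurrency': '2',
        'celery.always.eager': 'false',
    }

    assert to_pylons('BROKER_HOST') == 'broker.host'
    assert ini_settings[to_pylons('CELERYD_CONCURRENCY')] == '2'
    # type_converter() then turns '2' into the int 2 and 'false' into
    # False before the value reaches celery.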
@@ -1,187 +1,214 b''
1 1 ################################################################################
2 2 ################################################################################
3 # rhodecode - Pylons environment configuration #
3 # RhodeCode - Pylons environment configuration #
4 4 # #
5 5 # The %(here)s variable will be replaced with the parent directory of this file#
6 6 ################################################################################
7 7
8 8 [DEFAULT]
9 9 debug = true
10 10 ################################################################################
11 11 ## Uncomment and replace with the address which should receive ##
12 ## any error reports after application crash ##
13 ## Additionally those settings will be used by rhodecode mailing system ##
12 ## any error reports after application crash ##
13 ## Additionally those settings will be used by RhodeCode mailing system ##
14 14 ################################################################################
15 15 #email_to = admin@localhost
16 16 #error_email_from = paste_error@localhost
17 17 #app_email_from = rhodecode-noreply@localhost
18 18 #error_message =
19 19
20 20 #smtp_server = mail.server.com
21 21 #smtp_username =
22 #smtp_password =
22 #smtp_password =
23 23 #smtp_port =
24 #smtp_use_tls =
24 #smtp_use_tls = false
25 #smtp_use_ssl = true
25 26
26 27 [server:main]
27 28 ##nr of threads to spawn
28 29 threadpool_workers = 5
29 30
30 ##max request before
31 ##max request before thread respawn
31 32 threadpool_max_requests = 6
32 33
33 34 ##option to use threads of process
34 35 use_threadpool = false
35 36
36 37 use = egg:Paste#http
37 38 host = 0.0.0.0
38 39 port = 5000
39 40
40 41 [app:main]
41 42 use = egg:rhodecode
42 43 full_stack = true
43 44 static_files = true
44 45 lang=en
45 46 cache_dir = %(here)s/data
46 47 index_dir = %(here)s/data/index
47 48
48 49 ####################################
50 ### CELERY CONFIG ####
51 ####################################
52 use_celery = false
53 broker.host = localhost
54 broker.vhost = rabbitmqhost
55 broker.port = 5672
56 broker.user = rabbitmq
57 broker.password = qweqwe
58
59 celery.imports = rhodecode.lib.celerylib.tasks
60
61 celery.result.backend = amqp
62 celery.result.dburi = amqp://
63 celery.result.serializer = json
64
65 #celery.send.task.error.emails = true
66 #celery.amqp.task.result.expires = 18000
67
68 celeryd.concurrency = 2
69 #celeryd.log.file = celeryd.log
70 celeryd.log.level = debug
71 celeryd.max.tasks.per.child = 3
72
73 #tasks will never be sent to the queue, but executed locally instead.
74 celery.always.eager = false
75
76 ####################################
49 77 ### BEAKER CACHE ####
50 78 ####################################
51 79 beaker.cache.data_dir=/%(here)s/data/cache/data
52 80 beaker.cache.lock_dir=/%(here)s/data/cache/lock
53 81 beaker.cache.regions=super_short_term,short_term,long_term,sql_cache_short,sql_cache_med,sql_cache_long
54 82
55 83 beaker.cache.super_short_term.type=memory
56 84 beaker.cache.super_short_term.expire=10
57 85
58 86 beaker.cache.short_term.type=memory
59 87 beaker.cache.short_term.expire=60
60 88
61 89 beaker.cache.long_term.type=memory
62 90 beaker.cache.long_term.expire=36000
63 91
64
65 92 beaker.cache.sql_cache_short.type=memory
66 beaker.cache.sql_cache_short.expire=5
93 beaker.cache.sql_cache_short.expire=10
67 94
68 95 beaker.cache.sql_cache_med.type=memory
69 96 beaker.cache.sql_cache_med.expire=360
70 97
71 98 beaker.cache.sql_cache_long.type=file
72 99 beaker.cache.sql_cache_long.expire=3600
73 100
74 101 ####################################
75 102 ### BEAKER SESSION ####
76 103 ####################################
77 104 ## Type of storage used for the session, current types are
78 ## "dbm", "file", "memcached", "database", and "memory".
105 ## dbm, file, memcached, database, and memory.
79 106 ## The storage uses the Container API
80 107 ##that is also used by the cache system.
81 108 beaker.session.type = file
82 109
83 110 beaker.session.key = rhodecode
84 111 beaker.session.secret = g654dcno0-9873jhgfreyu
85 112 beaker.session.timeout = 36000
86 113
87 114 ##auto save the session to not to use .save()
88 115 beaker.session.auto = False
89 116
90 117 ##true exire at browser close
91 118 #beaker.session.cookie_expires = 3600
92 119
93 120
94 121 ################################################################################
95 122 ## WARNING: *THE LINE BELOW MUST BE UNCOMMENTED ON A PRODUCTION ENVIRONMENT* ##
96 123 ## Debug mode will enable the interactive debugging tool, allowing ANYONE to ##
97 124 ## execute malicious code after an exception is raised. ##
98 125 ################################################################################
99 126 #set debug = false
100 127
101 128 ##################################
102 129 ### LOGVIEW CONFIG ###
103 130 ##################################
104 131 logview.sqlalchemy = #faa
105 132 logview.pylons.templating = #bfb
106 133 logview.pylons.util = #eee
107 134
108 135 #########################################################
109 136 ### DB CONFIGS - EACH DB WILL HAVE IT'S OWN CONFIG ###
110 137 #########################################################
111 138 sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode.db
112 139 #sqlalchemy.db1.echo = False
113 140 #sqlalchemy.db1.pool_recycle = 3600
114 141 sqlalchemy.convert_unicode = true
115 142
116 143 ################################
117 144 ### LOGGING CONFIGURATION ####
118 145 ################################
119 146 [loggers]
120 147 keys = root, routes, rhodecode, sqlalchemy,beaker,templates
121 148
122 149 [handlers]
123 150 keys = console
124 151
125 152 [formatters]
126 153 keys = generic,color_formatter
127 154
128 155 #############
129 156 ## LOGGERS ##
130 157 #############
131 158 [logger_root]
132 159 level = NOTSET
133 160 handlers = console
134 161
135 162 [logger_routes]
136 163 level = DEBUG
137 164 handlers = console
138 165 qualname = routes.middleware
139 166 # "level = DEBUG" logs the route matched and routing variables.
140 167 propagate = 0
141 168
142 169 [logger_beaker]
143 170 level = ERROR
144 171 handlers = console
145 172 qualname = beaker.container
146 173 propagate = 0
147 174
148 175 [logger_templates]
149 176 level = INFO
150 177 handlers = console
151 178 qualname = pylons.templating
152 179 propagate = 0
153 180
154 181 [logger_rhodecode]
155 182 level = DEBUG
156 183 handlers = console
157 184 qualname = rhodecode
158 185 propagate = 0
159 186
160 187 [logger_sqlalchemy]
161 188 level = ERROR
162 189 handlers = console
163 190 qualname = sqlalchemy.engine
164 191 propagate = 0
165 192
166 193 ##############
167 194 ## HANDLERS ##
168 195 ##############
169 196
170 197 [handler_console]
171 198 class = StreamHandler
172 199 args = (sys.stderr,)
173 200 level = NOTSET
174 201 formatter = color_formatter
175 202
176 203 ################
177 204 ## FORMATTERS ##
178 205 ################
179 206
180 207 [formatter_generic]
181 208 format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
182 209 datefmt = %Y-%m-%d %H:%M:%S
183 210
184 211 [formatter_color_formatter]
185 212 class=rhodecode.lib.colored_formatter.ColorFormatter
186 213 format= %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
187 214 datefmt = %Y-%m-%d %H:%M:%S No newline at end of file
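
Each broker.*, celery.* and celeryd.* key in the new CELERY CONFIG block is read through the loader above, which exposes it to celery under the matching upper-case, underscore-separated name. A quick sketch of that translation (to_celery mirrors the helper in the loader module):

    to_celery = lambda x: x.replace('.', '_').upper()

    assert to_celery('broker.host') == 'BROKER_HOST'
    assert to_celery('celery.result.backend') == 'CELERY_RESULT_BACKEND'
    assert to_celery('celeryd.max.tasks.per.child') == 'CELERYD_MAX_TASKS_PER_CHILD'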
@@ -1,176 +1,203 b''
1 1 ################################################################################
2 2 ################################################################################
3 3 # rhodecode - Pylons environment configuration #
4 4 # #
5 5 # The %(here)s variable will be replaced with the parent directory of this file#
6 6 ################################################################################
7 7
8 8 [DEFAULT]
9 9 debug = true
10 10 ################################################################################
11 11 ## Uncomment and replace with the address which should receive ##
12 12 ## any error reports after application crash ##
13 13 ## Additionally those settings will be used by rhodecode mailing system ##
14 14 ################################################################################
15 15 #email_to = admin@localhost
16 16 #error_email_from = paste_error@localhost
17 17 #app_email_from = rhodecode-noreply@localhost
18 18 #error_message =
19 19
20 20 #smtp_server = mail.server.com
21 21 #smtp_username =
22 22 #smtp_password =
23 23 #smtp_port =
24 24 #smtp_use_tls = false
25 25 #smtp_use_ssl = true
26 26
27 27 [server:main]
28 28 ##nr of threads to spawn
29 29 threadpool_workers = 5
30 30
31 31 ##max request before thread respawn
32 32 threadpool_max_requests = 2
33 33
34 34 ##option to use threads of process
35 35 use_threadpool = true
36 36
37 37 use = egg:Paste#http
38 38 host = 127.0.0.1
39 39 port = 8001
40 40
41 41 [app:main]
42 42 use = egg:rhodecode
43 43 full_stack = true
44 44 static_files = false
45 45 lang=en
46 46 cache_dir = %(here)s/data
47 47 index_dir = %(here)s/data/index
48 48
49 49 ####################################
50 ### CELERY CONFIG ####
51 ####################################
52 use_celery = false
53 broker.host = localhost
54 broker.vhost = rabbitmqhost
55 broker.port = 5672
56 broker.user = rabbitmq
57 broker.password = qweqwe
58
59 celery.imports = rhodecode.lib.celerylib.tasks
60
61 celery.result.backend = amqp
62 celery.result.dburi = amqp://
62 celery.result.serializer = json
64
65 #celery.send.task.error.emails = true
66 #celery.amqp.task.result.expires = 18000
67
68 celeryd.concurrency = 2
69 #celeryd.log.file = celeryd.log
70 celeryd.log.level = debug
71 celeryd.max.tasks.per.child = 3
72
73 #tasks will never be sent to the queue, but executed locally instead.
74 celery.always.eager = false
75
76 ####################################
50 77 ### BEAKER CACHE ####
51 78 ####################################
52 79 beaker.cache.data_dir=/%(here)s/data/cache/data
53 80 beaker.cache.lock_dir=/%(here)s/data/cache/lock
54 81 beaker.cache.regions=super_short_term,short_term,long_term,sql_cache_short,sql_cache_med,sql_cache_long
55 82
56 83 beaker.cache.super_short_term.type=memory
57 84 beaker.cache.super_short_term.expire=10
58 85
59 86 beaker.cache.short_term.type=memory
60 87 beaker.cache.short_term.expire=60
61 88
62 89 beaker.cache.long_term.type=memory
63 90 beaker.cache.long_term.expire=36000
64 91
65 92
66 93 beaker.cache.sql_cache_short.type=memory
67 94 beaker.cache.sql_cache_short.expire=5
68 95
69 96 beaker.cache.sql_cache_med.type=memory
70 97 beaker.cache.sql_cache_med.expire=360
71 98
72 99 beaker.cache.sql_cache_long.type=file
73 100 beaker.cache.sql_cache_long.expire=3600
74 101
75 102 ####################################
76 103 ### BEAKER SESSION ####
77 104 ####################################
78 105 ## Type of storage used for the session, current types are
79 106 ## dbm, file, memcached, database, and memory.
80 107 ## The storage uses the Container API
81 108 ##that is also used by the cache system.
82 109 beaker.session.type = file
83 110
84 111 beaker.session.key = rhodecode
85 112 beaker.session.secret = g654dcno0-9873jhgfreyu
86 113 beaker.session.timeout = 36000
87 114
88 115 ##auto save the session to not to use .save()
89 116 beaker.session.auto = False
90 117
91 118 ##true exire at browser close
92 119 #beaker.session.cookie_expires = 3600
93 120
94 121
95 122 ################################################################################
96 123 ## WARNING: *THE LINE BELOW MUST BE UNCOMMENTED ON A PRODUCTION ENVIRONMENT* ##
97 124 ## Debug mode will enable the interactive debugging tool, allowing ANYONE to ##
98 125 ## execute malicious code after an exception is raised. ##
99 126 ################################################################################
100 127 set debug = false
101 128
102 129 ##################################
103 130 ### LOGVIEW CONFIG ###
104 131 ##################################
105 132 logview.sqlalchemy = #faa
106 133 logview.pylons.templating = #bfb
107 134 logview.pylons.util = #eee
108 135
109 136 #########################################################
110 137 ### DB CONFIGS - EACH DB WILL HAVE IT'S OWN CONFIG ###
111 138 #########################################################
112 139 sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode.db
113 140 #sqlalchemy.db1.echo = False
114 141 #sqlalchemy.db1.pool_recycle = 3600
115 142 sqlalchemy.convert_unicode = true
116 143
117 144 ################################
118 145 ### LOGGING CONFIGURATION ####
119 146 ################################
120 147 [loggers]
121 148 keys = root, routes, rhodecode, sqlalchemy
122 149
123 150 [handlers]
124 151 keys = console
125 152
126 153 [formatters]
127 154 keys = generic,color_formatter
128 155
129 156 #############
130 157 ## LOGGERS ##
131 158 #############
132 159 [logger_root]
133 160 level = INFO
134 161 handlers = console
135 162
136 163 [logger_routes]
137 164 level = INFO
138 165 handlers = console
139 166 qualname = routes.middleware
140 167 # "level = DEBUG" logs the route matched and routing variables.
141 168 propagate = 0
142 169
143 170 [logger_rhodecode]
144 171 level = DEBUG
145 172 handlers = console
146 173 qualname = rhodecode
147 174 propagate = 0
148 175
149 176 [logger_sqlalchemy]
150 177 level = ERROR
151 178 handlers = console
152 179 qualname = sqlalchemy.engine
153 180 propagate = 0
154 181
155 182 ##############
156 183 ## HANDLERS ##
157 184 ##############
158 185
159 186 [handler_console]
160 187 class = StreamHandler
161 188 args = (sys.stderr,)
162 189 level = NOTSET
163 190 formatter = color_formatter
164 191
165 192 ################
166 193 ## FORMATTERS ##
167 194 ################
168 195
169 196 [formatter_generic]
170 197 format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
171 198 datefmt = %Y-%m-%d %H:%M:%S
172 199
173 200 [formatter_color_formatter]
174 201 class=rhodecode.lib.colored_formatter.ColorFormatter
175 202 format= %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
176 203 datefmt = %Y-%m-%d %H:%M:%S No newline at end of file
@@ -1,176 +1,203 b''
1 1 ################################################################################
2 2 ################################################################################
3 # rhodecode - Pylons environment configuration #
3 # RhodeCode - Pylons environment configuration #
4 4 # #
5 5 # The %(here)s variable will be replaced with the parent directory of this file#
6 6 ################################################################################
7 7
8 8 [DEFAULT]
9 9 debug = true
10 10 ################################################################################
11 11 ## Uncomment and replace with the address which should receive ##
12 12 ## any error reports after application crash ##
13 ## Additionally those settings will be used by rhodecode mailing system ##
13 ## Additionally those settings will be used by RhodeCode mailing system ##
14 14 ################################################################################
15 15 #email_to = admin@localhost
16 16 #error_email_from = paste_error@localhost
17 17 #app_email_from = rhodecode-noreply@localhost
18 18 #error_message =
19 19
20 20 #smtp_server = mail.server.com
21 21 #smtp_username =
22 22 #smtp_password =
23 23 #smtp_port =
24 24 #smtp_use_tls = false
25 25 #smtp_use_ssl = true
26 26
27 27 [server:main]
28 28 ##nr of threads to spawn
29 29 threadpool_workers = 5
30 30
31 31 ##max request before thread respawn
32 32 threadpool_max_requests = 10
33 33
34 34 ##option to use threads of process
35 35 use_threadpool = true
36 36
37 37 use = egg:Paste#http
38 38 host = 127.0.0.1
39 39 port = 5000
40 40
41 41 [app:main]
42 42 use = egg:rhodecode
43 43 full_stack = true
44 44 static_files = true
45 45 lang=en
46 46 cache_dir = %(here)s/data
47 47 index_dir = %(here)s/data/index
48 48 app_instance_uuid = ${app_instance_uuid}
49 49
50 50 ####################################
51 ### CELERY CONFIG ####
52 ####################################
53 use_celery = false
54 broker.host = localhost
55 broker.vhost = rabbitmqhost
56 broker.port = 5672
57 broker.user = rabbitmq
58 broker.password = qweqwe
59
60 celery.imports = rhodecode.lib.celerylib.tasks
61
62 celery.result.backend = amqp
63 celery.result.dburi = amqp://
64 celery.result.serializer = json
65
66 #celery.send.task.error.emails = true
67 #celery.amqp.task.result.expires = 18000
68
69 celeryd.concurrency = 2
70 #celeryd.log.file = celeryd.log
71 celeryd.log.level = debug
72 celeryd.max.tasks.per.child = 3
73
74 #tasks will never be sent to the queue, but executed locally instead.
75 celery.always.eager = false
76
77 ####################################
51 78 ### BEAKER CACHE ####
52 79 ####################################
53 80 beaker.cache.data_dir=/%(here)s/data/cache/data
54 81 beaker.cache.lock_dir=/%(here)s/data/cache/lock
55 82 beaker.cache.regions=super_short_term,short_term,long_term,sql_cache_short,sql_cache_med,sql_cache_long
56 83
57 84 beaker.cache.super_short_term.type=memory
58 85 beaker.cache.super_short_term.expire=10
59 86
60 87 beaker.cache.short_term.type=memory
61 88 beaker.cache.short_term.expire=60
62 89
63 90 beaker.cache.long_term.type=memory
64 91 beaker.cache.long_term.expire=36000
65 92
66 93 beaker.cache.sql_cache_short.type=memory
67 beaker.cache.sql_cache_short.expire=5
94 beaker.cache.sql_cache_short.expire=10
68 95
69 96 beaker.cache.sql_cache_med.type=memory
70 97 beaker.cache.sql_cache_med.expire=360
71 98
72 99 beaker.cache.sql_cache_long.type=file
73 100 beaker.cache.sql_cache_long.expire=3600
74 101
75 102 ####################################
76 103 ### BEAKER SESSION ####
77 104 ####################################
78 105 ## Type of storage used for the session, current types are
79 106 ## dbm, file, memcached, database, and memory.
80 107 ## The storage uses the Container API
81 108 ##that is also used by the cache system.
82 109 beaker.session.type = file
83 110
84 111 beaker.session.key = rhodecode
85 112 beaker.session.secret = ${app_instance_secret}
86 113 beaker.session.timeout = 36000
87 114
88 115 ##auto save the session to not to use .save()
89 116 beaker.session.auto = False
90 117
91 118 ##true exire at browser close
92 119 #beaker.session.cookie_expires = 3600
93 120
94 121
95 122 ################################################################################
96 123 ## WARNING: *THE LINE BELOW MUST BE UNCOMMENTED ON A PRODUCTION ENVIRONMENT* ##
97 124 ## Debug mode will enable the interactive debugging tool, allowing ANYONE to ##
98 125 ## execute malicious code after an exception is raised. ##
99 126 ################################################################################
100 127 set debug = false
101 128
102 129 ##################################
103 130 ### LOGVIEW CONFIG ###
104 131 ##################################
105 132 logview.sqlalchemy = #faa
106 133 logview.pylons.templating = #bfb
107 134 logview.pylons.util = #eee
108 135
109 136 #########################################################
110 137 ### DB CONFIGS - EACH DB WILL HAVE IT'S OWN CONFIG ###
111 138 #########################################################
112 139 sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode.db
113 140 #sqlalchemy.db1.echo = False
114 141 #sqlalchemy.db1.pool_recycle = 3600
115 142 sqlalchemy.convert_unicode = true
116 143
117 144 ################################
118 145 ### LOGGING CONFIGURATION ####
119 146 ################################
120 147 [loggers]
121 148 keys = root, routes, rhodecode, sqlalchemy
122 149
123 150 [handlers]
124 151 keys = console
125 152
126 153 [formatters]
127 154 keys = generic,color_formatter
128 155
129 156 #############
130 157 ## LOGGERS ##
131 158 #############
132 159 [logger_root]
133 160 level = INFO
134 161 handlers = console
135 162
136 163 [logger_routes]
137 164 level = INFO
138 165 handlers = console
139 166 qualname = routes.middleware
140 167 # "level = DEBUG" logs the route matched and routing variables.
141 168 propagate = 0
142 169
143 170 [logger_rhodecode]
144 171 level = DEBUG
145 172 handlers = console
146 173 qualname = rhodecode
147 174 propagate = 0
148 175
149 176 [logger_sqlalchemy]
150 177 level = ERROR
151 178 handlers = console
152 179 qualname = sqlalchemy.engine
153 180 propagate = 0
154 181
155 182 ##############
156 183 ## HANDLERS ##
157 184 ##############
158 185
159 186 [handler_console]
160 187 class = StreamHandler
161 188 args = (sys.stderr,)
162 189 level = NOTSET
163 190 formatter = color_formatter
164 191
165 192 ################
166 193 ## FORMATTERS ##
167 194 ################
168 195
169 196 [formatter_generic]
170 197 format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
171 198 datefmt = %Y-%m-%d %H:%M:%S
172 199
173 200 [formatter_color_formatter]
174 201 class=rhodecode.lib.colored_formatter.ColorFormatter
175 202 format= %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
176 203 datefmt = %Y-%m-%d %H:%M:%S No newline at end of file
@@ -1,64 +1,74 b''
1 import os
2 import sys
3 import socket
4 import traceback
5 import logging
6
1 7 from rhodecode.lib.pidlock import DaemonLock, LockHeld
2 8 from vcs.utils.lazy import LazyProperty
3 9 from decorator import decorator
4 import logging
5 import os
6 import sys
7 import traceback
8 10 from hashlib import md5
9 import socket
11 from pylons import config
12
10 13 log = logging.getLogger(__name__)
11 14
15 def str2bool(v):
16 return v.lower() in ["yes", "true", "t", "1"] if v else None
17
18 CELERY_ON = str2bool(config['app_conf'].get('use_celery'))
19
12 20 class ResultWrapper(object):
13 21 def __init__(self, task):
14 22 self.task = task
15
23
16 24 @LazyProperty
17 25 def result(self):
18 26 return self.task
19 27
20 28 def run_task(task, *args, **kwargs):
21 try:
22 t = task.delay(*args, **kwargs)
23 log.info('running task %s', t.task_id)
24 return t
25 except socket.error, e:
26 if e.errno == 111:
27 log.debug('Unable to connect to celeryd. Sync execution')
28 else:
29 log.error(traceback.format_exc())
30 except KeyError, e:
31 log.debug('Unable to connect to celeryd. Sync execution')
32 except Exception, e:
33 log.error(traceback.format_exc())
34
29 if CELERY_ON:
30 try:
31 t = task.delay(*args, **kwargs)
32 log.info('running task %s:%s', t.task_id, task)
33 return t
34 except socket.error, e:
35 if e.errno == 111:
36 log.debug('Unable to connect to celeryd. Sync execution')
37 else:
38 log.error(traceback.format_exc())
39 except KeyError, e:
40 log.debug('Unable to connect to celeryd. Sync execution')
41 except Exception, e:
42 log.error(traceback.format_exc())
43
44 log.debug('executing task %s in sync mode', task)
35 45 return ResultWrapper(task(*args, **kwargs))
36 46
37 47
38 48 def locked_task(func):
39 49 def __wrapper(func, *fargs, **fkwargs):
40 50 params = list(fargs)
41 51 params.extend(['%s-%s' % ar for ar in fkwargs.items()])
42
52
43 53 lockkey = 'task_%s' % \
44 54 md5(str(func.__name__) + '-' + \
45 55 '-'.join(map(str, params))).hexdigest()
46 56 log.info('running task with lockkey %s', lockkey)
47 57 try:
48 58 l = DaemonLock(lockkey)
49 59 ret = func(*fargs, **fkwargs)
50 60 l.release()
51 61 return ret
52 62 except LockHeld:
53 63 log.info('LockHeld')
54 return 'Task with key %s already running' % lockkey
64 return 'Task with key %s already running' % lockkey
55 65
56 return decorator(__wrapper, func)
57
66 return decorator(__wrapper, func)
67
68
58 69
59
60
61
62
63
64
70
71
72
73
74
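
From the caller's side run_task() hides whether celery is in use at all: with use_celery = true and a reachable broker it returns the usual asynchronous result, otherwise it runs the task inline and wraps the return value. A hypothetical caller-side sketch (the repository path is made up):

    from rhodecode.lib.celerylib import run_task
    from rhodecode.lib.celerylib.tasks import whoosh_index

    # Dispatches to celeryd when CELERY_ON is true and the broker answers;
    # otherwise falls back to synchronous in-process execution.
    result = run_task(whoosh_index, '/srv/repos', True)

    # Both the async result and ResultWrapper expose .result, so callers
    # never need to know which path was taken.
    value = result.result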
@@ -1,324 +1,363 b''
1 1 from celery.decorators import task
2 2
3 3 import os
4 4 import traceback
5 import beaker
5 6 from time import mktime
6
7 7 from operator import itemgetter
8
9 from pylons import config
8 10 from pylons.i18n.translation import _
9 from rhodecode.lib.celerylib import run_task, locked_task
11
12 from rhodecode.lib.celerylib import run_task, locked_task, str2bool
10 13 from rhodecode.lib.helpers import person
11 14 from rhodecode.lib.smtp_mailer import SmtpMailer
12 15 from rhodecode.lib.utils import OrderedDict
16 from rhodecode.model import init_model
17 from rhodecode.model import meta
18 from rhodecode.model.db import RhodeCodeUi
19
13 20 from vcs.backends import get_repo
14 from rhodecode.model.db import RhodeCodeUi
21
22 from sqlalchemy import engine_from_config
15 23
16 24 try:
17 25 import json
18 26 except ImportError:
19 27 #python 2.5 compatibility
20 28 import simplejson as json
21 29
22 try:
23 from celeryconfig import PYLONS_CONFIG as config
24 celery_on = True
25 except ImportError:
26 #if celeryconfig is not present let's just load our pylons
27 #config instead
28 from pylons import config
29 celery_on = False
30
31
32 30 __all__ = ['whoosh_index', 'get_commits_stats',
33 31 'reset_user_password', 'send_email']
34 32
33 CELERY_ON = str2bool(config['app_conf'].get('use_celery'))
34
35 35 def get_session():
36 if celery_on:
37 from sqlalchemy import engine_from_config
38 from sqlalchemy.orm import sessionmaker, scoped_session
39 engine = engine_from_config(dict(config.items('app:main')),
40 'sqlalchemy.db1.')
41 sa = scoped_session(sessionmaker(bind=engine))
42 else:
43 #If we don't use celery reuse our current application Session
44 from rhodecode.model.meta import Session
45 sa = Session()
46
36 if CELERY_ON:
37 engine = engine_from_config(config, 'sqlalchemy.db1.')
38 init_model(engine)
39 sa = meta.Session()
47 40 return sa
48 41
49 42 def get_repos_path():
50 43 sa = get_session()
51 44 q = sa.query(RhodeCodeUi).filter(RhodeCodeUi.ui_key == '/').one()
52 45 return q.ui_value
53 46
54 47 @task
55 48 @locked_task
56 49 def whoosh_index(repo_location, full_index):
57 50 log = whoosh_index.get_logger()
58 51 from rhodecode.lib.indexers.daemon import WhooshIndexingDaemon
59 index_location = dict(config.items('app:main'))['index_dir']
52 index_location = config['index_dir']
60 53 WhooshIndexingDaemon(index_location=index_location,
61 54 repo_location=repo_location).run(full_index=full_index)
62 55
63 56 @task
64 57 @locked_task
65 58 def get_commits_stats(repo_name, ts_min_y, ts_max_y):
66 59 from rhodecode.model.db import Statistics, Repository
67 60 log = get_commits_stats.get_logger()
68 61
69 62 #for js data compatibilty
70 63 author_key_cleaner = lambda k: person(k).replace('"', "")
71 64
72 65 commits_by_day_author_aggregate = {}
73 66 commits_by_day_aggregate = {}
74 67 repos_path = get_repos_path()
75 68 p = os.path.join(repos_path, repo_name)
76 69 repo = get_repo(p)
77 70
78 71 skip_date_limit = True
79 72 parse_limit = 250 #limit for single task changeset parsing optimal for
80 73 last_rev = 0
81 74 last_cs = None
82 75 timegetter = itemgetter('time')
83 76
84 77 sa = get_session()
85 78
86 79 dbrepo = sa.query(Repository)\
87 80 .filter(Repository.repo_name == repo_name).scalar()
88 81 cur_stats = sa.query(Statistics)\
89 82 .filter(Statistics.repository == dbrepo).scalar()
90 83 if cur_stats:
91 84 last_rev = cur_stats.stat_on_revision
92 85 if not repo.revisions:
93 86 return True
94 87
95 88 if last_rev == repo.revisions[-1] and len(repo.revisions) > 1:
96 89 #pass silently without any work if we're not on first revision or
97 90 #current state of parsing revision(from db marker) is the last revision
98 91 return True
99 92
100 93 if cur_stats:
101 94 commits_by_day_aggregate = OrderedDict(
102 95 json.loads(
103 96 cur_stats.commit_activity_combined))
104 97 commits_by_day_author_aggregate = json.loads(cur_stats.commit_activity)
105 98
106 99 log.debug('starting parsing %s', parse_limit)
107 100 lmktime = mktime
108 101
109 102 for cnt, rev in enumerate(repo.revisions[last_rev:]):
110 103 last_cs = cs = repo.get_changeset(rev)
111 104 k = '%s-%s-%s' % (cs.date.timetuple()[0], cs.date.timetuple()[1],
112 105 cs.date.timetuple()[2])
113 106 timetupple = [int(x) for x in k.split('-')]
114 107 timetupple.extend([0 for _ in xrange(6)])
115 108 k = lmktime(timetupple)
116 109 if commits_by_day_author_aggregate.has_key(author_key_cleaner(cs.author)):
117 110 try:
118 111 l = [timegetter(x) for x in commits_by_day_author_aggregate\
119 112 [author_key_cleaner(cs.author)]['data']]
120 113 time_pos = l.index(k)
121 114 except ValueError:
122 115 time_pos = False
123 116
124 117 if time_pos >= 0 and time_pos is not False:
125 118
126 119 datadict = commits_by_day_author_aggregate\
127 120 [author_key_cleaner(cs.author)]['data'][time_pos]
128 121
129 122 datadict["commits"] += 1
130 123 datadict["added"] += len(cs.added)
131 124 datadict["changed"] += len(cs.changed)
132 125 datadict["removed"] += len(cs.removed)
133 126
134 127 else:
135 128 if k >= ts_min_y and k <= ts_max_y or skip_date_limit:
136 129
137 130 datadict = {"time":k,
138 131 "commits":1,
139 132 "added":len(cs.added),
140 133 "changed":len(cs.changed),
141 134 "removed":len(cs.removed),
142 135 }
143 136 commits_by_day_author_aggregate\
144 137 [author_key_cleaner(cs.author)]['data'].append(datadict)
145 138
146 139 else:
147 140 if k >= ts_min_y and k <= ts_max_y or skip_date_limit:
148 141 commits_by_day_author_aggregate[author_key_cleaner(cs.author)] = {
149 142 "label":author_key_cleaner(cs.author),
150 143 "data":[{"time":k,
151 144 "commits":1,
152 145 "added":len(cs.added),
153 146 "changed":len(cs.changed),
154 147 "removed":len(cs.removed),
155 148 }],
156 149 "schema":["commits"],
157 150 }
158 151
159 152 #gather all data by day
160 153 if commits_by_day_aggregate.has_key(k):
161 154 commits_by_day_aggregate[k] += 1
162 155 else:
163 156 commits_by_day_aggregate[k] = 1
164 157
165 158 if cnt >= parse_limit:
166 159 #don't fetch to much data since we can freeze application
167 160 break
168 161 overview_data = []
169 162 for k, v in commits_by_day_aggregate.items():
170 163 overview_data.append([k, v])
171 164 overview_data = sorted(overview_data, key=itemgetter(0))
172 165 if not commits_by_day_author_aggregate:
173 166 commits_by_day_author_aggregate[author_key_cleaner(repo.contact)] = {
174 167 "label":author_key_cleaner(repo.contact),
175 168 "data":[0, 1],
176 169 "schema":["commits"],
177 170 }
178 171
179 172 stats = cur_stats if cur_stats else Statistics()
180 173 stats.commit_activity = json.dumps(commits_by_day_author_aggregate)
181 174 stats.commit_activity_combined = json.dumps(overview_data)
182 175
183 176 log.debug('last revison %s', last_rev)
184 177 leftovers = len(repo.revisions[last_rev:])
185 178 log.debug('revisions to parse %s', leftovers)
186 179
187 180 if last_rev == 0 or leftovers < parse_limit:
188 181 stats.languages = json.dumps(__get_codes_stats(repo_name))
189 182
190 183 stats.repository = dbrepo
191 184 stats.stat_on_revision = last_cs.revision
192 185
193 186 try:
194 187 sa.add(stats)
195 188 sa.commit()
196 189 except:
197 190 log.error(traceback.format_exc())
198 191 sa.rollback()
199 192 return False
200 193 if len(repo.revisions) > 1:
201 194 run_task(get_commits_stats, repo_name, ts_min_y, ts_max_y)
202 195
203 196 return True
204 197
205 198 @task
206 199 def reset_user_password(user_email):
207 200 log = reset_user_password.get_logger()
208 201 from rhodecode.lib import auth
209 202 from rhodecode.model.db import User
210 203
211 204 try:
212 205 try:
213 206 sa = get_session()
214 207 user = sa.query(User).filter(User.email == user_email).scalar()
215 208 new_passwd = auth.PasswordGenerator().gen_password(8,
216 209 auth.PasswordGenerator.ALPHABETS_BIG_SMALL)
217 210 if user:
218 211 user.password = auth.get_crypt_password(new_passwd)
219 212 sa.add(user)
220 213 sa.commit()
221 214 log.info('change password for %s', user_email)
222 215 if new_passwd is None:
223 216 raise Exception('unable to generate new password')
224 217
225 218 except:
226 219 log.error(traceback.format_exc())
227 220 sa.rollback()
228 221
229 222 run_task(send_email, user_email,
230 223 "Your new rhodecode password",
231 224 'Your new rhodecode password:%s' % (new_passwd))
232 225 log.info('send new password mail to %s', user_email)
233 226
234 227
235 228 except:
236 229 log.error('Failed to update user password')
237 230 log.error(traceback.format_exc())
231
238 232 return True
239 233
240 234 @task
241 235 def send_email(recipients, subject, body):
242 236 """
243 237 Sends an email with defined parameters from the .ini files.
244 238
245 239
246 240 :param recipients: list of recipients, it this is empty the defined email
247 241 address from field 'email_to' is used instead
248 242 :param subject: subject of the mail
249 243 :param body: body of the mail
250 244 """
251 245 log = send_email.get_logger()
252 email_config = dict(config.items('DEFAULT'))
246 email_config = config
253 247
254 248 if not recipients:
255 249 recipients = [email_config.get('email_to')]
256 250
257 def str2bool(v):
258 return v.lower() in ["yes", "true", "t", "1"] if v else None
259
260 251 mail_from = email_config.get('app_email_from')
261 252 user = email_config.get('smtp_username')
262 253 passwd = email_config.get('smtp_password')
263 254 mail_server = email_config.get('smtp_server')
264 255 mail_port = email_config.get('smtp_port')
265 256 tls = str2bool(email_config.get('smtp_use_tls'))
266 257 ssl = str2bool(email_config.get('smtp_use_ssl'))
267 258
268 259 try:
269 260 m = SmtpMailer(mail_from, user, passwd, mail_server,
270 261 mail_port, ssl, tls)
271 262 m.send(recipients, subject, body)
272 263 except:
273 264 log.error('Mail sending failed')
274 265 log.error(traceback.format_exc())
275 266 return False
276 267 return True
277 268
278 269 @task
279 270 def create_repo_fork(form_data, cur_user):
280 271 from rhodecode.model.repo import RepoModel
281 272 from vcs import get_backend
282 273 log = create_repo_fork.get_logger()
283 274 repo_model = RepoModel(get_session())
284 275 repo_model.create(form_data, cur_user, just_db=True, fork=True)
285 276 repo_name = form_data['repo_name']
286 277 repos_path = get_repos_path()
287 278 repo_path = os.path.join(repos_path, repo_name)
288 279 repo_fork_path = os.path.join(repos_path, form_data['fork_name'])
289 280 alias = form_data['repo_type']
290 281
291 282 log.info('creating repo fork %s as %s', repo_name, repo_path)
292 283 backend = get_backend(alias)
293 284 backend(str(repo_fork_path), create=True, src_url=str(repo_path))
294 285
295 286 def __get_codes_stats(repo_name):
296 LANGUAGES_EXTENSIONS = ['action', 'adp', 'ashx', 'asmx',
297 'aspx', 'asx', 'axd', 'c', 'cfg', 'cfm', 'cpp', 'cs', 'diff', 'do', 'el',
298 'erl', 'h', 'java', 'js', 'jsp', 'jspx', 'lisp', 'lua', 'm', 'mako', 'ml',
299 'pas', 'patch', 'php', 'php3', 'php4', 'phtml', 'pm', 'py', 'rb', 'rst',
300 's', 'sh', 'tpl', 'txt', 'vim', 'wss', 'xhtml', 'xml', 'xsl', 'xslt', 'yaws']
301
287 LANGUAGES_EXTENSIONS_MAP = {'scm': 'Scheme', 'asmx': 'VbNetAspx', 'Rout':
288 'RConsole', 'rest': 'Rst', 'abap': 'ABAP', 'go': 'Go', 'phtml': 'HtmlPhp',
289 'ns2': 'Newspeak', 'xml': 'EvoqueXml', 'sh-session': 'BashSession', 'ads':
290 'Ada', 'clj': 'Clojure', 'll': 'Llvm', 'ebuild': 'Bash', 'adb': 'Ada',
291 'ada': 'Ada', 'c++-objdump': 'CppObjdump', 'aspx':
292 'VbNetAspx', 'ksh': 'Bash', 'coffee': 'CoffeeScript', 'vert': 'GLShader',
293 'Makefile.*': 'Makefile', 'di': 'D', 'dpatch': 'DarcsPatch', 'rake':
294 'Ruby', 'moo': 'MOOCode', 'erl-sh': 'ErlangShell', 'geo': 'GLShader',
295 'pov': 'Povray', 'bas': 'VbNet', 'bat': 'Batch', 'd': 'D', 'lisp':
296 'CommonLisp', 'h': 'C', 'rbx': 'Ruby', 'tcl': 'Tcl', 'c++': 'Cpp', 'md':
297 'MiniD', '.vimrc': 'Vim', 'xsd': 'Xml', 'ml': 'Ocaml', 'el': 'CommonLisp',
298 'befunge': 'Befunge', 'xsl': 'Xslt', 'pyx': 'Cython', 'cfm':
299 'ColdfusionHtml', 'evoque': 'Evoque', 'cfg': 'Ini', 'htm': 'Html',
300 'Makefile': 'Makefile', 'cfc': 'ColdfusionHtml', 'tex': 'Tex', 'cs':
301 'CSharp', 'mxml': 'Mxml', 'patch': 'Diff', 'apache.conf': 'ApacheConf',
302 'scala': 'Scala', 'applescript': 'AppleScript', 'GNUmakefile': 'Makefile',
303 'c-objdump': 'CObjdump', 'lua': 'Lua', 'apache2.conf': 'ApacheConf', 'rb':
304 'Ruby', 'gemspec': 'Ruby', 'rl': 'RagelObjectiveC', 'vala': 'Vala', 'tmpl':
305 'Cheetah', 'bf': 'Brainfuck', 'plt': 'Gnuplot', 'G': 'AntlrRuby', 'xslt':
306 'Xslt', 'flxh': 'Felix', 'asax': 'VbNetAspx', 'Rakefile': 'Ruby', 'S': 'S',
307 'wsdl': 'Xml', 'js': 'Javascript', 'autodelegate': 'Myghty', 'properties':
308 'Ini', 'bash': 'Bash', 'c': 'C', 'g': 'AntlrRuby', 'r3': 'Rebol', 's':
309 'Gas', 'ashx': 'VbNetAspx', 'cxx': 'Cpp', 'boo': 'Boo', 'prolog': 'Prolog',
310 'sqlite3-console': 'SqliteConsole', 'cl': 'CommonLisp', 'cc': 'Cpp', 'pot':
311 'Gettext', 'vim': 'Vim', 'pxi': 'Cython', 'yaml': 'Yaml', 'SConstruct':
312 'Python', 'diff': 'Diff', 'txt': 'Text', 'cw': 'Redcode', 'pxd': 'Cython',
313 'plot': 'Gnuplot', 'java': 'Java', 'hrl': 'Erlang', 'py': 'Python',
314 'makefile': 'Makefile', 'squid.conf': 'SquidConf', 'asm': 'Nasm', 'toc':
315 'Tex', 'kid': 'Genshi', 'rhtml': 'Rhtml', 'po': 'Gettext', 'pl': 'Prolog',
316 'pm': 'Perl', 'hx': 'Haxe', 'ascx': 'VbNetAspx', 'ooc': 'Ooc', 'asy':
317 'Asymptote', 'hs': 'Haskell', 'SConscript': 'Python', 'pytb':
318 'PythonTraceback', 'myt': 'Myghty', 'hh': 'Cpp', 'R': 'S', 'aux': 'Tex',
319 'rst': 'Rst', 'cpp-objdump': 'CppObjdump', 'lgt': 'Logtalk', 'rss': 'Xml',
320 'flx': 'Felix', 'b': 'Brainfuck', 'f': 'Fortran', 'rbw': 'Ruby',
321 '.htaccess': 'ApacheConf', 'cxx-objdump': 'CppObjdump', 'j': 'ObjectiveJ',
322 'mll': 'Ocaml', 'yml': 'Yaml', 'mu': 'MuPAD', 'r': 'Rebol', 'ASM': 'Nasm',
323 'erl': 'Erlang', 'mly': 'Ocaml', 'mo': 'Modelica', 'def': 'Modula2', 'ini':
324 'Ini', 'control': 'DebianControl', 'vb': 'VbNet', 'vapi': 'Vala', 'pro':
325 'Prolog', 'spt': 'Cheetah', 'mli': 'Ocaml', 'as': 'ActionScript3', 'cmd':
326 'Batch', 'cpp': 'Cpp', 'io': 'Io', 'tac': 'Python', 'haml': 'Haml', 'rkt':
327 'Racket', 'st':'Smalltalk', 'inc': 'Povray', 'pas': 'Delphi', 'cmake':
328 'CMake', 'csh':'Tcsh', 'hpp': 'Cpp', 'feature': 'Gherkin', 'html': 'Html',
329 'php':'Php', 'php3':'Php', 'php4':'Php', 'php5':'Php', 'xhtml': 'Html',
330 'hxx': 'Cpp', 'eclass': 'Bash', 'css': 'Css',
331 'frag': 'GLShader', 'd-objdump': 'DObjdump', 'weechatlog': 'IrcLogs',
332 'tcsh': 'Tcsh', 'objdump': 'Objdump', 'pyw': 'Python', 'h++': 'Cpp',
333 'py3tb': 'Python3Traceback', 'jsp': 'Jsp', 'sql': 'Sql', 'mak': 'Makefile',
334 'php': 'Php', 'mao': 'Mako', 'man': 'Groff', 'dylan': 'Dylan', 'sass':
335 'Sass', 'cfml': 'ColdfusionHtml', 'darcspatch': 'DarcsPatch', 'tpl':
336 'Smarty', 'm': 'ObjectiveC', 'f90': 'Fortran', 'mod': 'Modula2', 'sh':
337 'Bash', 'lhs': 'LiterateHaskell', 'sources.list': 'SourcesList', 'axd':
338 'VbNetAspx', 'sc': 'Python'}
302 339
303 340 repos_path = get_repos_path()
304 341 p = os.path.join(repos_path, repo_name)
305 342 repo = get_repo(p)
306 343 tip = repo.get_changeset()
307 344 code_stats = {}
308 345
309 346 def aggregate(cs):
310 347 for f in cs[2]:
311 k = f.mimetype
312 if f.extension in LANGUAGES_EXTENSIONS:
313 if code_stats.has_key(k):
314 code_stats[k] += 1
348 ext = f.extension
349 key = LANGUAGES_EXTENSIONS_MAP.get(ext, ext)
350 key = key or ext
351 if ext in LANGUAGES_EXTENSIONS_MAP.keys():
352 if code_stats.has_key(key):
353 code_stats[key] += 1
315 354 else:
316 code_stats[k] = 1
355 code_stats[key] = 1
317 356
318 357 map(aggregate, tip.walk('/'))
319 358
320 359 return code_stats or {}
321 360
322 361
323 362
324 363
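
The code-statistics change replaces the old mimetype counter with a lookup in LANGUAGES_EXTENSIONS_MAP: files are counted per language name and extensions missing from the map are skipped. Reduced to its core (the sample extensions are made up):

    LANGUAGES_EXTENSIONS_MAP = {'py': 'Python', 'js': 'Javascript'}  # excerpt

    code_stats = {}
    for ext in ('py', 'py', 'js', 'xyz'):   # 'xyz' is unmapped and ignored
        if ext in LANGUAGES_EXTENSIONS_MAP:
            key = LANGUAGES_EXTENSIONS_MAP[ext]
            code_stats[key] = code_stats.get(key, 0) + 1

    assert code_stats == {'Python': 2, 'Javascript': 1}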
@@ -1,174 +1,201 b''
1 1 ################################################################################
2 2 ################################################################################
3 3 # rhodecode - Pylons environment configuration #
4 4 # #
5 5 # The %(here)s variable will be replaced with the parent directory of this file#
6 6 ################################################################################
7 7
8 8 [DEFAULT]
9 9 debug = true
10 10 ################################################################################
11 11 ## Uncomment and replace with the address which should receive ##
12 12 ## any error reports after application crash ##
13 13 ## Additionally those settings will be used by rhodecode mailing system ##
14 14 ################################################################################
15 15 #email_to = admin@localhost
16 16 #error_email_from = paste_error@localhost
17 17 #app_email_from = rhodecode-noreply@localhost
18 18 #error_message =
19 19
20 20 #smtp_server = mail.server.com
21 21 #smtp_username =
22 22 #smtp_password =
23 23 #smtp_port =
24 24 #smtp_use_tls = false
25 25
26 26 [server:main]
27 27 ##nr of threads to spawn
28 28 threadpool_workers = 5
29 29
30 30 ##max request before thread respawn
31 31 threadpool_max_requests = 2
32 32
33 33 ##option to use threads of process
34 34 use_threadpool = true
35 35
36 36 use = egg:Paste#http
37 37 host = 127.0.0.1
38 38 port = 5000
39 39
40 40 [app:main]
41 41 use = egg:rhodecode
42 42 full_stack = true
43 43 static_files = true
44 44 lang=en
45 45 cache_dir = %(here)s/data
46 46 index_dir = /tmp/index
47 47
48 48 ####################################
49 ### CELERY CONFIG ####
50 ####################################
51 use_celery = false
52 broker.host = localhost
53 broker.vhost = rabbitmqhost
54 broker.port = 5672
55 broker.user = rabbitmq
56 broker.password = qweqwe
57
58 celery.imports = rhodecode.lib.celerylib.tasks
59
60 celery.result.backend = amqp
61 celery.result.dburi = amqp://
62 celery.result.serializer = json
63
64 #celery.send.task.error.emails = true
65 #celery.amqp.task.result.expires = 18000
66
67 celeryd.concurrency = 2
68 #celeryd.log.file = celeryd.log
69 celeryd.log.level = debug
70 celeryd.max.tasks.per.child = 3
71
72 #tasks will never be sent to the queue, but executed locally instead.
73 celery.always.eager = false
74
75 ####################################
49 76 ### BEAKER CACHE ####
50 77 ####################################
51 78 beaker.cache.data_dir=/%(here)s/data/cache/data
52 79 beaker.cache.lock_dir=/%(here)s/data/cache/lock
53 80 beaker.cache.regions=super_short_term,short_term,long_term,sql_cache_short,sql_cache_med,sql_cache_long
54 81
55 82 beaker.cache.super_short_term.type=memory
56 83 beaker.cache.super_short_term.expire=10
57 84
58 85 beaker.cache.short_term.type=memory
59 86 beaker.cache.short_term.expire=60
60 87
61 88 beaker.cache.long_term.type=memory
62 89 beaker.cache.long_term.expire=36000
63 90
64 91
65 92 beaker.cache.sql_cache_short.type=memory
66 93 beaker.cache.sql_cache_short.expire=5
67 94
68 95 beaker.cache.sql_cache_med.type=memory
69 96 beaker.cache.sql_cache_med.expire=360
70 97
71 98 beaker.cache.sql_cache_long.type=file
72 99 beaker.cache.sql_cache_long.expire=3600
73 100
74 101 ####################################
75 102 ### BEAKER SESSION ####
76 103 ####################################
77 104 ## Type of storage used for the session, current types are
78 105 ## dbm, file, memcached, database, and memory.
79 106 ## The storage uses the Container API
80 107 ##that is also used by the cache system.
81 108 beaker.session.type = file
82 109
83 110 beaker.session.key = rhodecode
84 111 beaker.session.secret = g654dcno0-9873jhgfreyu
85 112 beaker.session.timeout = 36000
86 113
87 114 ##auto save the session to not to use .save()
88 115 beaker.session.auto = False
89 116
90 117 ##true exire at browser close
91 118 #beaker.session.cookie_expires = 3600
92 119
93 120
94 121 ################################################################################
95 122 ## WARNING: *THE LINE BELOW MUST BE UNCOMMENTED ON A PRODUCTION ENVIRONMENT* ##
96 123 ## Debug mode will enable the interactive debugging tool, allowing ANYONE to ##
97 124 ## execute malicious code after an exception is raised. ##
98 125 ################################################################################
99 126 #set debug = false
100 127
101 128 ##################################
102 129 ### LOGVIEW CONFIG ###
103 130 ##################################
104 131 logview.sqlalchemy = #faa
105 132 logview.pylons.templating = #bfb
106 133 logview.pylons.util = #eee
107 134
108 135 #########################################################
109 136 ### DB CONFIGS - EACH DB WILL HAVE IT'S OWN CONFIG ###
110 137 #########################################################
111 138 sqlalchemy.db1.url = sqlite:///%(here)s/test.db
112 139 #sqlalchemy.db1.echo = False
113 140 #sqlalchemy.db1.pool_recycle = 3600
114 141 sqlalchemy.convert_unicode = true
115 142
116 143 ################################
117 144 ### LOGGING CONFIGURATION ####
118 145 ################################
119 146 [loggers]
120 147 keys = root, routes, rhodecode, sqlalchemy
121 148
122 149 [handlers]
123 150 keys = console
124 151
125 152 [formatters]
126 153 keys = generic,color_formatter
127 154
128 155 #############
129 156 ## LOGGERS ##
130 157 #############
131 158 [logger_root]
132 159 level = ERROR
133 160 handlers = console
134 161
135 162 [logger_routes]
136 163 level = ERROR
137 164 handlers = console
138 165 qualname = routes.middleware
139 166 # "level = DEBUG" logs the route matched and routing variables.
140 167
141 168 [logger_rhodecode]
142 169 level = ERROR
143 170 handlers = console
144 171 qualname = rhodecode
145 172 propagate = 0
146 173
147 174 [logger_sqlalchemy]
148 175 level = ERROR
149 176 handlers = console
150 177 qualname = sqlalchemy.engine
151 178 propagate = 0
152 179
153 180 ##############
154 181 ## HANDLERS ##
155 182 ##############
156 183
157 184 [handler_console]
158 185 class = StreamHandler
159 186 args = (sys.stderr,)
160 187 level = NOTSET
161 188 formatter = color_formatter
162 189
163 190 ################
164 191 ## FORMATTERS ##
165 192 ################
166 193
167 194 [formatter_generic]
168 195 format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
169 196 datefmt = %Y-%m-%d %H:%M:%S
170 197
171 198 [formatter_color_formatter]
172 199 class=rhodecode.lib.colored_formatter.ColorFormatter
173 200 format= %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
174 201 datefmt = %Y-%m-%d %H:%M:%S No newline at end of file
1 NO CONTENT: file was removed