git executable is now configurable via .ini files
marcink
r3376:e67b2ef0 beta
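This change adds a git_path option to the [app:main] section of the bundled .ini configuration templates, so the Git executable RhodeCode shells out to can be configured per instance rather than assumed to be the git found on the PATH. A minimal sketch of the new setting, as it is added to the templates below:

    # path to git executable
    git_path = git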
@@ -1,442 +1,447 b''
1 ################################################################################
1 ################################################################################
2 ################################################################################
2 ################################################################################
3 # RhodeCode - Pylons environment configuration #
3 # RhodeCode - Pylons environment configuration #
4 # #
4 # #
5 # The %(here)s variable will be replaced with the parent directory of this file#
5 # The %(here)s variable will be replaced with the parent directory of this file#
6 ################################################################################
6 ################################################################################
7
7
8 [DEFAULT]
8 [DEFAULT]
9 debug = true
9 debug = true
10 pdebug = false
10 pdebug = false
11 ################################################################################
11 ################################################################################
12 ## Uncomment and replace with the address which should receive ##
12 ## Uncomment and replace with the address which should receive ##
13 ## any error reports after application crash ##
13 ## any error reports after application crash ##
14 ## Additionally those settings will be used by RhodeCode mailing system ##
14 ## Additionally those settings will be used by RhodeCode mailing system ##
15 ################################################################################
15 ################################################################################
16 #email_to = admin@localhost
16 #email_to = admin@localhost
17 #error_email_from = paste_error@localhost
17 #error_email_from = paste_error@localhost
18 #app_email_from = rhodecode-noreply@localhost
18 #app_email_from = rhodecode-noreply@localhost
19 #error_message =
19 #error_message =
20 #email_prefix = [RhodeCode]
20 #email_prefix = [RhodeCode]
21
21
22 #smtp_server = mail.server.com
22 #smtp_server = mail.server.com
23 #smtp_username =
23 #smtp_username =
24 #smtp_password =
24 #smtp_password =
25 #smtp_port =
25 #smtp_port =
26 #smtp_use_tls = false
26 #smtp_use_tls = false
27 #smtp_use_ssl = true
27 #smtp_use_ssl = true
28 # Specify available auth parameters here (e.g. LOGIN PLAIN CRAM-MD5, etc.)
28 # Specify available auth parameters here (e.g. LOGIN PLAIN CRAM-MD5, etc.)
29 #smtp_auth =
29 #smtp_auth =
30
30
31 [server:main]
31 [server:main]
32 ## PASTE
32 ## PASTE
33 ##nr of threads to spawn
33 ##nr of threads to spawn
34 #threadpool_workers = 5
34 #threadpool_workers = 5
35
35
36 ##max request before thread respawn
36 ##max request before thread respawn
37 #threadpool_max_requests = 10
37 #threadpool_max_requests = 10
38
38
39 ##option to use threads of process
39 ##option to use threads of process
40 #use_threadpool = true
40 #use_threadpool = true
41
41
42 #use = egg:Paste#http
42 #use = egg:Paste#http
43
43
44 #WAITRESS
44 #WAITRESS
45 threads = 5
45 threads = 5
46 #100GB
47 max_request_body_size = 107374182400
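## note: 107374182400 = 100 * 1024^3 bytes, so the "100GB" above is 100 GiB;
## waitress should reject any single request body larger than this limit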
46 use = egg:waitress#main
48 use = egg:waitress#main
47
49
48 host = 0.0.0.0
50 host = 0.0.0.0
49 port = 5000
51 port = 5000
50
52
51 [filter:proxy-prefix]
53 [filter:proxy-prefix]
52 # prefix middleware for rc
54 # prefix middleware for rc
53 use = egg:PasteDeploy#prefix
55 use = egg:PasteDeploy#prefix
54 prefix = /<your-prefix>
56 prefix = /<your-prefix>
55
57
56 [app:main]
58 [app:main]
57 use = egg:rhodecode
59 use = egg:rhodecode
58 #filter-with = proxy-prefix
60 #filter-with = proxy-prefix
59 full_stack = true
61 full_stack = true
60 static_files = true
62 static_files = true
61 # Optional Languages
63 # Optional Languages
62 # en, fr, ja, pt_BR, zh_CN, zh_TW, pl
64 # en, fr, ja, pt_BR, zh_CN, zh_TW, pl
63 lang = en
65 lang = en
64 cache_dir = %(here)s/data
66 cache_dir = %(here)s/data
65 index_dir = %(here)s/data/index
67 index_dir = %(here)s/data/index
66 app_instance_uuid = rc-develop
68 app_instance_uuid = rc-develop
67 cut_off_limit = 256000
69 cut_off_limit = 256000
68 vcs_full_cache = True
70 vcs_full_cache = True
69 # force https in RhodeCode, fixes https redirects, assumes it's always https
71 # force https in RhodeCode, fixes https redirects, assumes it's always https
70 force_https = false
72 force_https = false
71 # use Strict-Transport-Security headers
73 # use Strict-Transport-Security headers
72 use_htsts = false
74 use_htsts = false
73 commit_parse_limit = 25
75 commit_parse_limit = 25
74 # number of items displayed in lightweight dashboard before paginating
76 # number of items displayed in lightweight dashboard before paginating
75 dashboard_items = 100
77 dashboard_items = 100
76 use_gravatar = true
78 use_gravatar = true
77
79
80 # path to git executable
81 git_path = git
82
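## if the git binary is not on the PATH of the RhodeCode process, git_path
## can be given as an absolute path instead, e.g. (hypothetical location):
## git_path = /opt/git/bin/git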
78 ## RSS feed options
83 ## RSS feed options
79
84
80 rss_cut_off_limit = 256000
85 rss_cut_off_limit = 256000
81 rss_items_per_page = 10
86 rss_items_per_page = 10
82 rss_include_diff = false
87 rss_include_diff = false
83
88
84
89
85 ## alternative_gravatar_url allows you to use your own avatar server application
90 ## alternative_gravatar_url allows you to use your own avatar server application
86 ## the following parts of the URL will be replaced
91 ## the following parts of the URL will be replaced
87 ## {email} user email
92 ## {email} user email
88 ## {md5email} md5 hash of the user email (like at gravatar.com)
93 ## {md5email} md5 hash of the user email (like at gravatar.com)
89 ## {size} size of the image that is expected from the server application
94 ## {size} size of the image that is expected from the server application
90 ## {scheme} http/https from RhodeCode server
95 ## {scheme} http/https from RhodeCode server
91 ## {netloc} network location from RhodeCode server
96 ## {netloc} network location from RhodeCode server
92 #alternative_gravatar_url = http://myavatarserver.com/getbyemail/{email}/{size}
97 #alternative_gravatar_url = http://myavatarserver.com/getbyemail/{email}/{size}
93 #alternative_gravatar_url = http://myavatarserver.com/getbymd5/{md5email}?s={size}
98 #alternative_gravatar_url = http://myavatarserver.com/getbymd5/{md5email}?s={size}
94
99
95 container_auth_enabled = false
100 container_auth_enabled = false
96 proxypass_auth_enabled = false
101 proxypass_auth_enabled = false
97 ## default encoding used to convert from and to unicode
102 ## default encoding used to convert from and to unicode
98 ## can be also a comma seperated list of encoding in case of mixed encodings
103 ## can be also a comma seperated list of encoding in case of mixed encodings
99 default_encoding = utf8
104 default_encoding = utf8
100
105
101 ## overwrite schema of clone url
106 ## overwrite schema of clone url
102 ## available vars:
107 ## available vars:
103 ## scheme - http/https
108 ## scheme - http/https
104 ## user - current user
109 ## user - current user
105 ## pass - password
110 ## pass - password
106 ## netloc - network location
111 ## netloc - network location
107 ## path - usually repo_name
112 ## path - usually repo_name
108
113
109 #clone_uri = {scheme}://{user}{pass}{netloc}{path}
114 #clone_uri = {scheme}://{user}{pass}{netloc}{path}
110
115
111 ## issue tracking mapping for commits messages
116 ## issue tracking mapping for commits messages
112 ## comment out issue_pat, issue_server, issue_prefix to enable
117 ## comment out issue_pat, issue_server, issue_prefix to enable
113
118
114 ## pattern to get the issues from commit messages
119 ## pattern to get the issues from commit messages
115 ## default one used here is #<numbers> with a regex passive group for `#`
120 ## default one used here is #<numbers> with a regex passive group for `#`
116 ## {id} will be all groups matched from this pattern
121 ## {id} will be all groups matched from this pattern
117
122
118 issue_pat = (?:\s*#)(\d+)
123 issue_pat = (?:\s*#)(\d+)
119
124
120 ## server url to the issue, each {id} will be replaced with match
125 ## server url to the issue, each {id} will be replaced with match
121 ## fetched from the regex and {repo} is replaced with full repository name
126 ## fetched from the regex and {repo} is replaced with full repository name
122 ## including groups {repo_name} is replaced with just name of repo
127 ## including groups {repo_name} is replaced with just name of repo
123
128
124 issue_server_link = https://myissueserver.com/{repo}/issue/{id}
129 issue_server_link = https://myissueserver.com/{repo}/issue/{id}
125
130
126 ## prefix to add to link to indicate it's an url
131 ## prefix to add to link to indicate it's an url
127 ## #314 will be replaced by <issue_prefix><id>
132 ## #314 will be replaced by <issue_prefix><id>
128
133
129 issue_prefix = #
134 issue_prefix = #
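## with the defaults above, a commit message such as "fixes #314" matches
## issue_pat, {id} expands to 314, and the reference is rendered as a link
## to https://myissueserver.com/{repo}/issue/314 displayed as #314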
130
135
131 ## issue_pat, issue_server_link, issue_prefix can have suffixes to specify
136 ## issue_pat, issue_server_link, issue_prefix can have suffixes to specify
132 ## multiple patterns, to other issues server, wiki or others
137 ## multiple patterns, to other issues server, wiki or others
133 ## below an example how to create a wiki pattern
138 ## below an example how to create a wiki pattern
134 # #wiki-some-id -> https://mywiki.com/some-id
139 # #wiki-some-id -> https://mywiki.com/some-id
135
140
136 #issue_pat_wiki = (?:wiki-)(.+)
141 #issue_pat_wiki = (?:wiki-)(.+)
137 #issue_server_link_wiki = https://mywiki.com/{id}
142 #issue_server_link_wiki = https://mywiki.com/{id}
138 #issue_prefix_wiki = WIKI-
143 #issue_prefix_wiki = WIKI-
139
144
140
145
141 ## instance-id prefix
146 ## instance-id prefix
142 ## a prefix key for this instance used for cache invalidation when running
147 ## a prefix key for this instance used for cache invalidation when running
143 ## multiple instances of rhodecode, make sure it's globally unique for
148 ## multiple instances of rhodecode, make sure it's globally unique for
144 ## all running rhodecode instances. Leave empty if you don't use it
149 ## all running rhodecode instances. Leave empty if you don't use it
145 instance_id =
150 instance_id =
146
151
147 ## alternative return HTTP header for failed authentication. Default HTTP
152 ## alternative return HTTP header for failed authentication. Default HTTP
148 ## response is 401 HTTPUnauthorized. Currently HG clients have troubles with
153 ## response is 401 HTTPUnauthorized. Currently HG clients have troubles with
149 ## handling that. Set this variable to 403 to return HTTPForbidden
154 ## handling that. Set this variable to 403 to return HTTPForbidden
150 auth_ret_code =
155 auth_ret_code =
151
156
152 ####################################
157 ####################################
153 ### CELERY CONFIG ####
158 ### CELERY CONFIG ####
154 ####################################
159 ####################################
155 use_celery = false
160 use_celery = false
156 broker.host = localhost
161 broker.host = localhost
157 broker.vhost = rabbitmqhost
162 broker.vhost = rabbitmqhost
158 broker.port = 5672
163 broker.port = 5672
159 broker.user = rabbitmq
164 broker.user = rabbitmq
160 broker.password = qweqwe
165 broker.password = qweqwe
161
166
162 celery.imports = rhodecode.lib.celerylib.tasks
167 celery.imports = rhodecode.lib.celerylib.tasks
163
168
164 celery.result.backend = amqp
169 celery.result.backend = amqp
165 celery.result.dburi = amqp://
170 celery.result.dburi = amqp://
166 celery.result.serialier = json
171 celery.result.serialier = json
167
172
168 #celery.send.task.error.emails = true
173 #celery.send.task.error.emails = true
169 #celery.amqp.task.result.expires = 18000
174 #celery.amqp.task.result.expires = 18000
170
175
171 celeryd.concurrency = 2
176 celeryd.concurrency = 2
172 #celeryd.log.file = celeryd.log
177 #celeryd.log.file = celeryd.log
173 celeryd.log.level = debug
178 celeryd.log.level = debug
174 celeryd.max.tasks.per.child = 1
179 celeryd.max.tasks.per.child = 1
175
180
176 #tasks will never be sent to the queue, but executed locally instead.
181 #tasks will never be sent to the queue, but executed locally instead.
177 celery.always.eager = false
182 celery.always.eager = false
178
183
179 ####################################
184 ####################################
180 ### BEAKER CACHE ####
185 ### BEAKER CACHE ####
181 ####################################
186 ####################################
182 beaker.cache.data_dir=%(here)s/data/cache/data
187 beaker.cache.data_dir=%(here)s/data/cache/data
183 beaker.cache.lock_dir=%(here)s/data/cache/lock
188 beaker.cache.lock_dir=%(here)s/data/cache/lock
184
189
185 beaker.cache.regions=super_short_term,short_term,long_term,sql_cache_short,sql_cache_med,sql_cache_long
190 beaker.cache.regions=super_short_term,short_term,long_term,sql_cache_short,sql_cache_med,sql_cache_long
186
191
187 beaker.cache.super_short_term.type=memory
192 beaker.cache.super_short_term.type=memory
188 beaker.cache.super_short_term.expire=10
193 beaker.cache.super_short_term.expire=10
189 beaker.cache.super_short_term.key_length = 256
194 beaker.cache.super_short_term.key_length = 256
190
195
191 beaker.cache.short_term.type=memory
196 beaker.cache.short_term.type=memory
192 beaker.cache.short_term.expire=60
197 beaker.cache.short_term.expire=60
193 beaker.cache.short_term.key_length = 256
198 beaker.cache.short_term.key_length = 256
194
199
195 beaker.cache.long_term.type=memory
200 beaker.cache.long_term.type=memory
196 beaker.cache.long_term.expire=36000
201 beaker.cache.long_term.expire=36000
197 beaker.cache.long_term.key_length = 256
202 beaker.cache.long_term.key_length = 256
198
203
199 beaker.cache.sql_cache_short.type=memory
204 beaker.cache.sql_cache_short.type=memory
200 beaker.cache.sql_cache_short.expire=10
205 beaker.cache.sql_cache_short.expire=10
201 beaker.cache.sql_cache_short.key_length = 256
206 beaker.cache.sql_cache_short.key_length = 256
202
207
203 beaker.cache.sql_cache_med.type=memory
208 beaker.cache.sql_cache_med.type=memory
204 beaker.cache.sql_cache_med.expire=360
209 beaker.cache.sql_cache_med.expire=360
205 beaker.cache.sql_cache_med.key_length = 256
210 beaker.cache.sql_cache_med.key_length = 256
206
211
207 beaker.cache.sql_cache_long.type=file
212 beaker.cache.sql_cache_long.type=file
208 beaker.cache.sql_cache_long.expire=3600
213 beaker.cache.sql_cache_long.expire=3600
209 beaker.cache.sql_cache_long.key_length = 256
214 beaker.cache.sql_cache_long.key_length = 256
210
215
211 ####################################
216 ####################################
212 ### BEAKER SESSION ####
217 ### BEAKER SESSION ####
213 ####################################
218 ####################################
214 ## Type of storage used for the session, current types are
219 ## Type of storage used for the session, current types are
215 ## dbm, file, memcached, database, and memory.
220 ## dbm, file, memcached, database, and memory.
216 ## The storage uses the Container API
221 ## The storage uses the Container API
217 ## that is also used by the cache system.
222 ## that is also used by the cache system.
218
223
219 ## db session ##
224 ## db session ##
220 #beaker.session.type = ext:database
225 #beaker.session.type = ext:database
221 #beaker.session.sa.url = postgresql://postgres:qwe@localhost/rhodecode
226 #beaker.session.sa.url = postgresql://postgres:qwe@localhost/rhodecode
222 #beaker.session.table_name = db_session
227 #beaker.session.table_name = db_session
223
228
224 ## encrypted cookie client side session, good for many instances ##
229 ## encrypted cookie client side session, good for many instances ##
225 #beaker.session.type = cookie
230 #beaker.session.type = cookie
226
231
227 ## file based cookies (default) ##
232 ## file based cookies (default) ##
228 #beaker.session.type = file
233 #beaker.session.type = file
229
234
230
235
231 beaker.session.key = rhodecode
236 beaker.session.key = rhodecode
232 ## secure cookie requires AES python libraries ##
237 ## secure cookie requires AES python libraries ##
233 #beaker.session.encrypt_key = g654dcno0-9873jhgfreyu
238 #beaker.session.encrypt_key = g654dcno0-9873jhgfreyu
234 #beaker.session.validate_key = 9712sds2212c--zxc123
239 #beaker.session.validate_key = 9712sds2212c--zxc123
235 ## sets session as invalid if it haven't been accessed for given amount of time
240 ## sets session as invalid if it haven't been accessed for given amount of time
236 beaker.session.timeout = 2592000
241 beaker.session.timeout = 2592000
237 beaker.session.httponly = true
242 beaker.session.httponly = true
238 #beaker.session.cookie_path = /<your-prefix>
243 #beaker.session.cookie_path = /<your-prefix>
239
244
240 ## uncomment for https secure cookie ##
245 ## uncomment for https secure cookie ##
241 beaker.session.secure = false
246 beaker.session.secure = false
242
247
243 ## auto save the session to not to use .save() ##
248 ## auto save the session to not to use .save() ##
244 beaker.session.auto = False
249 beaker.session.auto = False
245
250
246 ## default cookie expiration time in seconds `true` expire at browser close ##
251 ## default cookie expiration time in seconds `true` expire at browser close ##
247 #beaker.session.cookie_expires = 3600
252 #beaker.session.cookie_expires = 3600
248
253
249
254
250 ############################
255 ############################
251 ## ERROR HANDLING SYSTEMS ##
256 ## ERROR HANDLING SYSTEMS ##
252 ############################
257 ############################
253
258
254 ####################
259 ####################
255 ### [errormator] ###
260 ### [errormator] ###
256 ####################
261 ####################
257
262
258 # Errormator is tailored to work with RhodeCode, see
263 # Errormator is tailored to work with RhodeCode, see
259 # http://errormator.com for details how to obtain an account
264 # http://errormator.com for details how to obtain an account
260 # you must install python package `errormator_client` to make it work
265 # you must install python package `errormator_client` to make it work
261
266
262 # errormator enabled
267 # errormator enabled
263 errormator = true
268 errormator = true
264
269
265 errormator.server_url = https://api.errormator.com
270 errormator.server_url = https://api.errormator.com
266 errormator.api_key = YOUR_API_KEY
271 errormator.api_key = YOUR_API_KEY
267
272
268 # TWEAK AMOUNT OF INFO SENT HERE
273 # TWEAK AMOUNT OF INFO SENT HERE
269
274
270 # enables 404 error logging (default False)
275 # enables 404 error logging (default False)
271 errormator.report_404 = false
276 errormator.report_404 = false
272
277
273 # time in seconds after request is considered being slow (default 1)
278 # time in seconds after request is considered being slow (default 1)
274 errormator.slow_request_time = 1
279 errormator.slow_request_time = 1
275
280
276 # record slow requests in application
281 # record slow requests in application
277 # (needs to be enabled for slow datastore recording and time tracking)
282 # (needs to be enabled for slow datastore recording and time tracking)
278 errormator.slow_requests = true
283 errormator.slow_requests = true
279
284
280 # enable hooking to application loggers
285 # enable hooking to application loggers
281 # errormator.logging = true
286 # errormator.logging = true
282
287
283 # minimum log level for log capture
288 # minimum log level for log capture
284 # errormator.logging.level = WARNING
289 # errormator.logging.level = WARNING
285
290
286 # send logs only from erroneous/slow requests
291 # send logs only from erroneous/slow requests
287 # (saves API quota for intensive logging)
292 # (saves API quota for intensive logging)
288 errormator.logging_on_error = false
293 errormator.logging_on_error = false
289
294
290 # list of additonal keywords that should be grabbed from environ object
295 # list of additonal keywords that should be grabbed from environ object
291 # can be string with comma separated list of words in lowercase
296 # can be string with comma separated list of words in lowercase
292 # (by default client will always send following info:
297 # (by default client will always send following info:
293 # 'REMOTE_USER', 'REMOTE_ADDR', 'SERVER_NAME', 'CONTENT_TYPE' + all keys that
298 # 'REMOTE_USER', 'REMOTE_ADDR', 'SERVER_NAME', 'CONTENT_TYPE' + all keys that
294 # start with HTTP* this list be extended with additional keywords here
299 # start with HTTP* this list be extended with additional keywords here
295 errormator.environ_keys_whitelist =
300 errormator.environ_keys_whitelist =
296
301
297
302
298 # list of keywords that should be blanked from request object
303 # list of keywords that should be blanked from request object
299 # can be string with comma separated list of words in lowercase
304 # can be string with comma separated list of words in lowercase
300 # (by default client will always blank keys that contain following words
305 # (by default client will always blank keys that contain following words
301 # 'password', 'passwd', 'pwd', 'auth_tkt', 'secret', 'csrf'
306 # 'password', 'passwd', 'pwd', 'auth_tkt', 'secret', 'csrf'
302 # this list be extended with additional keywords set here
307 # this list be extended with additional keywords set here
303 errormator.request_keys_blacklist =
308 errormator.request_keys_blacklist =
304
309
305
310
306 # list of namespaces that should be ignores when gathering log entries
311 # list of namespaces that should be ignores when gathering log entries
307 # can be string with comma separated list of namespaces
312 # can be string with comma separated list of namespaces
308 # (by default the client ignores own entries: errormator_client.client)
313 # (by default the client ignores own entries: errormator_client.client)
309 errormator.log_namespace_blacklist =
314 errormator.log_namespace_blacklist =
310
315
311
316
312 ################
317 ################
313 ### [sentry] ###
318 ### [sentry] ###
314 ################
319 ################
315
320
316 # sentry is a alternative open source error aggregator
321 # sentry is a alternative open source error aggregator
317 # you must install python packages `sentry` and `raven` to enable
322 # you must install python packages `sentry` and `raven` to enable
318
323
319 sentry.dsn = YOUR_DNS
324 sentry.dsn = YOUR_DNS
320 sentry.servers =
325 sentry.servers =
321 sentry.name =
326 sentry.name =
322 sentry.key =
327 sentry.key =
323 sentry.public_key =
328 sentry.public_key =
324 sentry.secret_key =
329 sentry.secret_key =
325 sentry.project =
330 sentry.project =
326 sentry.site =
331 sentry.site =
327 sentry.include_paths =
332 sentry.include_paths =
328 sentry.exclude_paths =
333 sentry.exclude_paths =
329
334
330
335
331 ################################################################################
336 ################################################################################
332 ## WARNING: *THE LINE BELOW MUST BE UNCOMMENTED ON A PRODUCTION ENVIRONMENT* ##
337 ## WARNING: *THE LINE BELOW MUST BE UNCOMMENTED ON A PRODUCTION ENVIRONMENT* ##
333 ## Debug mode will enable the interactive debugging tool, allowing ANYONE to ##
338 ## Debug mode will enable the interactive debugging tool, allowing ANYONE to ##
334 ## execute malicious code after an exception is raised. ##
339 ## execute malicious code after an exception is raised. ##
335 ################################################################################
340 ################################################################################
336 #set debug = false
341 #set debug = false
337
342
338 ##################################
343 ##################################
339 ### LOGVIEW CONFIG ###
344 ### LOGVIEW CONFIG ###
340 ##################################
345 ##################################
341 logview.sqlalchemy = #faa
346 logview.sqlalchemy = #faa
342 logview.pylons.templating = #bfb
347 logview.pylons.templating = #bfb
343 logview.pylons.util = #eee
348 logview.pylons.util = #eee
344
349
345 #########################################################
350 #########################################################
346 ### DB CONFIGS - EACH DB WILL HAVE IT'S OWN CONFIG ###
351 ### DB CONFIGS - EACH DB WILL HAVE IT'S OWN CONFIG ###
347 #########################################################
352 #########################################################
348 #sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode.db
353 #sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode.db
349 sqlalchemy.db1.url = postgresql://postgres:qwe@localhost/rhodecode
354 sqlalchemy.db1.url = postgresql://postgres:qwe@localhost/rhodecode
350 sqlalchemy.db1.echo = false
355 sqlalchemy.db1.echo = false
351 sqlalchemy.db1.pool_recycle = 3600
356 sqlalchemy.db1.pool_recycle = 3600
352 sqlalchemy.db1.convert_unicode = true
357 sqlalchemy.db1.convert_unicode = true
353
358
354 ################################
359 ################################
355 ### LOGGING CONFIGURATION ####
360 ### LOGGING CONFIGURATION ####
356 ################################
361 ################################
357 [loggers]
362 [loggers]
358 keys = root, routes, rhodecode, sqlalchemy, beaker, templates, whoosh_indexer
363 keys = root, routes, rhodecode, sqlalchemy, beaker, templates, whoosh_indexer
359
364
360 [handlers]
365 [handlers]
361 keys = console, console_sql
366 keys = console, console_sql
362
367
363 [formatters]
368 [formatters]
364 keys = generic, color_formatter, color_formatter_sql
369 keys = generic, color_formatter, color_formatter_sql
365
370
366 #############
371 #############
367 ## LOGGERS ##
372 ## LOGGERS ##
368 #############
373 #############
369 [logger_root]
374 [logger_root]
370 level = NOTSET
375 level = NOTSET
371 handlers = console
376 handlers = console
372
377
373 [logger_routes]
378 [logger_routes]
374 level = DEBUG
379 level = DEBUG
375 handlers =
380 handlers =
376 qualname = routes.middleware
381 qualname = routes.middleware
377 # "level = DEBUG" logs the route matched and routing variables.
382 # "level = DEBUG" logs the route matched and routing variables.
378 propagate = 1
383 propagate = 1
379
384
380 [logger_beaker]
385 [logger_beaker]
381 level = DEBUG
386 level = DEBUG
382 handlers =
387 handlers =
383 qualname = beaker.container
388 qualname = beaker.container
384 propagate = 1
389 propagate = 1
385
390
386 [logger_templates]
391 [logger_templates]
387 level = INFO
392 level = INFO
388 handlers =
393 handlers =
389 qualname = pylons.templating
394 qualname = pylons.templating
390 propagate = 1
395 propagate = 1
391
396
392 [logger_rhodecode]
397 [logger_rhodecode]
393 level = DEBUG
398 level = DEBUG
394 handlers =
399 handlers =
395 qualname = rhodecode
400 qualname = rhodecode
396 propagate = 1
401 propagate = 1
397
402
398 [logger_sqlalchemy]
403 [logger_sqlalchemy]
399 level = INFO
404 level = INFO
400 handlers = console_sql
405 handlers = console_sql
401 qualname = sqlalchemy.engine
406 qualname = sqlalchemy.engine
402 propagate = 0
407 propagate = 0
403
408
404 [logger_whoosh_indexer]
409 [logger_whoosh_indexer]
405 level = DEBUG
410 level = DEBUG
406 handlers =
411 handlers =
407 qualname = whoosh_indexer
412 qualname = whoosh_indexer
408 propagate = 1
413 propagate = 1
409
414
410 ##############
415 ##############
411 ## HANDLERS ##
416 ## HANDLERS ##
412 ##############
417 ##############
413
418
414 [handler_console]
419 [handler_console]
415 class = StreamHandler
420 class = StreamHandler
416 args = (sys.stderr,)
421 args = (sys.stderr,)
417 level = DEBUG
422 level = DEBUG
418 formatter = color_formatter
423 formatter = color_formatter
419
424
420 [handler_console_sql]
425 [handler_console_sql]
421 class = StreamHandler
426 class = StreamHandler
422 args = (sys.stderr,)
427 args = (sys.stderr,)
423 level = DEBUG
428 level = DEBUG
424 formatter = color_formatter_sql
429 formatter = color_formatter_sql
425
430
426 ################
431 ################
427 ## FORMATTERS ##
432 ## FORMATTERS ##
428 ################
433 ################
429
434
430 [formatter_generic]
435 [formatter_generic]
431 format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
436 format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
432 datefmt = %Y-%m-%d %H:%M:%S
437 datefmt = %Y-%m-%d %H:%M:%S
433
438
434 [formatter_color_formatter]
439 [formatter_color_formatter]
435 class=rhodecode.lib.colored_formatter.ColorFormatter
440 class=rhodecode.lib.colored_formatter.ColorFormatter
436 format= %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
441 format= %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
437 datefmt = %Y-%m-%d %H:%M:%S
442 datefmt = %Y-%m-%d %H:%M:%S
438
443
439 [formatter_color_formatter_sql]
444 [formatter_color_formatter_sql]
440 class=rhodecode.lib.colored_formatter.ColorFormatterSql
445 class=rhodecode.lib.colored_formatter.ColorFormatterSql
441 format= %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
446 format= %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
442 datefmt = %Y-%m-%d %H:%M:%S
447 datefmt = %Y-%m-%d %H:%M:%S
@@ -1,442 +1,447 b''
1 ################################################################################
1 ################################################################################
2 ################################################################################
2 ################################################################################
3 # RhodeCode - Pylons environment configuration #
3 # RhodeCode - Pylons environment configuration #
4 # #
4 # #
5 # The %(here)s variable will be replaced with the parent directory of this file#
5 # The %(here)s variable will be replaced with the parent directory of this file#
6 ################################################################################
6 ################################################################################
7
7
8 [DEFAULT]
8 [DEFAULT]
9 debug = true
9 debug = true
10 pdebug = false
10 pdebug = false
11 ################################################################################
11 ################################################################################
12 ## Uncomment and replace with the address which should receive ##
12 ## Uncomment and replace with the address which should receive ##
13 ## any error reports after application crash ##
13 ## any error reports after application crash ##
14 ## Additionally those settings will be used by RhodeCode mailing system ##
14 ## Additionally those settings will be used by RhodeCode mailing system ##
15 ################################################################################
15 ################################################################################
16 #email_to = admin@localhost
16 #email_to = admin@localhost
17 #error_email_from = paste_error@localhost
17 #error_email_from = paste_error@localhost
18 #app_email_from = rhodecode-noreply@localhost
18 #app_email_from = rhodecode-noreply@localhost
19 #error_message =
19 #error_message =
20 #email_prefix = [RhodeCode]
20 #email_prefix = [RhodeCode]
21
21
22 #smtp_server = mail.server.com
22 #smtp_server = mail.server.com
23 #smtp_username =
23 #smtp_username =
24 #smtp_password =
24 #smtp_password =
25 #smtp_port =
25 #smtp_port =
26 #smtp_use_tls = false
26 #smtp_use_tls = false
27 #smtp_use_ssl = true
27 #smtp_use_ssl = true
28 # Specify available auth parameters here (e.g. LOGIN PLAIN CRAM-MD5, etc.)
28 # Specify available auth parameters here (e.g. LOGIN PLAIN CRAM-MD5, etc.)
29 #smtp_auth =
29 #smtp_auth =
30
30
31 [server:main]
31 [server:main]
32 ## PASTE
32 ## PASTE
33 ##nr of threads to spawn
33 ##nr of threads to spawn
34 #threadpool_workers = 5
34 #threadpool_workers = 5
35
35
36 ##max request before thread respawn
36 ##max request before thread respawn
37 #threadpool_max_requests = 10
37 #threadpool_max_requests = 10
38
38
39 ##option to use threads of process
39 ##option to use threads of process
40 #use_threadpool = true
40 #use_threadpool = true
41
41
42 #use = egg:Paste#http
42 #use = egg:Paste#http
43
43
44 #WAITRESS
44 #WAITRESS
45 threads = 5
45 threads = 5
46 #100GB
47 max_request_body_size = 107374182400
46 use = egg:waitress#main
48 use = egg:waitress#main
47
49
48 host = 127.0.0.1
50 host = 127.0.0.1
49 port = 8001
51 port = 8001
50
52
51 [filter:proxy-prefix]
53 [filter:proxy-prefix]
52 # prefix middleware for rc
54 # prefix middleware for rc
53 use = egg:PasteDeploy#prefix
55 use = egg:PasteDeploy#prefix
54 prefix = /<your-prefix>
56 prefix = /<your-prefix>
55
57
56 [app:main]
58 [app:main]
57 use = egg:rhodecode
59 use = egg:rhodecode
58 #filter-with = proxy-prefix
60 #filter-with = proxy-prefix
59 full_stack = true
61 full_stack = true
60 static_files = true
62 static_files = true
61 # Optional Languages
63 # Optional Languages
62 # en, fr, ja, pt_BR, zh_CN, zh_TW, pl
64 # en, fr, ja, pt_BR, zh_CN, zh_TW, pl
63 lang = en
65 lang = en
64 cache_dir = %(here)s/data
66 cache_dir = %(here)s/data
65 index_dir = %(here)s/data/index
67 index_dir = %(here)s/data/index
66 app_instance_uuid = rc-production
68 app_instance_uuid = rc-production
67 cut_off_limit = 256000
69 cut_off_limit = 256000
68 vcs_full_cache = True
70 vcs_full_cache = True
69 # force https in RhodeCode, fixes https redirects, assumes it's always https
71 # force https in RhodeCode, fixes https redirects, assumes it's always https
70 force_https = false
72 force_https = false
71 # use Strict-Transport-Security headers
73 # use Strict-Transport-Security headers
72 use_htsts = false
74 use_htsts = false
73 commit_parse_limit = 50
75 commit_parse_limit = 50
74 # number of items displayed in lightweight dashboard before paginating
76 # number of items displayed in lightweight dashboard before paginating
75 dashboard_items = 100
77 dashboard_items = 100
76 use_gravatar = true
78 use_gravatar = true
77
79
80 # path to git executable
81 git_path = git
82
78 ## RSS feed options
83 ## RSS feed options
79
84
80 rss_cut_off_limit = 256000
85 rss_cut_off_limit = 256000
81 rss_items_per_page = 10
86 rss_items_per_page = 10
82 rss_include_diff = false
87 rss_include_diff = false
83
88
84
89
85 ## alternative_gravatar_url allows you to use your own avatar server application
90 ## alternative_gravatar_url allows you to use your own avatar server application
86 ## the following parts of the URL will be replaced
91 ## the following parts of the URL will be replaced
87 ## {email} user email
92 ## {email} user email
88 ## {md5email} md5 hash of the user email (like at gravatar.com)
93 ## {md5email} md5 hash of the user email (like at gravatar.com)
89 ## {size} size of the image that is expected from the server application
94 ## {size} size of the image that is expected from the server application
90 ## {scheme} http/https from RhodeCode server
95 ## {scheme} http/https from RhodeCode server
91 ## {netloc} network location from RhodeCode server
96 ## {netloc} network location from RhodeCode server
92 #alternative_gravatar_url = http://myavatarserver.com/getbyemail/{email}/{size}
97 #alternative_gravatar_url = http://myavatarserver.com/getbyemail/{email}/{size}
93 #alternative_gravatar_url = http://myavatarserver.com/getbymd5/{md5email}?s={size}
98 #alternative_gravatar_url = http://myavatarserver.com/getbymd5/{md5email}?s={size}
94
99
95 container_auth_enabled = false
100 container_auth_enabled = false
96 proxypass_auth_enabled = false
101 proxypass_auth_enabled = false
97 ## default encoding used to convert from and to unicode
102 ## default encoding used to convert from and to unicode
98 ## can be also a comma seperated list of encoding in case of mixed encodings
103 ## can be also a comma seperated list of encoding in case of mixed encodings
99 default_encoding = utf8
104 default_encoding = utf8
100
105
101 ## overwrite schema of clone url
106 ## overwrite schema of clone url
102 ## available vars:
107 ## available vars:
103 ## scheme - http/https
108 ## scheme - http/https
104 ## user - current user
109 ## user - current user
105 ## pass - password
110 ## pass - password
106 ## netloc - network location
111 ## netloc - network location
107 ## path - usually repo_name
112 ## path - usually repo_name
108
113
109 #clone_uri = {scheme}://{user}{pass}{netloc}{path}
114 #clone_uri = {scheme}://{user}{pass}{netloc}{path}
110
115
111 ## issue tracking mapping for commits messages
116 ## issue tracking mapping for commits messages
112 ## comment out issue_pat, issue_server, issue_prefix to enable
117 ## comment out issue_pat, issue_server, issue_prefix to enable
113
118
114 ## pattern to get the issues from commit messages
119 ## pattern to get the issues from commit messages
115 ## default one used here is #<numbers> with a regex passive group for `#`
120 ## default one used here is #<numbers> with a regex passive group for `#`
116 ## {id} will be all groups matched from this pattern
121 ## {id} will be all groups matched from this pattern
117
122
118 issue_pat = (?:\s*#)(\d+)
123 issue_pat = (?:\s*#)(\d+)
119
124
120 ## server url to the issue, each {id} will be replaced with match
125 ## server url to the issue, each {id} will be replaced with match
121 ## fetched from the regex and {repo} is replaced with full repository name
126 ## fetched from the regex and {repo} is replaced with full repository name
122 ## including groups {repo_name} is replaced with just name of repo
127 ## including groups {repo_name} is replaced with just name of repo
123
128
124 issue_server_link = https://myissueserver.com/{repo}/issue/{id}
129 issue_server_link = https://myissueserver.com/{repo}/issue/{id}
125
130
126 ## prefix to add to link to indicate it's an url
131 ## prefix to add to link to indicate it's an url
127 ## #314 will be replaced by <issue_prefix><id>
132 ## #314 will be replaced by <issue_prefix><id>
128
133
129 issue_prefix = #
134 issue_prefix = #
130
135
131 ## issue_pat, issue_server_link, issue_prefix can have suffixes to specify
136 ## issue_pat, issue_server_link, issue_prefix can have suffixes to specify
132 ## multiple patterns, to other issues server, wiki or others
137 ## multiple patterns, to other issues server, wiki or others
133 ## below an example how to create a wiki pattern
138 ## below an example how to create a wiki pattern
134 # #wiki-some-id -> https://mywiki.com/some-id
139 # #wiki-some-id -> https://mywiki.com/some-id
135
140
136 #issue_pat_wiki = (?:wiki-)(.+)
141 #issue_pat_wiki = (?:wiki-)(.+)
137 #issue_server_link_wiki = https://mywiki.com/{id}
142 #issue_server_link_wiki = https://mywiki.com/{id}
138 #issue_prefix_wiki = WIKI-
143 #issue_prefix_wiki = WIKI-
139
144
140
145
141 ## instance-id prefix
146 ## instance-id prefix
142 ## a prefix key for this instance used for cache invalidation when running
147 ## a prefix key for this instance used for cache invalidation when running
143 ## multiple instances of rhodecode, make sure it's globally unique for
148 ## multiple instances of rhodecode, make sure it's globally unique for
144 ## all running rhodecode instances. Leave empty if you don't use it
149 ## all running rhodecode instances. Leave empty if you don't use it
145 instance_id =
150 instance_id =
146
151
147 ## alternative return HTTP header for failed authentication. Default HTTP
152 ## alternative return HTTP header for failed authentication. Default HTTP
148 ## response is 401 HTTPUnauthorized. Currently HG clients have troubles with
153 ## response is 401 HTTPUnauthorized. Currently HG clients have troubles with
149 ## handling that. Set this variable to 403 to return HTTPForbidden
154 ## handling that. Set this variable to 403 to return HTTPForbidden
150 auth_ret_code =
155 auth_ret_code =
151
156
152 ####################################
157 ####################################
153 ### CELERY CONFIG ####
158 ### CELERY CONFIG ####
154 ####################################
159 ####################################
155 use_celery = false
160 use_celery = false
156 broker.host = localhost
161 broker.host = localhost
157 broker.vhost = rabbitmqhost
162 broker.vhost = rabbitmqhost
158 broker.port = 5672
163 broker.port = 5672
159 broker.user = rabbitmq
164 broker.user = rabbitmq
160 broker.password = qweqwe
165 broker.password = qweqwe
161
166
162 celery.imports = rhodecode.lib.celerylib.tasks
167 celery.imports = rhodecode.lib.celerylib.tasks
163
168
164 celery.result.backend = amqp
169 celery.result.backend = amqp
165 celery.result.dburi = amqp://
170 celery.result.dburi = amqp://
166 celery.result.serialier = json
171 celery.result.serialier = json
167
172
168 #celery.send.task.error.emails = true
173 #celery.send.task.error.emails = true
169 #celery.amqp.task.result.expires = 18000
174 #celery.amqp.task.result.expires = 18000
170
175
171 celeryd.concurrency = 2
176 celeryd.concurrency = 2
172 #celeryd.log.file = celeryd.log
177 #celeryd.log.file = celeryd.log
173 celeryd.log.level = debug
178 celeryd.log.level = debug
174 celeryd.max.tasks.per.child = 1
179 celeryd.max.tasks.per.child = 1
175
180
176 #tasks will never be sent to the queue, but executed locally instead.
181 #tasks will never be sent to the queue, but executed locally instead.
177 celery.always.eager = false
182 celery.always.eager = false
178
183
179 ####################################
184 ####################################
180 ### BEAKER CACHE ####
185 ### BEAKER CACHE ####
181 ####################################
186 ####################################
182 beaker.cache.data_dir=%(here)s/data/cache/data
187 beaker.cache.data_dir=%(here)s/data/cache/data
183 beaker.cache.lock_dir=%(here)s/data/cache/lock
188 beaker.cache.lock_dir=%(here)s/data/cache/lock
184
189
185 beaker.cache.regions=super_short_term,short_term,long_term,sql_cache_short,sql_cache_med,sql_cache_long
190 beaker.cache.regions=super_short_term,short_term,long_term,sql_cache_short,sql_cache_med,sql_cache_long
186
191
187 beaker.cache.super_short_term.type=memory
192 beaker.cache.super_short_term.type=memory
188 beaker.cache.super_short_term.expire=10
193 beaker.cache.super_short_term.expire=10
189 beaker.cache.super_short_term.key_length = 256
194 beaker.cache.super_short_term.key_length = 256
190
195
191 beaker.cache.short_term.type=memory
196 beaker.cache.short_term.type=memory
192 beaker.cache.short_term.expire=60
197 beaker.cache.short_term.expire=60
193 beaker.cache.short_term.key_length = 256
198 beaker.cache.short_term.key_length = 256
194
199
195 beaker.cache.long_term.type=memory
200 beaker.cache.long_term.type=memory
196 beaker.cache.long_term.expire=36000
201 beaker.cache.long_term.expire=36000
197 beaker.cache.long_term.key_length = 256
202 beaker.cache.long_term.key_length = 256
198
203
199 beaker.cache.sql_cache_short.type=memory
204 beaker.cache.sql_cache_short.type=memory
200 beaker.cache.sql_cache_short.expire=10
205 beaker.cache.sql_cache_short.expire=10
201 beaker.cache.sql_cache_short.key_length = 256
206 beaker.cache.sql_cache_short.key_length = 256
202
207
203 beaker.cache.sql_cache_med.type=memory
208 beaker.cache.sql_cache_med.type=memory
204 beaker.cache.sql_cache_med.expire=360
209 beaker.cache.sql_cache_med.expire=360
205 beaker.cache.sql_cache_med.key_length = 256
210 beaker.cache.sql_cache_med.key_length = 256
206
211
207 beaker.cache.sql_cache_long.type=file
212 beaker.cache.sql_cache_long.type=file
208 beaker.cache.sql_cache_long.expire=3600
213 beaker.cache.sql_cache_long.expire=3600
209 beaker.cache.sql_cache_long.key_length = 256
214 beaker.cache.sql_cache_long.key_length = 256
210
215
211 ####################################
216 ####################################
212 ### BEAKER SESSION ####
217 ### BEAKER SESSION ####
213 ####################################
218 ####################################
214 ## Type of storage used for the session, current types are
219 ## Type of storage used for the session, current types are
215 ## dbm, file, memcached, database, and memory.
220 ## dbm, file, memcached, database, and memory.
216 ## The storage uses the Container API
221 ## The storage uses the Container API
217 ## that is also used by the cache system.
222 ## that is also used by the cache system.
218
223
219 ## db session ##
224 ## db session ##
220 #beaker.session.type = ext:database
225 #beaker.session.type = ext:database
221 #beaker.session.sa.url = postgresql://postgres:qwe@localhost/rhodecode
226 #beaker.session.sa.url = postgresql://postgres:qwe@localhost/rhodecode
222 #beaker.session.table_name = db_session
227 #beaker.session.table_name = db_session
223
228
224 ## encrypted cookie client side session, good for many instances ##
229 ## encrypted cookie client side session, good for many instances ##
225 #beaker.session.type = cookie
230 #beaker.session.type = cookie
226
231
227 ## file based cookies (default) ##
232 ## file based cookies (default) ##
228 #beaker.session.type = file
233 #beaker.session.type = file
229
234
230
235
231 beaker.session.key = rhodecode
236 beaker.session.key = rhodecode
232 ## secure cookie requires AES python libraries ##
237 ## secure cookie requires AES python libraries ##
233 #beaker.session.encrypt_key = g654dcno0-9873jhgfreyu
238 #beaker.session.encrypt_key = g654dcno0-9873jhgfreyu
234 #beaker.session.validate_key = 9712sds2212c--zxc123
239 #beaker.session.validate_key = 9712sds2212c--zxc123
235 ## sets session as invalid if it haven't been accessed for given amount of time
240 ## sets session as invalid if it haven't been accessed for given amount of time
236 beaker.session.timeout = 2592000
241 beaker.session.timeout = 2592000
237 beaker.session.httponly = true
242 beaker.session.httponly = true
238 #beaker.session.cookie_path = /<your-prefix>
243 #beaker.session.cookie_path = /<your-prefix>
239
244
240 ## uncomment for https secure cookie ##
245 ## uncomment for https secure cookie ##
241 beaker.session.secure = false
246 beaker.session.secure = false
242
247
243 ## auto save the session to not to use .save() ##
248 ## auto save the session to not to use .save() ##
244 beaker.session.auto = False
249 beaker.session.auto = False
245
250
246 ## default cookie expiration time in seconds `true` expire at browser close ##
251 ## default cookie expiration time in seconds `true` expire at browser close ##
247 #beaker.session.cookie_expires = 3600
252 #beaker.session.cookie_expires = 3600
248
253
249
254
250 ############################
255 ############################
251 ## ERROR HANDLING SYSTEMS ##
256 ## ERROR HANDLING SYSTEMS ##
252 ############################
257 ############################
253
258
254 ####################
259 ####################
255 ### [errormator] ###
260 ### [errormator] ###
256 ####################
261 ####################
257
262
258 # Errormator is tailored to work with RhodeCode, see
263 # Errormator is tailored to work with RhodeCode, see
259 # http://errormator.com for details how to obtain an account
264 # http://errormator.com for details how to obtain an account
260 # you must install python package `errormator_client` to make it work
265 # you must install python package `errormator_client` to make it work
261
266
262 # errormator enabled
267 # errormator enabled
263 errormator = true
268 errormator = true
264
269
265 errormator.server_url = https://api.errormator.com
270 errormator.server_url = https://api.errormator.com
266 errormator.api_key = YOUR_API_KEY
271 errormator.api_key = YOUR_API_KEY
267
272
268 # TWEAK AMOUNT OF INFO SENT HERE
273 # TWEAK AMOUNT OF INFO SENT HERE
269
274
270 # enables 404 error logging (default False)
275 # enables 404 error logging (default False)
271 errormator.report_404 = false
276 errormator.report_404 = false
272
277
273 # time in seconds after request is considered being slow (default 1)
278 # time in seconds after request is considered being slow (default 1)
274 errormator.slow_request_time = 1
279 errormator.slow_request_time = 1
275
280
276 # record slow requests in application
281 # record slow requests in application
277 # (needs to be enabled for slow datastore recording and time tracking)
282 # (needs to be enabled for slow datastore recording and time tracking)
278 errormator.slow_requests = true
283 errormator.slow_requests = true
279
284
280 # enable hooking to application loggers
285 # enable hooking to application loggers
281 # errormator.logging = true
286 # errormator.logging = true
282
287
283 # minimum log level for log capture
288 # minimum log level for log capture
284 # errormator.logging.level = WARNING
289 # errormator.logging.level = WARNING
285
290
286 # send logs only from erroneous/slow requests
291 # send logs only from erroneous/slow requests
287 # (saves API quota for intensive logging)
292 # (saves API quota for intensive logging)
288 errormator.logging_on_error = false
293 errormator.logging_on_error = false
289
294
290 # list of additonal keywords that should be grabbed from environ object
295 # list of additonal keywords that should be grabbed from environ object
291 # can be string with comma separated list of words in lowercase
296 # can be string with comma separated list of words in lowercase
292 # (by default client will always send following info:
297 # (by default client will always send following info:
293 # 'REMOTE_USER', 'REMOTE_ADDR', 'SERVER_NAME', 'CONTENT_TYPE' + all keys that
298 # 'REMOTE_USER', 'REMOTE_ADDR', 'SERVER_NAME', 'CONTENT_TYPE' + all keys that
294 # start with HTTP* this list be extended with additional keywords here
299 # start with HTTP* this list be extended with additional keywords here
295 errormator.environ_keys_whitelist =
300 errormator.environ_keys_whitelist =
296
301
297
302
298 # list of keywords that should be blanked from request object
303 # list of keywords that should be blanked from request object
299 # can be string with comma separated list of words in lowercase
304 # can be string with comma separated list of words in lowercase
300 # (by default client will always blank keys that contain following words
305 # (by default client will always blank keys that contain following words
301 # 'password', 'passwd', 'pwd', 'auth_tkt', 'secret', 'csrf'
306 # 'password', 'passwd', 'pwd', 'auth_tkt', 'secret', 'csrf'
302 # this list be extended with additional keywords set here
307 # this list be extended with additional keywords set here
303 errormator.request_keys_blacklist =
308 errormator.request_keys_blacklist =
304
309
305
310
306 # list of namespaces that should be ignores when gathering log entries
311 # list of namespaces that should be ignores when gathering log entries
307 # can be string with comma separated list of namespaces
312 # can be string with comma separated list of namespaces
308 # (by default the client ignores own entries: errormator_client.client)
313 # (by default the client ignores own entries: errormator_client.client)
309 errormator.log_namespace_blacklist =
314 errormator.log_namespace_blacklist =
310
315
311
316
312 ################
317 ################
313 ### [sentry] ###
318 ### [sentry] ###
314 ################
319 ################
315
320
316 # sentry is a alternative open source error aggregator
321 # sentry is a alternative open source error aggregator
317 # you must install python packages `sentry` and `raven` to enable
322 # you must install python packages `sentry` and `raven` to enable
318
323
319 sentry.dsn = YOUR_DNS
324 sentry.dsn = YOUR_DNS
320 sentry.servers =
325 sentry.servers =
321 sentry.name =
326 sentry.name =
322 sentry.key =
327 sentry.key =
323 sentry.public_key =
328 sentry.public_key =
324 sentry.secret_key =
329 sentry.secret_key =
325 sentry.project =
330 sentry.project =
326 sentry.site =
331 sentry.site =
327 sentry.include_paths =
332 sentry.include_paths =
328 sentry.exclude_paths =
333 sentry.exclude_paths =
329
334
330
335
331 ################################################################################
336 ################################################################################
332 ## WARNING: *THE LINE BELOW MUST BE UNCOMMENTED ON A PRODUCTION ENVIRONMENT* ##
337 ## WARNING: *THE LINE BELOW MUST BE UNCOMMENTED ON A PRODUCTION ENVIRONMENT* ##
333 ## Debug mode will enable the interactive debugging tool, allowing ANYONE to ##
338 ## Debug mode will enable the interactive debugging tool, allowing ANYONE to ##
334 ## execute malicious code after an exception is raised. ##
339 ## execute malicious code after an exception is raised. ##
335 ################################################################################
340 ################################################################################
336 set debug = false
341 set debug = false
337
342
338 ##################################
343 ##################################
339 ### LOGVIEW CONFIG ###
344 ### LOGVIEW CONFIG ###
340 ##################################
345 ##################################
341 logview.sqlalchemy = #faa
346 logview.sqlalchemy = #faa
342 logview.pylons.templating = #bfb
347 logview.pylons.templating = #bfb
343 logview.pylons.util = #eee
348 logview.pylons.util = #eee
344
349
345 #########################################################
350 #########################################################
346 ### DB CONFIGS - EACH DB WILL HAVE IT'S OWN CONFIG ###
351 ### DB CONFIGS - EACH DB WILL HAVE IT'S OWN CONFIG ###
347 #########################################################
352 #########################################################
348 #sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode.db
353 #sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode.db
349 sqlalchemy.db1.url = postgresql://postgres:qwe@localhost/rhodecode
354 sqlalchemy.db1.url = postgresql://postgres:qwe@localhost/rhodecode
350 sqlalchemy.db1.echo = false
355 sqlalchemy.db1.echo = false
351 sqlalchemy.db1.pool_recycle = 3600
356 sqlalchemy.db1.pool_recycle = 3600
352 sqlalchemy.db1.convert_unicode = true
357 sqlalchemy.db1.convert_unicode = true
353
358
354 ################################
359 ################################
355 ### LOGGING CONFIGURATION ####
360 ### LOGGING CONFIGURATION ####
356 ################################
361 ################################
357 [loggers]
362 [loggers]
358 keys = root, routes, rhodecode, sqlalchemy, beaker, templates, whoosh_indexer
363 keys = root, routes, rhodecode, sqlalchemy, beaker, templates, whoosh_indexer
359
364
360 [handlers]
365 [handlers]
361 keys = console, console_sql
366 keys = console, console_sql
362
367
363 [formatters]
368 [formatters]
364 keys = generic, color_formatter, color_formatter_sql
369 keys = generic, color_formatter, color_formatter_sql
365
370
366 #############
371 #############
367 ## LOGGERS ##
372 ## LOGGERS ##
368 #############
373 #############
369 [logger_root]
374 [logger_root]
370 level = NOTSET
375 level = NOTSET
371 handlers = console
376 handlers = console
372
377
373 [logger_routes]
378 [logger_routes]
374 level = DEBUG
379 level = DEBUG
375 handlers =
380 handlers =
376 qualname = routes.middleware
381 qualname = routes.middleware
377 # "level = DEBUG" logs the route matched and routing variables.
382 # "level = DEBUG" logs the route matched and routing variables.
378 propagate = 1
383 propagate = 1
379
384
380 [logger_beaker]
385 [logger_beaker]
381 level = DEBUG
386 level = DEBUG
382 handlers =
387 handlers =
383 qualname = beaker.container
388 qualname = beaker.container
384 propagate = 1
389 propagate = 1
385
390
386 [logger_templates]
391 [logger_templates]
387 level = INFO
392 level = INFO
388 handlers =
393 handlers =
389 qualname = pylons.templating
394 qualname = pylons.templating
390 propagate = 1
395 propagate = 1
391
396
392 [logger_rhodecode]
397 [logger_rhodecode]
393 level = DEBUG
398 level = DEBUG
394 handlers =
399 handlers =
395 qualname = rhodecode
400 qualname = rhodecode
396 propagate = 1
401 propagate = 1
397
402
398 [logger_sqlalchemy]
403 [logger_sqlalchemy]
399 level = INFO
404 level = INFO
400 handlers = console_sql
405 handlers = console_sql
401 qualname = sqlalchemy.engine
406 qualname = sqlalchemy.engine
402 propagate = 0
407 propagate = 0
403
408
404 [logger_whoosh_indexer]
409 [logger_whoosh_indexer]
405 level = DEBUG
410 level = DEBUG
406 handlers =
411 handlers =
407 qualname = whoosh_indexer
412 qualname = whoosh_indexer
408 propagate = 1
413 propagate = 1
409
414
410 ##############
415 ##############
411 ## HANDLERS ##
416 ## HANDLERS ##
412 ##############
417 ##############
413
418
414 [handler_console]
419 [handler_console]
415 class = StreamHandler
420 class = StreamHandler
416 args = (sys.stderr,)
421 args = (sys.stderr,)
417 level = INFO
422 level = INFO
418 formatter = generic
423 formatter = generic
419
424
420 [handler_console_sql]
425 [handler_console_sql]
421 class = StreamHandler
426 class = StreamHandler
422 args = (sys.stderr,)
427 args = (sys.stderr,)
423 level = WARN
428 level = WARN
424 formatter = generic
429 formatter = generic
425
430
426 ################
431 ################
427 ## FORMATTERS ##
432 ## FORMATTERS ##
428 ################
433 ################
429
434
430 [formatter_generic]
435 [formatter_generic]
431 format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
436 format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
432 datefmt = %Y-%m-%d %H:%M:%S
437 datefmt = %Y-%m-%d %H:%M:%S
433
438
434 [formatter_color_formatter]
439 [formatter_color_formatter]
435 class=rhodecode.lib.colored_formatter.ColorFormatter
440 class=rhodecode.lib.colored_formatter.ColorFormatter
436 format= %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
441 format= %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
437 datefmt = %Y-%m-%d %H:%M:%S
442 datefmt = %Y-%m-%d %H:%M:%S
438
443
439 [formatter_color_formatter_sql]
444 [formatter_color_formatter_sql]
440 class=rhodecode.lib.colored_formatter.ColorFormatterSql
445 class=rhodecode.lib.colored_formatter.ColorFormatterSql
441 format= %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
446 format= %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
442 datefmt = %Y-%m-%d %H:%M:%S
447 datefmt = %Y-%m-%d %H:%M:%S
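The [loggers], [handlers] and [formatters] sections above use the stdlib logging.config file format, so the same ini can also be loaded outside of Paste. A minimal sketch, assuming the file is saved as production.ini; the 'here' default mirrors the %(here)s substitution Paste performs when it serves the app:

    import os
    from logging.config import fileConfig

    ini = 'production.ini'
    # sets up the root, routes, rhodecode, sqlalchemy, beaker, templates and
    # whoosh_indexer loggers exactly as declared in the sections above
    fileConfig(ini, defaults={'here': os.path.dirname(os.path.abspath(ini))})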
@@ -1,452 +1,457 b''
1 ################################################################################
1 ################################################################################
2 ################################################################################
2 ################################################################################
3 # RhodeCode - Pylons environment configuration #
3 # RhodeCode - Pylons environment configuration #
4 # #
4 # #
5 # The %(here)s variable will be replaced with the parent directory of this file#
5 # The %(here)s variable will be replaced with the parent directory of this file#
6 ################################################################################
6 ################################################################################
7
7
8 [DEFAULT]
8 [DEFAULT]
9 debug = true
9 debug = true
10 pdebug = false
10 pdebug = false
11 ################################################################################
11 ################################################################################
12 ## Uncomment and replace with the address which should receive ##
12 ## Uncomment and replace with the address which should receive ##
13 ## any error reports after application crash ##
13 ## any error reports after application crash ##
14 ## Additionally those settings will be used by RhodeCode mailing system ##
14 ## Additionally those settings will be used by RhodeCode mailing system ##
15 ################################################################################
15 ################################################################################
16 #email_to = admin@localhost
16 #email_to = admin@localhost
17 #error_email_from = paste_error@localhost
17 #error_email_from = paste_error@localhost
18 #app_email_from = rhodecode-noreply@localhost
18 #app_email_from = rhodecode-noreply@localhost
19 #error_message =
19 #error_message =
20 #email_prefix = [RhodeCode]
20 #email_prefix = [RhodeCode]
21
21
22 #smtp_server = mail.server.com
22 #smtp_server = mail.server.com
23 #smtp_username =
23 #smtp_username =
24 #smtp_password =
24 #smtp_password =
25 #smtp_port =
25 #smtp_port =
26 #smtp_use_tls = false
26 #smtp_use_tls = false
27 #smtp_use_ssl = true
27 #smtp_use_ssl = true
28 # Specify available auth parameters here (e.g. LOGIN PLAIN CRAM-MD5, etc.)
28 # Specify available auth parameters here (e.g. LOGIN PLAIN CRAM-MD5, etc.)
29 #smtp_auth =
29 #smtp_auth =
30
30
31 [server:main]
31 [server:main]
32 ## PASTE
32 ## PASTE
33 ##nr of threads to spawn
33 ##nr of threads to spawn
34 #threadpool_workers = 5
34 #threadpool_workers = 5
35
35
36 ##max request before thread respawn
36 ##max request before thread respawn
37 #threadpool_max_requests = 10
37 #threadpool_max_requests = 10
38
38
39 ##option to use threads instead of processes
39 ##option to use threads instead of processes
40 #use_threadpool = true
40 #use_threadpool = true
41
41
42 #use = egg:Paste#http
42 #use = egg:Paste#http
43
43
44 #WAITRESS
44 #WAITRESS
45 threads = 5
45 threads = 5
46 #100GB
47 max_request_body_size = 107374182400
46 use = egg:waitress#main
48 use = egg:waitress#main
47
49
48 host = 127.0.0.1
50 host = 127.0.0.1
49 port = 5000
51 port = 5000
50
52
51 [filter:proxy-prefix]
53 [filter:proxy-prefix]
52 # prefix middleware for rc
54 # prefix middleware for rc
53 use = egg:PasteDeploy#prefix
55 use = egg:PasteDeploy#prefix
54 prefix = /<your-prefix>
56 prefix = /<your-prefix>
55
57
56 [app:main]
58 [app:main]
57 use = egg:rhodecode
59 use = egg:rhodecode
58 #filter-with = proxy-prefix
60 #filter-with = proxy-prefix
59 full_stack = true
61 full_stack = true
60 static_files = true
62 static_files = true
61 # Optional Languages
63 # Optional Languages
62 # en, fr, ja, pt_BR, zh_CN, zh_TW, pl
64 # en, fr, ja, pt_BR, zh_CN, zh_TW, pl
63 lang = en
65 lang = en
64 cache_dir = %(here)s/data
66 cache_dir = %(here)s/data
65 index_dir = %(here)s/data/index
67 index_dir = %(here)s/data/index
66 app_instance_uuid = ${app_instance_uuid}
68 app_instance_uuid = ${app_instance_uuid}
67 cut_off_limit = 256000
69 cut_off_limit = 256000
68 vcs_full_cache = True
70 vcs_full_cache = True
69 # force https in RhodeCode, fixes https redirects, assumes it's always https
71 # force https in RhodeCode, fixes https redirects, assumes it's always https
70 force_https = false
72 force_https = false
71 # use Strict-Transport-Security headers
73 # use Strict-Transport-Security headers
72 use_htsts = false
74 use_htsts = false
73 commit_parse_limit = 50
75 commit_parse_limit = 50
74 # number of items displayed in lightweight dashboard before paginating
76 # number of items displayed in lightweight dashboard before paginating
75 dashboard_items = 100
77 dashboard_items = 100
76 use_gravatar = true
78 use_gravatar = true
77
79
80 # path to git executable
81 git_path = git
82
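The git_path option added above is the point of this changeset: the location of the git executable is now read from the ini instead of being hard-coded. A minimal sketch of the lookup, matching the rhodecode.CONFIG call used in the pygrack middleware further down in this changeset:

    import rhodecode

    # falls back to plain 'git' on $PATH when the ini does not set git_path
    _git_path = rhodecode.CONFIG.get('git_path', 'git')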
78 ## RSS feed options
83 ## RSS feed options
79
84
80 rss_cut_off_limit = 256000
85 rss_cut_off_limit = 256000
81 rss_items_per_page = 10
86 rss_items_per_page = 10
82 rss_include_diff = false
87 rss_include_diff = false
83
88
84
89
85 ## alternative_gravatar_url allows you to use your own avatar server application
90 ## alternative_gravatar_url allows you to use your own avatar server application
86 ## the following parts of the URL will be replaced
91 ## the following parts of the URL will be replaced
87 ## {email} user email
92 ## {email} user email
88 ## {md5email} md5 hash of the user email (like at gravatar.com)
93 ## {md5email} md5 hash of the user email (like at gravatar.com)
89 ## {size} size of the image that is expected from the server application
94 ## {size} size of the image that is expected from the server application
90 ## {scheme} http/https from RhodeCode server
95 ## {scheme} http/https from RhodeCode server
91 ## {netloc} network location from RhodeCode server
96 ## {netloc} network location from RhodeCode server
92 #alternative_gravatar_url = http://myavatarserver.com/getbyemail/{email}/{size}
97 #alternative_gravatar_url = http://myavatarserver.com/getbyemail/{email}/{size}
93 #alternative_gravatar_url = http://myavatarserver.com/getbymd5/{md5email}?s={size}
98 #alternative_gravatar_url = http://myavatarserver.com/getbymd5/{md5email}?s={size}
94
99
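A short sketch of how the placeholders in alternative_gravatar_url expand; the substitution helper below is only illustrative, not RhodeCode's own code:

    import hashlib

    tmpl = 'http://myavatarserver.com/getbymd5/{md5email}?s={size}'
    email = 'user@example.com'
    url = (tmpl.replace('{email}', email)
               .replace('{md5email}', hashlib.md5(email.lower().encode('utf8')).hexdigest())
               .replace('{size}', '30')
               .replace('{scheme}', 'https')
               .replace('{netloc}', 'rhodecode.example.com'))
    # -> http://myavatarserver.com/getbymd5/<md5 of the email>?s=30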
95 container_auth_enabled = false
100 container_auth_enabled = false
96 proxypass_auth_enabled = false
101 proxypass_auth_enabled = false
97 ## default encoding used to convert from and to unicode
102 ## default encoding used to convert from and to unicode
98 ## can also be a comma-separated list of encodings in case of mixed encodings
103 ## can also be a comma-separated list of encodings in case of mixed encodings
99 default_encoding = utf8
104 default_encoding = utf8
100
105
101 ## overwrite schema of clone url
106 ## overwrite schema of clone url
102 ## available vars:
107 ## available vars:
103 ## scheme - http/https
108 ## scheme - http/https
104 ## user - current user
109 ## user - current user
105 ## pass - password
110 ## pass - password
106 ## netloc - network location
111 ## netloc - network location
107 ## path - usually repo_name
112 ## path - usually repo_name
108
113
109 #clone_uri = {scheme}://{user}{pass}{netloc}{path}
114 #clone_uri = {scheme}://{user}{pass}{netloc}{path}
110
115
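A sketch of how the clone_uri template variables could expand for an https clone by user "dev" of repository "my-group/my-repo"; the separator handling is illustrative, not RhodeCode's exact formatting:

    tmpl = '{scheme}://{user}{pass}{netloc}{path}'
    parts = {'scheme': 'https', 'user': 'dev', 'pass': ':secret@',
             'netloc': 'code.example.com', 'path': '/my-group/my-repo'}
    uri = tmpl
    for key, val in parts.items():
        uri = uri.replace('{%s}' % key, val)
    # uri == 'https://dev:secret@code.example.com/my-group/my-repo'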
111 ## issue tracking mapping for commits messages
116 ## issue tracking mapping for commits messages
112 ## comment out issue_pat, issue_server_link, issue_prefix to disable
117 ## comment out issue_pat, issue_server_link, issue_prefix to disable
113
118
114 ## pattern to get the issues from commit messages
119 ## pattern to get the issues from commit messages
115 ## the default used here is #<number> with a non-capturing regex group for `#`
120 ## the default used here is #<number> with a non-capturing regex group for `#`
116 ## {id} will be all groups matched from this pattern
121 ## {id} will be all groups matched from this pattern
117
122
118 issue_pat = (?:\s*#)(\d+)
123 issue_pat = (?:\s*#)(\d+)
119
124
120 ## server url to the issue, each {id} will be replaced with match
125 ## server url to the issue, each {id} will be replaced with match
121 ## fetched from the regex and {repo} is replaced with full repository name
126 ## fetched from the regex and {repo} is replaced with full repository name
122 ## including groups; {repo_name} is replaced with just the name of the repo
127 ## including groups; {repo_name} is replaced with just the name of the repo
123
128
124 issue_server_link = https://myissueserver.com/{repo}/issue/{id}
129 issue_server_link = https://myissueserver.com/{repo}/issue/{id}
125
130
126 ## prefix to add to the link to indicate it's a URL
131 ## prefix to add to the link to indicate it's a URL
127 ## #314 will be replaced by <issue_prefix><id>
132 ## #314 will be replaced by <issue_prefix><id>
128
133
129 issue_prefix = #
134 issue_prefix = #
130
135
131 ## issue_pat, issue_server_link, issue_prefix can have suffixes to specify
136 ## issue_pat, issue_server_link, issue_prefix can have suffixes to specify
132 ## multiple patterns pointing to other issue servers, wikis or other targets
137 ## multiple patterns pointing to other issue servers, wikis or other targets
133 ## below is an example of how to create a wiki pattern
138 ## below is an example of how to create a wiki pattern
134 # #wiki-some-id -> https://mywiki.com/some-id
139 # #wiki-some-id -> https://mywiki.com/some-id
135
140
136 #issue_pat_wiki = (?:wiki-)(.+)
141 #issue_pat_wiki = (?:wiki-)(.+)
137 #issue_server_link_wiki = https://mywiki.com/{id}
142 #issue_server_link_wiki = https://mywiki.com/{id}
138 #issue_prefix_wiki = WIKI-
143 #issue_prefix_wiki = WIKI-
139
144
140
145
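A standalone sketch (not RhodeCode's actual helper) of how the three settings above combine to turn an issue reference in a commit message into a link:

    import re

    issue_pat = r'(?:\s*#)(\d+)'
    issue_server_link = 'https://myissueserver.com/{repo}/issue/{id}'
    issue_prefix = '#'

    def link_issues(message, repo='my-group/my-repo'):
        def _repl(match):
            issue_id = match.group(1)
            url = (issue_server_link.replace('{repo}', repo)
                                    .replace('{id}', issue_id))
            return ' <a href="%s">%s%s</a>' % (url, issue_prefix, issue_id)
        return re.sub(issue_pat, _repl, message)

    print(link_issues('fixes #314'))
    # fixes <a href="https://myissueserver.com/my-group/my-repo/issue/314">#314</a>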
141 ## instance-id prefix
146 ## instance-id prefix
142 ## a prefix key for this instance used for cache invalidation when running
147 ## a prefix key for this instance used for cache invalidation when running
143 ## multiple instances of rhodecode, make sure it's globally unique for
148 ## multiple instances of rhodecode, make sure it's globally unique for
144 ## all running rhodecode instances. Leave empty if you don't use it
149 ## all running rhodecode instances. Leave empty if you don't use it
145 instance_id =
150 instance_id =
146
151
147 ## alternative HTTP response code returned on failed authentication. The default
152 ## alternative HTTP response code returned on failed authentication. The default
148 ## response is 401 HTTPUnauthorized. Currently HG clients have trouble
153 ## response is 401 HTTPUnauthorized. Currently HG clients have trouble
149 ## handling that. Set this variable to 403 to return HTTPForbidden instead
154 ## handling that. Set this variable to 403 to return HTTPForbidden instead
150 auth_ret_code =
155 auth_ret_code =
151
156
152 ####################################
157 ####################################
153 ### CELERY CONFIG ####
158 ### CELERY CONFIG ####
154 ####################################
159 ####################################
155 use_celery = false
160 use_celery = false
156 broker.host = localhost
161 broker.host = localhost
157 broker.vhost = rabbitmqhost
162 broker.vhost = rabbitmqhost
158 broker.port = 5672
163 broker.port = 5672
159 broker.user = rabbitmq
164 broker.user = rabbitmq
160 broker.password = qweqwe
165 broker.password = qweqwe
161
166
162 celery.imports = rhodecode.lib.celerylib.tasks
167 celery.imports = rhodecode.lib.celerylib.tasks
163
168
164 celery.result.backend = amqp
169 celery.result.backend = amqp
165 celery.result.dburi = amqp://
170 celery.result.dburi = amqp://
166 celery.result.serialier = json
171 celery.result.serialier = json
167
172
168 #celery.send.task.error.emails = true
173 #celery.send.task.error.emails = true
169 #celery.amqp.task.result.expires = 18000
174 #celery.amqp.task.result.expires = 18000
170
175
171 celeryd.concurrency = 2
176 celeryd.concurrency = 2
172 #celeryd.log.file = celeryd.log
177 #celeryd.log.file = celeryd.log
173 celeryd.log.level = debug
178 celeryd.log.level = debug
174 celeryd.max.tasks.per.child = 1
179 celeryd.max.tasks.per.child = 1
175
180
176 #tasks will never be sent to the queue, but executed locally instead.
181 #tasks will never be sent to the queue, but executed locally instead.
177 celery.always.eager = false
182 celery.always.eager = false
178
183
179 ####################################
184 ####################################
180 ### BEAKER CACHE ####
185 ### BEAKER CACHE ####
181 ####################################
186 ####################################
182 beaker.cache.data_dir=%(here)s/data/cache/data
187 beaker.cache.data_dir=%(here)s/data/cache/data
183 beaker.cache.lock_dir=%(here)s/data/cache/lock
188 beaker.cache.lock_dir=%(here)s/data/cache/lock
184
189
185 beaker.cache.regions=super_short_term,short_term,long_term,sql_cache_short,sql_cache_med,sql_cache_long
190 beaker.cache.regions=super_short_term,short_term,long_term,sql_cache_short,sql_cache_med,sql_cache_long
186
191
187 beaker.cache.super_short_term.type=memory
192 beaker.cache.super_short_term.type=memory
188 beaker.cache.super_short_term.expire=10
193 beaker.cache.super_short_term.expire=10
189 beaker.cache.super_short_term.key_length = 256
194 beaker.cache.super_short_term.key_length = 256
190
195
191 beaker.cache.short_term.type=memory
196 beaker.cache.short_term.type=memory
192 beaker.cache.short_term.expire=60
197 beaker.cache.short_term.expire=60
193 beaker.cache.short_term.key_length = 256
198 beaker.cache.short_term.key_length = 256
194
199
195 beaker.cache.long_term.type=memory
200 beaker.cache.long_term.type=memory
196 beaker.cache.long_term.expire=36000
201 beaker.cache.long_term.expire=36000
197 beaker.cache.long_term.key_length = 256
202 beaker.cache.long_term.key_length = 256
198
203
199 beaker.cache.sql_cache_short.type=memory
204 beaker.cache.sql_cache_short.type=memory
200 beaker.cache.sql_cache_short.expire=10
205 beaker.cache.sql_cache_short.expire=10
201 beaker.cache.sql_cache_short.key_length = 256
206 beaker.cache.sql_cache_short.key_length = 256
202
207
203 beaker.cache.sql_cache_med.type=memory
208 beaker.cache.sql_cache_med.type=memory
204 beaker.cache.sql_cache_med.expire=360
209 beaker.cache.sql_cache_med.expire=360
205 beaker.cache.sql_cache_med.key_length = 256
210 beaker.cache.sql_cache_med.key_length = 256
206
211
207 beaker.cache.sql_cache_long.type=file
212 beaker.cache.sql_cache_long.type=file
208 beaker.cache.sql_cache_long.expire=3600
213 beaker.cache.sql_cache_long.expire=3600
209 beaker.cache.sql_cache_long.key_length = 256
214 beaker.cache.sql_cache_long.key_length = 256
210
215
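The region names configured above are what Beaker's caching decorators refer to. A standalone sketch mirroring the short_term region; in the running app Pylons wires this up automatically from the beaker.cache.* keys, so the manual setup below is only illustrative:

    from beaker.cache import CacheManager
    from beaker.util import parse_cache_config_options

    opts = {
        'cache.regions': 'short_term',
        'cache.short_term.type': 'memory',
        'cache.short_term.expire': '60',
    }
    cache = CacheManager(**parse_cache_config_options(opts))

    @cache.region('short_term', 'expensive_calls')
    def expensive(arg):
        # recomputed at most once per minute for each distinct arg
        return arg * 2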
211 ####################################
216 ####################################
212 ### BEAKER SESSION ####
217 ### BEAKER SESSION ####
213 ####################################
218 ####################################
214 ## Type of storage used for the session, current types are
219 ## Type of storage used for the session, current types are
215 ## dbm, file, memcached, database, and memory.
220 ## dbm, file, memcached, database, and memory.
216 ## The storage uses the Container API
221 ## The storage uses the Container API
217 ## that is also used by the cache system.
222 ## that is also used by the cache system.
218
223
219 ## db session ##
224 ## db session ##
220 #beaker.session.type = ext:database
225 #beaker.session.type = ext:database
221 #beaker.session.sa.url = postgresql://postgres:qwe@localhost/rhodecode
226 #beaker.session.sa.url = postgresql://postgres:qwe@localhost/rhodecode
222 #beaker.session.table_name = db_session
227 #beaker.session.table_name = db_session
223
228
224 ## encrypted cookie client side session, good for many instances ##
229 ## encrypted cookie client side session, good for many instances ##
225 #beaker.session.type = cookie
230 #beaker.session.type = cookie
226
231
227 ## file based cookies (default) ##
232 ## file based cookies (default) ##
228 #beaker.session.type = file
233 #beaker.session.type = file
229
234
230
235
231 beaker.session.key = rhodecode
236 beaker.session.key = rhodecode
232 ## secure cookie requires AES python libraries ##
237 ## secure cookie requires AES python libraries ##
233 #beaker.session.encrypt_key = g654dcno0-9873jhgfreyu
238 #beaker.session.encrypt_key = g654dcno0-9873jhgfreyu
234 #beaker.session.validate_key = 9712sds2212c--zxc123
239 #beaker.session.validate_key = 9712sds2212c--zxc123
235 ## sets session as invalid if it hasn't been accessed for a given amount of time
240 ## sets session as invalid if it hasn't been accessed for a given amount of time
236 beaker.session.timeout = 2592000
241 beaker.session.timeout = 2592000
237 beaker.session.httponly = true
242 beaker.session.httponly = true
238 #beaker.session.cookie_path = /<your-prefix>
243 #beaker.session.cookie_path = /<your-prefix>
239
244
240 ## uncomment for https secure cookie ##
245 ## uncomment for https secure cookie ##
241 beaker.session.secure = false
246 beaker.session.secure = false
242
247
243 ## auto-save the session so you don't have to call .save() ##
248 ## auto-save the session so you don't have to call .save() ##
244 beaker.session.auto = False
249 beaker.session.auto = False
245
250
246 ## default cookie expiration time in seconds; `true` expires at browser close ##
251 ## default cookie expiration time in seconds; `true` expires at browser close ##
247 #beaker.session.cookie_expires = 3600
252 #beaker.session.cookie_expires = 3600
248
253
249
254
250 ############################
255 ############################
251 ## ERROR HANDLING SYSTEMS ##
256 ## ERROR HANDLING SYSTEMS ##
252 ############################
257 ############################
253
258
254 ####################
259 ####################
255 ### [errormator] ###
260 ### [errormator] ###
256 ####################
261 ####################
257
262
258 # Errormator is tailored to work with RhodeCode, see
263 # Errormator is tailored to work with RhodeCode, see
259 # http://errormator.com for details on how to obtain an account
264 # http://errormator.com for details on how to obtain an account
260 # you must install python package `errormator_client` to make it work
265 # you must install python package `errormator_client` to make it work
261
266
262 # errormator enabled
267 # errormator enabled
263 errormator = true
268 errormator = true
264
269
265 errormator.server_url = https://api.errormator.com
270 errormator.server_url = https://api.errormator.com
266 errormator.api_key = YOUR_API_KEY
271 errormator.api_key = YOUR_API_KEY
267
272
268 # TWEAK AMOUNT OF INFO SENT HERE
273 # TWEAK AMOUNT OF INFO SENT HERE
269
274
270 # enables 404 error logging (default False)
275 # enables 404 error logging (default False)
271 errormator.report_404 = false
276 errormator.report_404 = false
272
277
273 # time in seconds after which a request is considered slow (default 1)
278 # time in seconds after which a request is considered slow (default 1)
274 errormator.slow_request_time = 1
279 errormator.slow_request_time = 1
275
280
276 # record slow requests in application
281 # record slow requests in application
277 # (needs to be enabled for slow datastore recording and time tracking)
282 # (needs to be enabled for slow datastore recording and time tracking)
278 errormator.slow_requests = true
283 errormator.slow_requests = true
279
284
280 # enable hooking to application loggers
285 # enable hooking to application loggers
281 # errormator.logging = true
286 # errormator.logging = true
282
287
283 # minimum log level for log capture
288 # minimum log level for log capture
284 # errormator.logging.level = WARNING
289 # errormator.logging.level = WARNING
285
290
286 # send logs only from erroneous/slow requests
291 # send logs only from erroneous/slow requests
287 # (saves API quota for intensive logging)
292 # (saves API quota for intensive logging)
288 errormator.logging_on_error = false
293 errormator.logging_on_error = false
289
294
290 # list of additional keywords that should be grabbed from the environ object
295 # list of additional keywords that should be grabbed from the environ object
291 # can be string with comma separated list of words in lowercase
296 # can be string with comma separated list of words in lowercase
292 # (by default the client will always send the following info:
297 # (by default the client will always send the following info:
293 # 'REMOTE_USER', 'REMOTE_ADDR', 'SERVER_NAME', 'CONTENT_TYPE' + all keys that
298 # 'REMOTE_USER', 'REMOTE_ADDR', 'SERVER_NAME', 'CONTENT_TYPE' + all keys that
294 # start with HTTP*); this list can be extended with additional keywords here
299 # start with HTTP*); this list can be extended with additional keywords here
295 errormator.environ_keys_whitelist =
300 errormator.environ_keys_whitelist =
296
301
297
302
298 # list of keywords that should be blanked from request object
303 # list of keywords that should be blanked from request object
299 # can be string with comma separated list of words in lowercase
304 # can be string with comma separated list of words in lowercase
300 # (by default the client will always blank keys that contain the following words:
305 # (by default the client will always blank keys that contain the following words:
301 # 'password', 'passwd', 'pwd', 'auth_tkt', 'secret', 'csrf')
306 # 'password', 'passwd', 'pwd', 'auth_tkt', 'secret', 'csrf')
302 # this list can be extended with additional keywords set here
307 # this list can be extended with additional keywords set here
303 errormator.request_keys_blacklist =
308 errormator.request_keys_blacklist =
304
309
305
310
306 # list of namespaces that should be ignored when gathering log entries
311 # list of namespaces that should be ignored when gathering log entries
307 # can be string with comma separated list of namespaces
312 # can be string with comma separated list of namespaces
308 # (by default the client ignores own entries: errormator_client.client)
313 # (by default the client ignores own entries: errormator_client.client)
309 errormator.log_namespace_blacklist =
314 errormator.log_namespace_blacklist =
310
315
311
316
312 ################
317 ################
313 ### [sentry] ###
318 ### [sentry] ###
314 ################
319 ################
315
320
316 # sentry is an alternative open-source error aggregator
321 # sentry is an alternative open-source error aggregator
317 # you must install python packages `sentry` and `raven` to enable
322 # you must install python packages `sentry` and `raven` to enable
318
323
319 sentry.dsn = YOUR_DSN
324 sentry.dsn = YOUR_DSN
320 sentry.servers =
325 sentry.servers =
321 sentry.name =
326 sentry.name =
322 sentry.key =
327 sentry.key =
323 sentry.public_key =
328 sentry.public_key =
324 sentry.secret_key =
329 sentry.secret_key =
325 sentry.project =
330 sentry.project =
326 sentry.site =
331 sentry.site =
327 sentry.include_paths =
332 sentry.include_paths =
328 sentry.exclude_paths =
333 sentry.exclude_paths =
329
334
330
335
331 ################################################################################
336 ################################################################################
332 ## WARNING: *THE LINE BELOW MUST BE UNCOMMENTED ON A PRODUCTION ENVIRONMENT* ##
337 ## WARNING: *THE LINE BELOW MUST BE UNCOMMENTED ON A PRODUCTION ENVIRONMENT* ##
333 ## Debug mode will enable the interactive debugging tool, allowing ANYONE to ##
338 ## Debug mode will enable the interactive debugging tool, allowing ANYONE to ##
334 ## execute malicious code after an exception is raised. ##
339 ## execute malicious code after an exception is raised. ##
335 ################################################################################
340 ################################################################################
336 set debug = false
341 set debug = false
337
342
338 ##################################
343 ##################################
339 ### LOGVIEW CONFIG ###
344 ### LOGVIEW CONFIG ###
340 ##################################
345 ##################################
341 logview.sqlalchemy = #faa
346 logview.sqlalchemy = #faa
342 logview.pylons.templating = #bfb
347 logview.pylons.templating = #bfb
343 logview.pylons.util = #eee
348 logview.pylons.util = #eee
344
349
345 #########################################################
350 #########################################################
346 ### DB CONFIGS - EACH DB WILL HAVE ITS OWN CONFIG ###
351 ### DB CONFIGS - EACH DB WILL HAVE ITS OWN CONFIG ###
347 #########################################################
352 #########################################################
348
353
349 # SQLITE [default]
354 # SQLITE [default]
350 sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode.db
355 sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode.db
351
356
352 # POSTGRESQL
357 # POSTGRESQL
353 # sqlalchemy.db1.url = postgresql://user:pass@localhost/rhodecode
358 # sqlalchemy.db1.url = postgresql://user:pass@localhost/rhodecode
354
359
355 # MySQL
360 # MySQL
356 # sqlalchemy.db1.url = mysql://user:pass@localhost/rhodecode
361 # sqlalchemy.db1.url = mysql://user:pass@localhost/rhodecode
357
362
358 # see sqlalchemy docs for others
363 # see sqlalchemy docs for others
359
364
360 sqlalchemy.db1.echo = false
365 sqlalchemy.db1.echo = false
361 sqlalchemy.db1.pool_recycle = 3600
366 sqlalchemy.db1.pool_recycle = 3600
362 sqlalchemy.db1.convert_unicode = true
367 sqlalchemy.db1.convert_unicode = true
363
368
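Because every key above shares the sqlalchemy.db1. prefix, the block maps directly onto SQLAlchemy's engine_from_config. A sketch, assuming the parsed ini settings are available as a plain dict named settings:

    from sqlalchemy import engine_from_config

    # picks up url, echo, pool_recycle and convert_unicode from the db1 prefix
    engine = engine_from_config(settings, prefix='sqlalchemy.db1.')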
364 ################################
369 ################################
365 ### LOGGING CONFIGURATION ####
370 ### LOGGING CONFIGURATION ####
366 ################################
371 ################################
367 [loggers]
372 [loggers]
368 keys = root, routes, rhodecode, sqlalchemy, beaker, templates, whoosh_indexer
373 keys = root, routes, rhodecode, sqlalchemy, beaker, templates, whoosh_indexer
369
374
370 [handlers]
375 [handlers]
371 keys = console, console_sql
376 keys = console, console_sql
372
377
373 [formatters]
378 [formatters]
374 keys = generic, color_formatter, color_formatter_sql
379 keys = generic, color_formatter, color_formatter_sql
375
380
376 #############
381 #############
377 ## LOGGERS ##
382 ## LOGGERS ##
378 #############
383 #############
379 [logger_root]
384 [logger_root]
380 level = NOTSET
385 level = NOTSET
381 handlers = console
386 handlers = console
382
387
383 [logger_routes]
388 [logger_routes]
384 level = DEBUG
389 level = DEBUG
385 handlers =
390 handlers =
386 qualname = routes.middleware
391 qualname = routes.middleware
387 # "level = DEBUG" logs the route matched and routing variables.
392 # "level = DEBUG" logs the route matched and routing variables.
388 propagate = 1
393 propagate = 1
389
394
390 [logger_beaker]
395 [logger_beaker]
391 level = DEBUG
396 level = DEBUG
392 handlers =
397 handlers =
393 qualname = beaker.container
398 qualname = beaker.container
394 propagate = 1
399 propagate = 1
395
400
396 [logger_templates]
401 [logger_templates]
397 level = INFO
402 level = INFO
398 handlers =
403 handlers =
399 qualname = pylons.templating
404 qualname = pylons.templating
400 propagate = 1
405 propagate = 1
401
406
402 [logger_rhodecode]
407 [logger_rhodecode]
403 level = DEBUG
408 level = DEBUG
404 handlers =
409 handlers =
405 qualname = rhodecode
410 qualname = rhodecode
406 propagate = 1
411 propagate = 1
407
412
408 [logger_sqlalchemy]
413 [logger_sqlalchemy]
409 level = INFO
414 level = INFO
410 handlers = console_sql
415 handlers = console_sql
411 qualname = sqlalchemy.engine
416 qualname = sqlalchemy.engine
412 propagate = 0
417 propagate = 0
413
418
414 [logger_whoosh_indexer]
419 [logger_whoosh_indexer]
415 level = DEBUG
420 level = DEBUG
416 handlers =
421 handlers =
417 qualname = whoosh_indexer
422 qualname = whoosh_indexer
418 propagate = 1
423 propagate = 1
419
424
420 ##############
425 ##############
421 ## HANDLERS ##
426 ## HANDLERS ##
422 ##############
427 ##############
423
428
424 [handler_console]
429 [handler_console]
425 class = StreamHandler
430 class = StreamHandler
426 args = (sys.stderr,)
431 args = (sys.stderr,)
427 level = INFO
432 level = INFO
428 formatter = generic
433 formatter = generic
429
434
430 [handler_console_sql]
435 [handler_console_sql]
431 class = StreamHandler
436 class = StreamHandler
432 args = (sys.stderr,)
437 args = (sys.stderr,)
433 level = WARN
438 level = WARN
434 formatter = generic
439 formatter = generic
435
440
436 ################
441 ################
437 ## FORMATTERS ##
442 ## FORMATTERS ##
438 ################
443 ################
439
444
440 [formatter_generic]
445 [formatter_generic]
441 format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
446 format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
442 datefmt = %Y-%m-%d %H:%M:%S
447 datefmt = %Y-%m-%d %H:%M:%S
443
448
444 [formatter_color_formatter]
449 [formatter_color_formatter]
445 class=rhodecode.lib.colored_formatter.ColorFormatter
450 class=rhodecode.lib.colored_formatter.ColorFormatter
446 format= %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
451 format= %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
447 datefmt = %Y-%m-%d %H:%M:%S
452 datefmt = %Y-%m-%d %H:%M:%S
448
453
449 [formatter_color_formatter_sql]
454 [formatter_color_formatter_sql]
450 class=rhodecode.lib.colored_formatter.ColorFormatterSql
455 class=rhodecode.lib.colored_formatter.ColorFormatterSql
451 format= %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
456 format= %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
452 datefmt = %Y-%m-%d %H:%M:%S
457 datefmt = %Y-%m-%d %H:%M:%S
@@ -1,200 +1,203 b''
1 import os
1 import os
2 import socket
2 import socket
3 import logging
3 import logging
4 import subprocess
4 import subprocess
5 import traceback
5 import traceback
6
6
7 from webob import Request, Response, exc
7 from webob import Request, Response, exc
8
8
9 import rhodecode
9 from rhodecode.lib import subprocessio
10 from rhodecode.lib import subprocessio
10
11
11 log = logging.getLogger(__name__)
12 log = logging.getLogger(__name__)
12
13
13
14
14 class FileWrapper(object):
15 class FileWrapper(object):
15
16
16 def __init__(self, fd, content_length):
17 def __init__(self, fd, content_length):
17 self.fd = fd
18 self.fd = fd
18 self.content_length = content_length
19 self.content_length = content_length
19 self.remain = content_length
20 self.remain = content_length
20
21
21 def read(self, size):
22 def read(self, size):
22 if size <= self.remain:
23 if size <= self.remain:
23 try:
24 try:
24 data = self.fd.read(size)
25 data = self.fd.read(size)
25 except socket.error:
26 except socket.error:
26 raise IOError(self)
27 raise IOError(self)
27 self.remain -= size
28 self.remain -= size
28 elif self.remain:
29 elif self.remain:
29 data = self.fd.read(self.remain)
30 data = self.fd.read(self.remain)
30 self.remain = 0
31 self.remain = 0
31 else:
32 else:
32 data = None
33 data = None
33 return data
34 return data
34
35
35 def __repr__(self):
36 def __repr__(self):
36 return '<FileWrapper %s len: %s, read: %s>' % (
37 return '<FileWrapper %s len: %s, read: %s>' % (
37 self.fd, self.content_length, self.content_length - self.remain
38 self.fd, self.content_length, self.content_length - self.remain
38 )
39 )
39
40
40
41
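# Usage sketch (illustrative only, not part of the original file): FileWrapper
# caps reads at the declared content length, so the git subprocess is never
# handed more bytes than CONTENT_LENGTH promised.
from StringIO import StringIO
_body = FileWrapper(StringIO('0032want deadbeef'), content_length=10)
print(_body.read(4))    # '0032'
print(_body.read(100))  # 'want d' - only the 6 bytes left within the limit
print(_body.read(100))  # None - content length exhausted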
41 class GitRepository(object):
42 class GitRepository(object):
42 git_folder_signature = set(['config', 'head', 'info', 'objects', 'refs'])
43 git_folder_signature = set(['config', 'head', 'info', 'objects', 'refs'])
43 commands = ['git-upload-pack', 'git-receive-pack']
44 commands = ['git-upload-pack', 'git-receive-pack']
44
45
45 def __init__(self, repo_name, content_path, extras):
46 def __init__(self, repo_name, content_path, extras):
46 files = set([f.lower() for f in os.listdir(content_path)])
47 files = set([f.lower() for f in os.listdir(content_path)])
47 if not (self.git_folder_signature.intersection(files)
48 if not (self.git_folder_signature.intersection(files)
48 == self.git_folder_signature):
49 == self.git_folder_signature):
49 raise OSError('%s missing git signature' % content_path)
50 raise OSError('%s missing git signature' % content_path)
50 self.content_path = content_path
51 self.content_path = content_path
51 self.valid_accepts = ['application/x-%s-result' %
52 self.valid_accepts = ['application/x-%s-result' %
52 c for c in self.commands]
53 c for c in self.commands]
53 self.repo_name = repo_name
54 self.repo_name = repo_name
54 self.extras = extras
55 self.extras = extras
55
56
56 def _get_fixedpath(self, path):
57 def _get_fixedpath(self, path):
57 """
58 """
58 Small fix for repo_path
59 Small fix for repo_path
59
60
60 :param path:
61 :param path:
61 :type path:
62 :type path:
62 """
63 """
63 return path.split(self.repo_name, 1)[-1].strip('/')
64 return path.split(self.repo_name, 1)[-1].strip('/')
64
65
65 def inforefs(self, request, environ):
66 def inforefs(self, request, environ):
66 """
67 """
67 WSGI Response producer for HTTP GET Git Smart
68 WSGI Response producer for HTTP GET Git Smart
68 HTTP /info/refs request.
69 HTTP /info/refs request.
69 """
70 """
70
71
71 git_command = request.GET.get('service')
72 git_command = request.GET.get('service')
72 if git_command not in self.commands:
73 if git_command not in self.commands:
73 log.debug('command %s not allowed' % git_command)
74 log.debug('command %s not allowed' % git_command)
74 return exc.HTTPMethodNotAllowed()
75 return exc.HTTPMethodNotAllowed()
75
76
76 # note to self:
77 # note to self:
77 # please, resist the urge to add '\n' to git capture and increment
78 # please, resist the urge to add '\n' to git capture and increment
78 # line count by 1.
79 # line count by 1.
79 # The code in Git client not only does NOT need '\n', but actually
80 # The code in Git client not only does NOT need '\n', but actually
80 # blows up if you sprinkle "flush" (0000) as "0001\n".
81 # blows up if you sprinkle "flush" (0000) as "0001\n".
81 # It reads binary, per number of bytes specified.
82 # It reads binary, per number of bytes specified.
82 # if you do add '\n' as part of data, count it.
83 # if you do add '\n' as part of data, count it.
83 server_advert = '# service=%s' % git_command
84 server_advert = '# service=%s' % git_command
84 packet_len = str(hex(len(server_advert) + 4)[2:].rjust(4, '0')).lower()
85 packet_len = str(hex(len(server_advert) + 4)[2:].rjust(4, '0')).lower()
86 _git_path = rhodecode.CONFIG.get('git_path', 'git')
85 try:
87 try:
86 out = subprocessio.SubprocessIOChunker(
88 out = subprocessio.SubprocessIOChunker(
87 r'git %s --stateless-rpc --advertise-refs "%s"' % (
89 r'%s %s --stateless-rpc --advertise-refs "%s"' % (
88 git_command[4:], self.content_path),
90 _git_path, git_command[4:], self.content_path),
89 starting_values=[
91 starting_values=[
90 packet_len + server_advert + '0000'
92 packet_len + server_advert + '0000'
91 ]
93 ]
92 )
94 )
93 except EnvironmentError, e:
95 except EnvironmentError, e:
94 log.error(traceback.format_exc())
96 log.error(traceback.format_exc())
95 raise exc.HTTPExpectationFailed()
97 raise exc.HTTPExpectationFailed()
96 resp = Response()
98 resp = Response()
97 resp.content_type = 'application/x-%s-advertisement' % str(git_command)
99 resp.content_type = 'application/x-%s-advertisement' % str(git_command)
98 resp.charset = None
100 resp.charset = None
99 resp.app_iter = out
101 resp.app_iter = out
100 return resp
102 return resp
101
103
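# Worked example of the pkt-line framing built in inforefs() above (a sketch,
# not part of the original file). For git-upload-pack the advertised service
# line is '# service=git-upload-pack', 25 bytes long; the 4-digit hex length
# field counts those bytes plus its own 4 characters, and no trailing '\n' is
# added, exactly as the note in inforefs() warns:
#
#     server_advert = '# service=git-upload-pack'
#     packet_len = str(hex(len(server_advert) + 4)[2:].rjust(4, '0')).lower()
#     packet_len == '001d'   # 25 + 4 == 29 == 0x1d
#
# so the client receives '001d# service=git-upload-pack0000' followed by the
# ref advertisement produced by the subprocess.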
102 def backend(self, request, environ):
104 def backend(self, request, environ):
103 """
105 """
104 WSGI Response producer for HTTP POST Git Smart HTTP requests.
106 WSGI Response producer for HTTP POST Git Smart HTTP requests.
105 Reads commands and data from HTTP POST's body.
107 Reads commands and data from HTTP POST's body.
106 Returns an iterator object with the contents of the git command's
108 Returns an iterator object with the contents of the git command's
107 response written to stdout
109 response written to stdout
108 """
110 """
109 git_command = self._get_fixedpath(request.path_info)
111 git_command = self._get_fixedpath(request.path_info)
110 if git_command not in self.commands:
112 if git_command not in self.commands:
111 log.debug('command %s not allowed' % git_command)
113 log.debug('command %s not allowed' % git_command)
112 return exc.HTTPMethodNotAllowed()
114 return exc.HTTPMethodNotAllowed()
113
115
114 if 'CONTENT_LENGTH' in environ:
116 if 'CONTENT_LENGTH' in environ:
115 inputstream = FileWrapper(environ['wsgi.input'],
117 inputstream = FileWrapper(environ['wsgi.input'],
116 request.content_length)
118 request.content_length)
117 else:
119 else:
118 inputstream = environ['wsgi.input']
120 inputstream = environ['wsgi.input']
119
121
120 try:
122 try:
121 gitenv = os.environ
123 gitenv = os.environ
122 from rhodecode.lib.compat import json
124 from rhodecode.lib.compat import json
123 gitenv['RHODECODE_EXTRAS'] = json.dumps(self.extras)
125 gitenv['RHODECODE_EXTRAS'] = json.dumps(self.extras)
124 # forget all configs
126 # forget all configs
125 gitenv['GIT_CONFIG_NOGLOBAL'] = '1'
127 gitenv['GIT_CONFIG_NOGLOBAL'] = '1'
126 opts = dict(
128 opts = dict(
127 env=gitenv,
129 env=gitenv,
128 cwd=os.getcwd()
130 cwd=os.getcwd()
129 )
131 )
130 cmd = r'git %s --stateless-rpc "%s"' % (git_command[4:],
132 cmd = r'git %s --stateless-rpc "%s"' % (git_command[4:],
131 self.content_path),
133 self.content_path),
132 log.debug('handling cmd %s' % cmd)
134 log.debug('handling cmd %s' % cmd)
133 out = subprocessio.SubprocessIOChunker(
135 out = subprocessio.SubprocessIOChunker(
134 cmd,
136 cmd,
135 inputstream=inputstream,
137 inputstream=inputstream,
136 **opts
138 **opts
137 )
139 )
138 except EnvironmentError, e:
140 except EnvironmentError, e:
139 log.error(traceback.format_exc())
141 log.error(traceback.format_exc())
140 raise exc.HTTPExpectationFailed()
142 raise exc.HTTPExpectationFailed()
141
143
142 if git_command in [u'git-receive-pack']:
144 if git_command in [u'git-receive-pack']:
143 # updating refs manually after each push.
145 # updating refs manually after each push.
144 # Needed for pre-1.7.0.4 git clients using regular HTTP mode.
146 # Needed for pre-1.7.0.4 git clients using regular HTTP mode.
145 cmd = (u'git --git-dir "%s" '
147 _git_path = rhodecode.CONFIG.get('git_path', 'git')
146 'update-server-info' % self.content_path)
148 cmd = (u'%s --git-dir "%s" '
149 'update-server-info' % (_git_path, self.content_path))
147 log.debug('handling cmd %s' % cmd)
150 log.debug('handling cmd %s' % cmd)
148 subprocess.call(cmd, shell=True)
151 subprocess.call(cmd, shell=True)
149
152
150 resp = Response()
153 resp = Response()
151 resp.content_type = 'application/x-%s-result' % git_command.encode('utf8')
154 resp.content_type = 'application/x-%s-result' % git_command.encode('utf8')
152 resp.charset = None
155 resp.charset = None
153 resp.app_iter = out
156 resp.app_iter = out
154 return resp
157 return resp
155
158
156 def __call__(self, environ, start_response):
159 def __call__(self, environ, start_response):
157 request = Request(environ)
160 request = Request(environ)
158 _path = self._get_fixedpath(request.path_info)
161 _path = self._get_fixedpath(request.path_info)
159 if _path.startswith('info/refs'):
162 if _path.startswith('info/refs'):
160 app = self.inforefs
163 app = self.inforefs
161 elif [a for a in self.valid_accepts if a in request.accept]:
164 elif [a for a in self.valid_accepts if a in request.accept]:
162 app = self.backend
165 app = self.backend
163 try:
166 try:
164 resp = app(request, environ)
167 resp = app(request, environ)
165 except exc.HTTPException, e:
168 except exc.HTTPException, e:
166 resp = e
169 resp = e
167 log.error(traceback.format_exc())
170 log.error(traceback.format_exc())
168 except Exception, e:
171 except Exception, e:
169 log.error(traceback.format_exc())
172 log.error(traceback.format_exc())
170 resp = exc.HTTPInternalServerError()
173 resp = exc.HTTPInternalServerError()
171 return resp(environ, start_response)
174 return resp(environ, start_response)
172
175
173
176
174 class GitDirectory(object):
177 class GitDirectory(object):
175
178
176 def __init__(self, repo_root, repo_name, extras):
179 def __init__(self, repo_root, repo_name, extras):
177 repo_location = os.path.join(repo_root, repo_name)
180 repo_location = os.path.join(repo_root, repo_name)
178 if not os.path.isdir(repo_location):
181 if not os.path.isdir(repo_location):
179 raise OSError(repo_location)
182 raise OSError(repo_location)
180
183
181 self.content_path = repo_location
184 self.content_path = repo_location
182 self.repo_name = repo_name
185 self.repo_name = repo_name
183 self.repo_location = repo_location
186 self.repo_location = repo_location
184 self.extras = extras
187 self.extras = extras
185
188
186 def __call__(self, environ, start_response):
189 def __call__(self, environ, start_response):
187 content_path = self.content_path
190 content_path = self.content_path
188 try:
191 try:
189 app = GitRepository(self.repo_name, content_path, self.extras)
192 app = GitRepository(self.repo_name, content_path, self.extras)
190 except (AssertionError, OSError):
193 except (AssertionError, OSError):
191 content_path = os.path.join(content_path, '.git')
194 content_path = os.path.join(content_path, '.git')
192 if os.path.isdir(content_path):
195 if os.path.isdir(content_path):
193 app = GitRepository(self.repo_name, content_path, self.extras)
196 app = GitRepository(self.repo_name, content_path, self.extras)
194 else:
197 else:
195 return exc.HTTPNotFound()(environ, start_response)
198 return exc.HTTPNotFound()(environ, start_response)
196 return app(environ, start_response)
199 return app(environ, start_response)
197
200
198
201
199 def make_wsgi_app(repo_name, repo_root, extras):
202 def make_wsgi_app(repo_name, repo_root, extras):
200 return GitDirectory(repo_root, repo_name, extras)
203 return GitDirectory(repo_root, repo_name, extras)
@@ -1,801 +1,800 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2 """
2 """
3 rhodecode.lib.utils
3 rhodecode.lib.utils
4 ~~~~~~~~~~~~~~~~~~~
4 ~~~~~~~~~~~~~~~~~~~
5
5
6 Utilities library for RhodeCode
6 Utilities library for RhodeCode
7
7
8 :created_on: Apr 18, 2010
8 :created_on: Apr 18, 2010
9 :author: marcink
9 :author: marcink
10 :copyright: (C) 2010-2012 Marcin Kuzminski <marcin@python-works.com>
10 :copyright: (C) 2010-2012 Marcin Kuzminski <marcin@python-works.com>
11 :license: GPLv3, see COPYING for more details.
11 :license: GPLv3, see COPYING for more details.
12 """
12 """
13 # This program is free software: you can redistribute it and/or modify
13 # This program is free software: you can redistribute it and/or modify
14 # it under the terms of the GNU General Public License as published by
14 # it under the terms of the GNU General Public License as published by
15 # the Free Software Foundation, either version 3 of the License, or
15 # the Free Software Foundation, either version 3 of the License, or
16 # (at your option) any later version.
16 # (at your option) any later version.
17 #
17 #
18 # This program is distributed in the hope that it will be useful,
18 # This program is distributed in the hope that it will be useful,
19 # but WITHOUT ANY WARRANTY; without even the implied warranty of
19 # but WITHOUT ANY WARRANTY; without even the implied warranty of
20 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
20 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
21 # GNU General Public License for more details.
21 # GNU General Public License for more details.
22 #
22 #
23 # You should have received a copy of the GNU General Public License
23 # You should have received a copy of the GNU General Public License
24 # along with this program. If not, see <http://www.gnu.org/licenses/>.
24 # along with this program. If not, see <http://www.gnu.org/licenses/>.
25
25
26 import os
26 import os
27 import re
27 import re
28 import logging
28 import logging
29 import datetime
29 import datetime
30 import traceback
30 import traceback
31 import paste
31 import paste
32 import beaker
32 import beaker
33 import tarfile
33 import tarfile
34 import shutil
34 import shutil
35 import decorator
35 import decorator
36 import warnings
36 import warnings
37 from os.path import abspath
37 from os.path import abspath
38 from os.path import dirname as dn, join as jn
38 from os.path import dirname as dn, join as jn
39
39
40 from paste.script.command import Command, BadCommand
40 from paste.script.command import Command, BadCommand
41
41
42 from mercurial import ui, config
42 from mercurial import ui, config
43
43
44 from webhelpers.text import collapse, remove_formatting, strip_tags
44 from webhelpers.text import collapse, remove_formatting, strip_tags
45
45
46 from rhodecode.lib.vcs import get_backend
46 from rhodecode.lib.vcs import get_backend
47 from rhodecode.lib.vcs.backends.base import BaseChangeset
47 from rhodecode.lib.vcs.backends.base import BaseChangeset
48 from rhodecode.lib.vcs.utils.lazy import LazyProperty
48 from rhodecode.lib.vcs.utils.lazy import LazyProperty
49 from rhodecode.lib.vcs.utils.helpers import get_scm
49 from rhodecode.lib.vcs.utils.helpers import get_scm
50 from rhodecode.lib.vcs.exceptions import VCSError
50 from rhodecode.lib.vcs.exceptions import VCSError
51
51
52 from rhodecode.lib.caching_query import FromCache
52 from rhodecode.lib.caching_query import FromCache
53
53
54 from rhodecode.model import meta
54 from rhodecode.model import meta
55 from rhodecode.model.db import Repository, User, RhodeCodeUi, \
55 from rhodecode.model.db import Repository, User, RhodeCodeUi, \
56 UserLog, RepoGroup, RhodeCodeSetting, CacheInvalidation
56 UserLog, RepoGroup, RhodeCodeSetting, CacheInvalidation
57 from rhodecode.model.meta import Session
57 from rhodecode.model.meta import Session
58 from rhodecode.model.repos_group import ReposGroupModel
58 from rhodecode.model.repos_group import ReposGroupModel
59 from rhodecode.lib.utils2 import safe_str, safe_unicode
59 from rhodecode.lib.utils2 import safe_str, safe_unicode
60 from rhodecode.lib.vcs.utils.fakemod import create_module
60 from rhodecode.lib.vcs.utils.fakemod import create_module
61
61
62 log = logging.getLogger(__name__)
62 log = logging.getLogger(__name__)
63
63
64 REMOVED_REPO_PAT = re.compile(r'rm__\d{8}_\d{6}_\d{6}__.*')
64 REMOVED_REPO_PAT = re.compile(r'rm__\d{8}_\d{6}_\d{6}__.*')
65
65
66
66
67 def recursive_replace(str_, replace=' '):
67 def recursive_replace(str_, replace=' '):
68 """
68 """
69 Recursively replace repeated occurrences of a given character with a single one
69 Recursively replace repeated occurrences of a given character with a single one
70
70
71 :param str_: given string
71 :param str_: given string
72 :param replace: character whose repeated occurrences are collapsed to one
72 :param replace: character whose repeated occurrences are collapsed to one
73
73
74 Examples::
74 Examples::
75 >>> recursive_replace("Mighty---Mighty-Bo--sstones",'-')
75 >>> recursive_replace("Mighty---Mighty-Bo--sstones",'-')
76 'Mighty-Mighty-Bo-sstones'
76 'Mighty-Mighty-Bo-sstones'
77 """
77 """
78
78
79 if str_.find(replace * 2) == -1:
79 if str_.find(replace * 2) == -1:
80 return str_
80 return str_
81 else:
81 else:
82 str_ = str_.replace(replace * 2, replace)
82 str_ = str_.replace(replace * 2, replace)
83 return recursive_replace(str_, replace)
83 return recursive_replace(str_, replace)
84
84
85
85
86 def repo_name_slug(value):
86 def repo_name_slug(value):
87 """
87 """
88 Return a slug of the repository name.
88 Return a slug of the repository name.
89 This function is called on each creation/modification
89 This function is called on each creation/modification
90 of a repository to prevent bad names.
90 of a repository to prevent bad names.
91 """
91 """
92
92
93 slug = remove_formatting(value)
93 slug = remove_formatting(value)
94 slug = strip_tags(slug)
94 slug = strip_tags(slug)
95
95
96 for c in """`?=[]\;'"<>,/~!@#$%^&*()+{}|: """:
96 for c in """`?=[]\;'"<>,/~!@#$%^&*()+{}|: """:
97 slug = slug.replace(c, '-')
97 slug = slug.replace(c, '-')
98 slug = recursive_replace(slug, '-')
98 slug = recursive_replace(slug, '-')
99 slug = collapse(slug, '-')
99 slug = collapse(slug, '-')
100 return slug
100 return slug
101
101
102
102
103 def get_repo_slug(request):
103 def get_repo_slug(request):
104 _repo = request.environ['pylons.routes_dict'].get('repo_name')
104 _repo = request.environ['pylons.routes_dict'].get('repo_name')
105 if _repo:
105 if _repo:
106 _repo = _repo.rstrip('/')
106 _repo = _repo.rstrip('/')
107 return _repo
107 return _repo
108
108
109
109
110 def get_repos_group_slug(request):
110 def get_repos_group_slug(request):
111 _group = request.environ['pylons.routes_dict'].get('group_name')
111 _group = request.environ['pylons.routes_dict'].get('group_name')
112 if _group:
112 if _group:
113 _group = _group.rstrip('/')
113 _group = _group.rstrip('/')
114 return _group
114 return _group
115
115
116
116
117 def action_logger(user, action, repo, ipaddr='', sa=None, commit=False):
117 def action_logger(user, action, repo, ipaddr='', sa=None, commit=False):
118 """
118 """
119 Action logger for various actions made by users
119 Action logger for various actions made by users
120
120
121 :param user: user that made this action, can be a unique username string or
121 :param user: user that made this action, can be a unique username string or
122 object containing user_id attribute
122 object containing user_id attribute
123 :param action: action to log, should be one of the predefined unique actions for
123 :param action: action to log, should be one of the predefined unique actions for
124 easy translations
124 easy translations
125 :param repo: string name of repository or object containing repo_id,
125 :param repo: string name of repository or object containing repo_id,
126 that action was made on
126 that action was made on
127 :param ipaddr: optional ip address from which the action was made
127 :param ipaddr: optional ip address from which the action was made
128 :param sa: optional sqlalchemy session
128 :param sa: optional sqlalchemy session
129
129
130 """
130 """
131
131
132 if not sa:
132 if not sa:
133 sa = meta.Session()
133 sa = meta.Session()
134
134
135 try:
135 try:
136 if hasattr(user, 'user_id'):
136 if hasattr(user, 'user_id'):
137 user_obj = User.get(user.user_id)
137 user_obj = User.get(user.user_id)
138 elif isinstance(user, basestring):
138 elif isinstance(user, basestring):
139 user_obj = User.get_by_username(user)
139 user_obj = User.get_by_username(user)
140 else:
140 else:
141 raise Exception('You have to provide a user object or a username')
141 raise Exception('You have to provide a user object or a username')
142
142
143 if hasattr(repo, 'repo_id'):
143 if hasattr(repo, 'repo_id'):
144 repo_obj = Repository.get(repo.repo_id)
144 repo_obj = Repository.get(repo.repo_id)
145 repo_name = repo_obj.repo_name
145 repo_name = repo_obj.repo_name
146 elif isinstance(repo, basestring):
146 elif isinstance(repo, basestring):
147 repo_name = repo.lstrip('/')
147 repo_name = repo.lstrip('/')
148 repo_obj = Repository.get_by_repo_name(repo_name)
148 repo_obj = Repository.get_by_repo_name(repo_name)
149 else:
149 else:
150 repo_obj = None
150 repo_obj = None
151 repo_name = ''
151 repo_name = ''
152
152
153 user_log = UserLog()
153 user_log = UserLog()
154 user_log.user_id = user_obj.user_id
154 user_log.user_id = user_obj.user_id
155 user_log.username = user_obj.username
155 user_log.username = user_obj.username
156 user_log.action = safe_unicode(action)
156 user_log.action = safe_unicode(action)
157
157
158 user_log.repository = repo_obj
158 user_log.repository = repo_obj
159 user_log.repository_name = repo_name
159 user_log.repository_name = repo_name
160
160
161 user_log.action_date = datetime.datetime.now()
161 user_log.action_date = datetime.datetime.now()
162 user_log.user_ip = ipaddr
162 user_log.user_ip = ipaddr
163 sa.add(user_log)
163 sa.add(user_log)
164
164
165 log.info('Logging action %s on %s by %s' %
165 log.info('Logging action %s on %s by %s' %
166 (action, safe_unicode(repo), user_obj))
166 (action, safe_unicode(repo), user_obj))
167 if commit:
167 if commit:
168 sa.commit()
168 sa.commit()
169 except:
169 except:
170 log.error(traceback.format_exc())
170 log.error(traceback.format_exc())
171 raise
171 raise
172
172
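# Usage sketch (illustrative values, requires an initialized RhodeCode
# database session; the action string should be one of the predefined
# actions, e.g. 'push' or 'pull'):
#
#     action_logger(user='admin', action='push',
#                   repo='my-group/my-repo', ipaddr='10.0.0.1', commit=True)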
173
173
174 def get_repos(path, recursive=False, skip_removed_repos=True):
174 def get_repos(path, recursive=False, skip_removed_repos=True):
175 """
175 """
176 Scans the given path for repos and returns (name, (type, path)) tuples
176 Scans the given path for repos and returns (name, (type, path)) tuples
177
177
178 :param path: path to scan for repositories
178 :param path: path to scan for repositories
179 :param recursive: recursive search and return names with subdirs in front
179 :param recursive: recursive search and return names with subdirs in front
180 """
180 """
181
181
182 # remove ending slash for better results
182 # remove ending slash for better results
183 path = path.rstrip(os.sep)
183 path = path.rstrip(os.sep)
184 log.debug('now scanning in %s location recursive:%s...' % (path, recursive))
184 log.debug('now scanning in %s location recursive:%s...' % (path, recursive))
185
185
186 def _get_repos(p):
186 def _get_repos(p):
187 if not os.access(p, os.W_OK):
187 if not os.access(p, os.W_OK):
188 return
188 return
189 for dirpath in os.listdir(p):
189 for dirpath in os.listdir(p):
190 if os.path.isfile(os.path.join(p, dirpath)):
190 if os.path.isfile(os.path.join(p, dirpath)):
191 continue
191 continue
192 cur_path = os.path.join(p, dirpath)
192 cur_path = os.path.join(p, dirpath)
193
193
194 # skip removed repos
194 # skip removed repos
195 if skip_removed_repos and REMOVED_REPO_PAT.match(dirpath):
195 if skip_removed_repos and REMOVED_REPO_PAT.match(dirpath):
196 continue
196 continue
197
197
198 #skip .<something> dirs
198 #skip .<something> dirs
199 if dirpath.startswith('.'):
199 if dirpath.startswith('.'):
200 continue
200 continue
201
201
202 try:
202 try:
203 scm_info = get_scm(cur_path)
203 scm_info = get_scm(cur_path)
204 yield scm_info[1].split(path, 1)[-1].lstrip(os.sep), scm_info
204 yield scm_info[1].split(path, 1)[-1].lstrip(os.sep), scm_info
205 except VCSError:
205 except VCSError:
206 if not recursive:
206 if not recursive:
207 continue
207 continue
208 #check if this dir contains other repos for recursive scan
208 #check if this dir contains other repos for recursive scan
209 rec_path = os.path.join(p, dirpath)
209 rec_path = os.path.join(p, dirpath)
210 if os.path.isdir(rec_path):
210 if os.path.isdir(rec_path):
211 for inner_scm in _get_repos(rec_path):
211 for inner_scm in _get_repos(rec_path):
212 yield inner_scm
212 yield inner_scm
213
213
214 return _get_repos(path)
214 return _get_repos(path)
215
215
216 #alias for backward compat
216 #alias for backward compat
217 get_filesystem_repos = get_repos
217 get_filesystem_repos = get_repos
218
218
219
219
220 def is_valid_repo(repo_name, base_path, scm=None):
220 def is_valid_repo(repo_name, base_path, scm=None):
221 """
221 """
222 Returns True if the given path is a valid repository, False otherwise.
222 Returns True if the given path is a valid repository, False otherwise.
223 If the scm param is given, also check whether the detected scm matches
223 If the scm param is given, also check whether the detected scm matches
224 the one expected by the scm parameter
224 the one expected by the scm parameter
225
225
226 :param repo_name:
226 :param repo_name:
227 :param base_path:
227 :param base_path:
228 :param scm:
228 :param scm:
229
229
230 :return True: if given path is a valid repository
230 :return True: if given path is a valid repository
231 """
231 """
232 full_path = os.path.join(safe_str(base_path), safe_str(repo_name))
232 full_path = os.path.join(safe_str(base_path), safe_str(repo_name))
233
233
234 try:
234 try:
235 scm_ = get_scm(full_path)
235 scm_ = get_scm(full_path)
236 if scm:
236 if scm:
237 return scm_[0] == scm
237 return scm_[0] == scm
238 return True
238 return True
239 except VCSError:
239 except VCSError:
240 return False
240 return False
241
241
242
242
243 def is_valid_repos_group(repos_group_name, base_path):
243 def is_valid_repos_group(repos_group_name, base_path):
244 """
244 """
245 Returns True if the given path is a repos group, False otherwise
245 Returns True if the given path is a repos group, False otherwise
246
246
247 :param repos_group_name:
247 :param repos_group_name:
248 :param base_path:
248 :param base_path:
249 """
249 """
250 full_path = os.path.join(safe_str(base_path), safe_str(repos_group_name))
250 full_path = os.path.join(safe_str(base_path), safe_str(repos_group_name))
251
251
252 # check if it's not a repo
252 # check if it's not a repo
253 if is_valid_repo(repos_group_name, base_path):
253 if is_valid_repo(repos_group_name, base_path):
254 return False
254 return False
255
255
256 try:
256 try:
257 # we need to check bare git repos at higher level
257 # we need to check bare git repos at higher level
258 # since we might match branches/hooks/info/objects or possible
258 # since we might match branches/hooks/info/objects or possible
259 # other things inside bare git repo
259 # other things inside bare git repo
260 get_scm(os.path.dirname(full_path))
260 get_scm(os.path.dirname(full_path))
261 return False
261 return False
262 except VCSError:
262 except VCSError:
263 pass
263 pass
264
264
265 # check if it's a valid path
265 # check if it's a valid path
266 if os.path.isdir(full_path):
266 if os.path.isdir(full_path):
267 return True
267 return True
268
268
269 return False
269 return False
270
270
271
271
272 def ask_ok(prompt, retries=4, complaint='Yes or no please!'):
272 def ask_ok(prompt, retries=4, complaint='Yes or no please!'):
273 while True:
273 while True:
274 ok = raw_input(prompt)
274 ok = raw_input(prompt)
275 if ok in ('y', 'ye', 'yes'):
275 if ok in ('y', 'ye', 'yes'):
276 return True
276 return True
277 if ok in ('n', 'no', 'nop', 'nope'):
277 if ok in ('n', 'no', 'nop', 'nope'):
278 return False
278 return False
279 retries = retries - 1
279 retries = retries - 1
280 if retries < 0:
280 if retries < 0:
281 raise IOError
281 raise IOError
282 print complaint
282 print complaint
283
283
284 #propagated from mercurial documentation
284 #propagated from mercurial documentation
285 ui_sections = ['alias', 'auth',
285 ui_sections = ['alias', 'auth',
286 'decode/encode', 'defaults',
286 'decode/encode', 'defaults',
287 'diff', 'email',
287 'diff', 'email',
288 'extensions', 'format',
288 'extensions', 'format',
289 'merge-patterns', 'merge-tools',
289 'merge-patterns', 'merge-tools',
290 'hooks', 'http_proxy',
290 'hooks', 'http_proxy',
291 'smtp', 'patch',
291 'smtp', 'patch',
292 'paths', 'profiling',
292 'paths', 'profiling',
293 'server', 'trusted',
293 'server', 'trusted',
294 'ui', 'web', ]
294 'ui', 'web', ]
295
295
296
296
297 def make_ui(read_from='file', path=None, checkpaths=True, clear_session=True):
297 def make_ui(read_from='file', path=None, checkpaths=True, clear_session=True):
298 """
298 """
299 A function that will read python rc files or the database
299 A function that will read python rc files or the database
300 and make a mercurial ui object from the read options
300 and make a mercurial ui object from the read options
301
301
302 :param path: path to mercurial config file
302 :param path: path to mercurial config file
303 :param checkpaths: check the path
303 :param checkpaths: check the path
304 :param read_from: read from 'file' or 'db'
304 :param read_from: read from 'file' or 'db'
305 """
305 """
306
306
307 baseui = ui.ui()
307 baseui = ui.ui()
308
308
309 # clean the baseui object
309 # clean the baseui object
310 baseui._ocfg = config.config()
310 baseui._ocfg = config.config()
311 baseui._ucfg = config.config()
311 baseui._ucfg = config.config()
312 baseui._tcfg = config.config()
312 baseui._tcfg = config.config()
313
313
314 if read_from == 'file':
314 if read_from == 'file':
315 if not os.path.isfile(path):
315 if not os.path.isfile(path):
316 log.debug('hgrc file is not present at %s, skipping...' % path)
316 log.debug('hgrc file is not present at %s, skipping...' % path)
317 return False
317 return False
318 log.debug('reading hgrc from %s' % path)
318 log.debug('reading hgrc from %s' % path)
319 cfg = config.config()
319 cfg = config.config()
320 cfg.read(path)
320 cfg.read(path)
321 for section in ui_sections:
321 for section in ui_sections:
322 for k, v in cfg.items(section):
322 for k, v in cfg.items(section):
323 log.debug('setting ui from file: [%s] %s=%s' % (section, k, v))
323 log.debug('setting ui from file: [%s] %s=%s' % (section, k, v))
324 baseui.setconfig(safe_str(section), safe_str(k), safe_str(v))
324 baseui.setconfig(safe_str(section), safe_str(k), safe_str(v))
325
325
326 elif read_from == 'db':
326 elif read_from == 'db':
327 sa = meta.Session()
327 sa = meta.Session()
328 ret = sa.query(RhodeCodeUi)\
328 ret = sa.query(RhodeCodeUi)\
329 .options(FromCache("sql_cache_short", "get_hg_ui_settings"))\
329 .options(FromCache("sql_cache_short", "get_hg_ui_settings"))\
330 .all()
330 .all()
331
331
332 hg_ui = ret
332 hg_ui = ret
333 for ui_ in hg_ui:
333 for ui_ in hg_ui:
334 if ui_.ui_active:
334 if ui_.ui_active:
335 log.debug('setting ui from db: [%s] %s=%s', ui_.ui_section,
335 log.debug('setting ui from db: [%s] %s=%s', ui_.ui_section,
336 ui_.ui_key, ui_.ui_value)
336 ui_.ui_key, ui_.ui_value)
337 baseui.setconfig(safe_str(ui_.ui_section), safe_str(ui_.ui_key),
337 baseui.setconfig(safe_str(ui_.ui_section), safe_str(ui_.ui_key),
338 safe_str(ui_.ui_value))
338 safe_str(ui_.ui_value))
339 if ui_.ui_key == 'push_ssl':
339 if ui_.ui_key == 'push_ssl':
340 # force set push_ssl requirement to False, rhodecode
340 # force set push_ssl requirement to False, rhodecode
341 # handles that
341 # handles that
342 baseui.setconfig(safe_str(ui_.ui_section), safe_str(ui_.ui_key),
342 baseui.setconfig(safe_str(ui_.ui_section), safe_str(ui_.ui_key),
343 False)
343 False)
344 if clear_session:
344 if clear_session:
345 meta.Session.remove()
345 meta.Session.remove()
346 return baseui
346 return baseui
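A hedged sketch of how make_ui is typically called (the hgrc path below is illustrative):

    # read ui settings from the RhodeCode database (the usual runtime path)
    baseui = make_ui(read_from='db', clear_session=True)

    # or read them from an hgrc-style file; make_ui returns False when the
    # file does not exist
    baseui = make_ui(read_from='file', path='/srv/repos/hgrc', checkpaths=False)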
347
347
348
348
349 def set_rhodecode_config(config):
349 def set_rhodecode_config(config):
350 """
350 """
351 Updates pylons config with new settings from database
351 Updates pylons config with new settings from database
352
352
353 :param config:
353 :param config:
354 """
354 """
355 hgsettings = RhodeCodeSetting.get_app_settings()
355 hgsettings = RhodeCodeSetting.get_app_settings()
356
356
357 for k, v in hgsettings.items():
357 for k, v in hgsettings.items():
358 config[k] = v
358 config[k] = v
359
359
360
360
361 def invalidate_cache(cache_key, *args):
361 def invalidate_cache(cache_key, *args):
362 """
362 """
363 Puts cache invalidation task into db for
363 Puts cache invalidation task into db for
364 further global cache invalidation
364 further global cache invalidation
365 """
365 """
366
366
367 from rhodecode.model.scm import ScmModel
367 from rhodecode.model.scm import ScmModel
368
368
369 if cache_key.startswith('get_repo_cached_'):
369 if cache_key.startswith('get_repo_cached_'):
370 name = cache_key.split('get_repo_cached_')[-1]
370 name = cache_key.split('get_repo_cached_')[-1]
371 ScmModel().mark_for_invalidation(name)
371 ScmModel().mark_for_invalidation(name)
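For illustration, only keys with the 'get_repo_cached_' prefix are acted on:

    invalidate_cache('get_repo_cached_myrepo')  # marks 'myrepo' for invalidation
    invalidate_cache('some_other_key')          # ignored, prefix does not match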
372
372
373
373
374 def map_groups(path):
374 def map_groups(path):
375 """
375 """
376 Given a full path to a repository, create all nested groups that this
376 Given a full path to a repository, create all nested groups that this
377 repo is inside. This function creates parent-child relationships between
377 repo is inside. This function creates parent-child relationships between
378 groups and creates default perms for all new groups.
378 groups and creates default perms for all new groups.
379
379
380 :param path: full path to repository
380 :param path: full path to repository
381 """
381 """
382 sa = meta.Session()
382 sa = meta.Session()
383 groups = path.split(Repository.url_sep())
383 groups = path.split(Repository.url_sep())
384 parent = None
384 parent = None
385 group = None
385 group = None
386
386
387 # last element is repo in nested groups structure
387 # last element is repo in nested groups structure
388 groups = groups[:-1]
388 groups = groups[:-1]
389 rgm = ReposGroupModel(sa)
389 rgm = ReposGroupModel(sa)
390 for lvl, group_name in enumerate(groups):
390 for lvl, group_name in enumerate(groups):
391 group_name = '/'.join(groups[:lvl] + [group_name])
391 group_name = '/'.join(groups[:lvl] + [group_name])
392 group = RepoGroup.get_by_group_name(group_name)
392 group = RepoGroup.get_by_group_name(group_name)
393 desc = '%s group' % group_name
393 desc = '%s group' % group_name
394
394
395 # skip folders that are now removed repos
395 # skip folders that are now removed repos
396 if REMOVED_REPO_PAT.match(group_name):
396 if REMOVED_REPO_PAT.match(group_name):
397 break
397 break
398
398
399 if group is None:
399 if group is None:
400 log.debug('creating group level: %s group_name: %s' % (lvl,
400 log.debug('creating group level: %s group_name: %s' % (lvl,
401 group_name))
401 group_name))
402 group = RepoGroup(group_name, parent)
402 group = RepoGroup(group_name, parent)
403 group.group_description = desc
403 group.group_description = desc
404 sa.add(group)
404 sa.add(group)
405 rgm._create_default_perms(group)
405 rgm._create_default_perms(group)
406 sa.flush()
406 sa.flush()
407 parent = group
407 parent = group
408 return group
408 return group
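A short sketch of the effect, assuming Repository.url_sep() is '/' (the repository path below is illustrative):

    # for a repo stored at 'projects/web/site', map_groups creates the groups
    # 'projects' and 'projects/web' with default perms (the last path element
    # is the repo itself) and returns the innermost group
    group = map_groups('projects/web/site')
    # group.group_name == 'projects/web'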
409
409
410
410
411 def repo2db_mapper(initial_repo_list, remove_obsolete=False,
411 def repo2db_mapper(initial_repo_list, remove_obsolete=False,
412 install_git_hook=False):
412 install_git_hook=False):
413 """
413 """
414 maps all repos given in initial_repo_list; non-existing repositories
414 maps all repos given in initial_repo_list; non-existing repositories
415 are created. If remove_obsolete is True it also checks for db entries
415 are created. If remove_obsolete is True it also checks for db entries
416 that are not in initial_repo_list and removes them.
416 that are not in initial_repo_list and removes them.
417
417
418 :param initial_repo_list: list of repositories found by scanning methods
418 :param initial_repo_list: list of repositories found by scanning methods
419 :param remove_obsolete: check for obsolete entries in database
419 :param remove_obsolete: check for obsolete entries in database
420 :param install_git_hook: if this is True, also check and install githook
420 :param install_git_hook: if this is True, also check and install githook
421 for a repo if missing
421 for a repo if missing
422 """
422 """
423 from rhodecode.model.repo import RepoModel
423 from rhodecode.model.repo import RepoModel
424 from rhodecode.model.scm import ScmModel
424 from rhodecode.model.scm import ScmModel
425 sa = meta.Session()
425 sa = meta.Session()
426 rm = RepoModel()
426 rm = RepoModel()
427 user = sa.query(User).filter(User.admin == True).first()
427 user = sa.query(User).filter(User.admin == True).first()
428 if user is None:
428 if user is None:
429 raise Exception('Missing administrative account!')
429 raise Exception('Missing administrative account!')
430 added = []
430 added = []
431
431
432 # # clear cache keys
432 # # clear cache keys
433 # log.debug("Clearing cache keys now...")
433 # log.debug("Clearing cache keys now...")
434 # CacheInvalidation.clear_cache()
434 # CacheInvalidation.clear_cache()
435 # sa.commit()
435 # sa.commit()
436
436
437 ##creation defaults
437 ##creation defaults
438 defs = RhodeCodeSetting.get_default_repo_settings(strip_prefix=True)
438 defs = RhodeCodeSetting.get_default_repo_settings(strip_prefix=True)
439 enable_statistics = defs.get('repo_enable_statistics')
439 enable_statistics = defs.get('repo_enable_statistics')
440 enable_locking = defs.get('repo_enable_locking')
440 enable_locking = defs.get('repo_enable_locking')
441 enable_downloads = defs.get('repo_enable_downloads')
441 enable_downloads = defs.get('repo_enable_downloads')
442 private = defs.get('repo_private')
442 private = defs.get('repo_private')
443
443
444 for name, repo in initial_repo_list.items():
444 for name, repo in initial_repo_list.items():
445 group = map_groups(name)
445 group = map_groups(name)
446 db_repo = rm.get_by_repo_name(name)
446 db_repo = rm.get_by_repo_name(name)
447 # found repo that is on filesystem not in RhodeCode database
447 # found repo that is on filesystem not in RhodeCode database
448 if not db_repo:
448 if not db_repo:
449 log.info('repository %s not found, creating now' % name)
449 log.info('repository %s not found, creating now' % name)
450 added.append(name)
450 added.append(name)
451 desc = (repo.description
451 desc = (repo.description
452 if repo.description != 'unknown'
452 if repo.description != 'unknown'
453 else '%s repository' % name)
453 else '%s repository' % name)
454
454
455 new_repo = rm.create_repo(
455 new_repo = rm.create_repo(
456 repo_name=name,
456 repo_name=name,
457 repo_type=repo.alias,
457 repo_type=repo.alias,
458 description=desc,
458 description=desc,
459 repos_group=getattr(group, 'group_id', None),
459 repos_group=getattr(group, 'group_id', None),
460 owner=user,
460 owner=user,
461 just_db=True,
461 just_db=True,
462 enable_locking=enable_locking,
462 enable_locking=enable_locking,
463 enable_downloads=enable_downloads,
463 enable_downloads=enable_downloads,
464 enable_statistics=enable_statistics,
464 enable_statistics=enable_statistics,
465 private=private
465 private=private
466 )
466 )
467 # we just added that repo, so make sure it has the git hook
467 # we just added that repo, so make sure it has the git hook
468 # installed
468 # installed
469 if new_repo.repo_type == 'git':
469 if new_repo.repo_type == 'git':
470 ScmModel().install_git_hook(new_repo.scm_instance)
470 ScmModel().install_git_hook(new_repo.scm_instance)
471 new_repo.update_changeset_cache()
471 new_repo.update_changeset_cache()
472 elif install_git_hook:
472 elif install_git_hook:
473 if db_repo.repo_type == 'git':
473 if db_repo.repo_type == 'git':
474 ScmModel().install_git_hook(db_repo.scm_instance)
474 ScmModel().install_git_hook(db_repo.scm_instance)
475 # during startup install cache keys for all repositories in the
475 # during startup install cache keys for all repositories in the
476 # system; this will register all repos and multiple instances
476 # system; this will register all repos and multiple instances
477 key, _prefix, _org_key = CacheInvalidation._get_key(name)
477 key, _prefix, _org_key = CacheInvalidation._get_key(name)
478 CacheInvalidation.invalidate(name)
478 CacheInvalidation.invalidate(name)
479 log.debug("Creating a cache key for %s, instance_id %s"
479 log.debug("Creating a cache key for %s, instance_id %s"
480 % (name, _prefix or 'unknown'))
480 % (name, _prefix or 'unknown'))
481
481
482 sa.commit()
482 sa.commit()
483 removed = []
483 removed = []
484 if remove_obsolete:
484 if remove_obsolete:
485 # remove from database those repositories that are not in the filesystem
485 # remove from database those repositories that are not in the filesystem
486 for repo in sa.query(Repository).all():
486 for repo in sa.query(Repository).all():
487 if repo.repo_name not in initial_repo_list.keys():
487 if repo.repo_name not in initial_repo_list.keys():
488 log.debug("Removing non-existing repository found in db `%s`" %
488 log.debug("Removing non-existing repository found in db `%s`" %
489 repo.repo_name)
489 repo.repo_name)
490 try:
490 try:
491 sa.delete(repo)
491 sa.delete(repo)
492 sa.commit()
492 sa.commit()
493 removed.append(repo.repo_name)
493 removed.append(repo.repo_name)
494 except:
494 except:
495 #don't hold further removals on error
495 #don't hold further removals on error
496 log.error(traceback.format_exc())
496 log.error(traceback.format_exc())
497 sa.rollback()
497 sa.rollback()
498
498
499 return added, removed
499 return added, removed
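A hedged sketch of how the scan and the mapping are usually combined (the path is illustrative, and it is assumed here that ScmModel.repo_scan returns the {name: repo} dict this function expects):

    repos = ScmModel().repo_scan('/srv/repos')
    added, removed = repo2db_mapper(repos, remove_obsolete=True,
                                    install_git_hook=True)
    print 'added: %s, removed: %s' % (added, removed)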
500
500
501
501
502 # set cache regions for beaker so celery can utilise it
502 # set cache regions for beaker so celery can utilise it
503 def add_cache(settings):
503 def add_cache(settings):
504 cache_settings = {'regions': None}
504 cache_settings = {'regions': None}
505 for key in settings.keys():
505 for key in settings.keys():
506 for prefix in ['beaker.cache.', 'cache.']:
506 for prefix in ['beaker.cache.', 'cache.']:
507 if key.startswith(prefix):
507 if key.startswith(prefix):
508 name = key.split(prefix)[1].strip()
508 name = key.split(prefix)[1].strip()
509 cache_settings[name] = settings[key].strip()
509 cache_settings[name] = settings[key].strip()
510 if cache_settings['regions']:
510 if cache_settings['regions']:
511 for region in cache_settings['regions'].split(','):
511 for region in cache_settings['regions'].split(','):
512 region = region.strip()
512 region = region.strip()
513 region_settings = {}
513 region_settings = {}
514 for key, value in cache_settings.items():
514 for key, value in cache_settings.items():
515 if key.startswith(region):
515 if key.startswith(region):
516 region_settings[key.split('.')[1]] = value
516 region_settings[key.split('.')[1]] = value
517 region_settings['expire'] = int(region_settings.get('expire',
517 region_settings['expire'] = int(region_settings.get('expire',
518 60))
518 60))
519 region_settings.setdefault('lock_dir',
519 region_settings.setdefault('lock_dir',
520 cache_settings.get('lock_dir'))
520 cache_settings.get('lock_dir'))
521 region_settings.setdefault('data_dir',
521 region_settings.setdefault('data_dir',
522 cache_settings.get('data_dir'))
522 cache_settings.get('data_dir'))
523
523
524 if 'type' not in region_settings:
524 if 'type' not in region_settings:
525 region_settings['type'] = cache_settings.get('type',
525 region_settings['type'] = cache_settings.get('type',
526 'memory')
526 'memory')
527 beaker.cache.cache_regions[region] = region_settings
527 beaker.cache.cache_regions[region] = region_settings
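add_cache expects beaker-style settings such as the following minimal, illustrative mapping (mirroring what beaker.cache.* keys from a config would provide):

    settings = {
        'beaker.cache.data_dir': '/tmp/cache/data',
        'beaker.cache.lock_dir': '/tmp/cache/lock',
        'beaker.cache.regions': 'short_term, sql_cache_short',
        'beaker.cache.short_term.type': 'memory',
        'beaker.cache.short_term.expire': '3600',
        'beaker.cache.sql_cache_short.expire': '30',
    }
    add_cache(settings)  # registers both regions in beaker.cache.cache_regions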
528
528
529
529
530 def load_rcextensions(root_path):
530 def load_rcextensions(root_path):
531 import rhodecode
531 import rhodecode
532 from rhodecode.config import conf
532 from rhodecode.config import conf
533
533
534 path = os.path.join(root_path, 'rcextensions', '__init__.py')
534 path = os.path.join(root_path, 'rcextensions', '__init__.py')
535 if os.path.isfile(path):
535 if os.path.isfile(path):
536 rcext = create_module('rc', path)
536 rcext = create_module('rc', path)
537 EXT = rhodecode.EXTENSIONS = rcext
537 EXT = rhodecode.EXTENSIONS = rcext
538 log.debug('Found rcextensions now loading %s...' % rcext)
538 log.debug('Found rcextensions now loading %s...' % rcext)
539
539
540 # Additional mappings that are not present in the pygments lexers
540 # Additional mappings that are not present in the pygments lexers
541 conf.LANGUAGES_EXTENSIONS_MAP.update(getattr(EXT, 'EXTRA_MAPPINGS', {}))
541 conf.LANGUAGES_EXTENSIONS_MAP.update(getattr(EXT, 'EXTRA_MAPPINGS', {}))
542
542
543 #OVERRIDE OUR EXTENSIONS FROM RC-EXTENSIONS (if present)
543 #OVERRIDE OUR EXTENSIONS FROM RC-EXTENSIONS (if present)
544
544
545 if getattr(EXT, 'INDEX_EXTENSIONS', []) != []:
545 if getattr(EXT, 'INDEX_EXTENSIONS', []) != []:
546 log.debug('settings custom INDEX_EXTENSIONS')
546 log.debug('settings custom INDEX_EXTENSIONS')
547 conf.INDEX_EXTENSIONS = getattr(EXT, 'INDEX_EXTENSIONS', [])
547 conf.INDEX_EXTENSIONS = getattr(EXT, 'INDEX_EXTENSIONS', [])
548
548
549 #ADDITIONAL MAPPINGS
549 #ADDITIONAL MAPPINGS
550 log.debug('adding extra into INDEX_EXTENSIONS')
550 log.debug('adding extra into INDEX_EXTENSIONS')
551 conf.INDEX_EXTENSIONS.extend(getattr(EXT, 'EXTRA_INDEX_EXTENSIONS', []))
551 conf.INDEX_EXTENSIONS.extend(getattr(EXT, 'EXTRA_INDEX_EXTENSIONS', []))
552
552
553 # auto check if the module is not missing any data, set to default if it is;
553 # auto check if the module is not missing any data, set to default if it is;
554 # this will help auto-update new features of the rcext module
554 # this will help auto-update new features of the rcext module
555 from rhodecode.config import rcextensions
555 from rhodecode.config import rcextensions
556 for k in dir(rcextensions):
556 for k in dir(rcextensions):
557 if not k.startswith('_') and not hasattr(EXT, k):
557 if not k.startswith('_') and not hasattr(EXT, k):
558 setattr(EXT, k, getattr(rcextensions, k))
558 setattr(EXT, k, getattr(rcextensions, k))
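For reference, a minimal rcextensions/__init__.py that this loader would pick up could look like the sketch below; all values are illustrative and empty values simply keep the defaults:

    # <root_path>/rcextensions/__init__.py
    EXTRA_MAPPINGS = {}          # extra extension -> lexer mappings for pygments
    INDEX_EXTENSIONS = []        # leave empty to keep the default index extensions
    EXTRA_INDEX_EXTENSIONS = []  # extra file extensions to full-text index
    EXTRA_LEXERS = {}            # extension -> lexer name overrides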
559
559
560
560
561 def get_custom_lexer(extension):
561 def get_custom_lexer(extension):
562 """
562 """
563 returns a custom lexer if it's defined in rcextensions module, or None
563 returns a custom lexer if it's defined in rcextensions module, or None
564 if there's no custom lexer defined
564 if there's no custom lexer defined
565 """
565 """
566 import rhodecode
566 import rhodecode
567 from pygments import lexers
567 from pygments import lexers
568 #check if we didn't define this extension as other lexer
568 #check if we didn't define this extension as other lexer
569 if rhodecode.EXTENSIONS and extension in rhodecode.EXTENSIONS.EXTRA_LEXERS:
569 if rhodecode.EXTENSIONS and extension in rhodecode.EXTENSIONS.EXTRA_LEXERS:
570 _lexer_name = rhodecode.EXTENSIONS.EXTRA_LEXERS[extension]
570 _lexer_name = rhodecode.EXTENSIONS.EXTRA_LEXERS[extension]
571 return lexers.get_lexer_by_name(_lexer_name)
571 return lexers.get_lexer_by_name(_lexer_name)
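A hedged example of how EXTRA_LEXERS drives this function (the extension and lexer name are illustrative):

    # with EXTRA_LEXERS = {'tt': 'mako'} defined in rcextensions, files with
    # the 'tt' extension are highlighted with the pygments 'mako' lexer
    lexer = get_custom_lexer('tt')   # returns None when no override is defined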
572
572
573
573
574 #==============================================================================
574 #==============================================================================
575 # TEST FUNCTIONS AND CREATORS
575 # TEST FUNCTIONS AND CREATORS
576 #==============================================================================
576 #==============================================================================
577 def create_test_index(repo_location, config, full_index):
577 def create_test_index(repo_location, config, full_index):
578 """
578 """
579 Makes default test index
579 Makes default test index
580
580
581 :param config: test config
581 :param config: test config
582 :param full_index:
582 :param full_index:
583 """
583 """
584
584
585 from rhodecode.lib.indexers.daemon import WhooshIndexingDaemon
585 from rhodecode.lib.indexers.daemon import WhooshIndexingDaemon
586 from rhodecode.lib.pidlock import DaemonLock, LockHeld
586 from rhodecode.lib.pidlock import DaemonLock, LockHeld
587
587
588 repo_location = repo_location
588 repo_location = repo_location
589
589
590 index_location = os.path.join(config['app_conf']['index_dir'])
590 index_location = os.path.join(config['app_conf']['index_dir'])
591 if not os.path.exists(index_location):
591 if not os.path.exists(index_location):
592 os.makedirs(index_location)
592 os.makedirs(index_location)
593
593
594 try:
594 try:
595 l = DaemonLock(file_=jn(dn(index_location), 'make_index.lock'))
595 l = DaemonLock(file_=jn(dn(index_location), 'make_index.lock'))
596 WhooshIndexingDaemon(index_location=index_location,
596 WhooshIndexingDaemon(index_location=index_location,
597 repo_location=repo_location)\
597 repo_location=repo_location)\
598 .run(full_index=full_index)
598 .run(full_index=full_index)
599 l.release()
599 l.release()
600 except LockHeld:
600 except LockHeld:
601 pass
601 pass
602
602
603
603
604 def create_test_env(repos_test_path, config):
604 def create_test_env(repos_test_path, config):
605 """
605 """
606 Makes a fresh database and
606 Makes a fresh database and
607 install test repository into tmp dir
607 install test repository into tmp dir
608 """
608 """
609 from rhodecode.lib.db_manage import DbManage
609 from rhodecode.lib.db_manage import DbManage
610 from rhodecode.tests import HG_REPO, GIT_REPO, TESTS_TMP_PATH
610 from rhodecode.tests import HG_REPO, GIT_REPO, TESTS_TMP_PATH
611
611
612 # PART ONE create db
612 # PART ONE create db
613 dbconf = config['sqlalchemy.db1.url']
613 dbconf = config['sqlalchemy.db1.url']
614 log.debug('making test db %s' % dbconf)
614 log.debug('making test db %s' % dbconf)
615
615
616 # create test dir if it doesn't exist
616 # create test dir if it doesn't exist
617 if not os.path.isdir(repos_test_path):
617 if not os.path.isdir(repos_test_path):
618 log.debug('Creating testdir %s' % repos_test_path)
618 log.debug('Creating testdir %s' % repos_test_path)
619 os.makedirs(repos_test_path)
619 os.makedirs(repos_test_path)
620
620
621 dbmanage = DbManage(log_sql=True, dbconf=dbconf, root=config['here'],
621 dbmanage = DbManage(log_sql=True, dbconf=dbconf, root=config['here'],
622 tests=True)
622 tests=True)
623 dbmanage.create_tables(override=True)
623 dbmanage.create_tables(override=True)
624 dbmanage.create_settings(dbmanage.config_prompt(repos_test_path))
624 dbmanage.create_settings(dbmanage.config_prompt(repos_test_path))
625 dbmanage.create_default_user()
625 dbmanage.create_default_user()
626 dbmanage.admin_prompt()
626 dbmanage.admin_prompt()
627 dbmanage.create_permissions()
627 dbmanage.create_permissions()
628 dbmanage.populate_default_permissions()
628 dbmanage.populate_default_permissions()
629 Session().commit()
629 Session().commit()
630 # PART TWO make test repo
630 # PART TWO make test repo
631 log.debug('making test vcs repositories')
631 log.debug('making test vcs repositories')
632
632
633 idx_path = config['app_conf']['index_dir']
633 idx_path = config['app_conf']['index_dir']
634 data_path = config['app_conf']['cache_dir']
634 data_path = config['app_conf']['cache_dir']
635
635
636 #clean index and data
636 #clean index and data
637 if idx_path and os.path.exists(idx_path):
637 if idx_path and os.path.exists(idx_path):
638 log.debug('remove %s' % idx_path)
638 log.debug('remove %s' % idx_path)
639 shutil.rmtree(idx_path)
639 shutil.rmtree(idx_path)
640
640
641 if data_path and os.path.exists(data_path):
641 if data_path and os.path.exists(data_path):
642 log.debug('remove %s' % data_path)
642 log.debug('remove %s' % data_path)
643 shutil.rmtree(data_path)
643 shutil.rmtree(data_path)
644
644
645 #CREATE DEFAULT TEST REPOS
645 #CREATE DEFAULT TEST REPOS
646 cur_dir = dn(dn(abspath(__file__)))
646 cur_dir = dn(dn(abspath(__file__)))
647 tar = tarfile.open(jn(cur_dir, 'tests', "vcs_test_hg.tar.gz"))
647 tar = tarfile.open(jn(cur_dir, 'tests', "vcs_test_hg.tar.gz"))
648 tar.extractall(jn(TESTS_TMP_PATH, HG_REPO))
648 tar.extractall(jn(TESTS_TMP_PATH, HG_REPO))
649 tar.close()
649 tar.close()
650
650
651 cur_dir = dn(dn(abspath(__file__)))
651 cur_dir = dn(dn(abspath(__file__)))
652 tar = tarfile.open(jn(cur_dir, 'tests', "vcs_test_git.tar.gz"))
652 tar = tarfile.open(jn(cur_dir, 'tests', "vcs_test_git.tar.gz"))
653 tar.extractall(jn(TESTS_TMP_PATH, GIT_REPO))
653 tar.extractall(jn(TESTS_TMP_PATH, GIT_REPO))
654 tar.close()
654 tar.close()
655
655
656 #LOAD VCS test stuff
656 #LOAD VCS test stuff
657 from rhodecode.tests.vcs import setup_package
657 from rhodecode.tests.vcs import setup_package
658 setup_package()
658 setup_package()
659
659
660
660
661 #==============================================================================
661 #==============================================================================
662 # PASTER COMMANDS
662 # PASTER COMMANDS
663 #==============================================================================
663 #==============================================================================
664 class BasePasterCommand(Command):
664 class BasePasterCommand(Command):
665 """
665 """
666 Abstract Base Class for paster commands.
666 Abstract Base Class for paster commands.
667
667
668 The celery commands are somewhat aggressive about loading
668 The celery commands are somewhat aggressive about loading
669 celery.conf, and since our module sets the `CELERY_LOADER`
669 celery.conf, and since our module sets the `CELERY_LOADER`
670 environment variable to our loader, we have to bootstrap a bit and
670 environment variable to our loader, we have to bootstrap a bit and
671 make sure we've had a chance to load the pylons config off of the
671 make sure we've had a chance to load the pylons config off of the
672 command line, otherwise everything fails.
672 command line, otherwise everything fails.
673 """
673 """
674 min_args = 1
674 min_args = 1
675 min_args_error = "Please provide a paster config file as an argument."
675 min_args_error = "Please provide a paster config file as an argument."
676 takes_config_file = 1
676 takes_config_file = 1
677 requires_config_file = True
677 requires_config_file = True
678
678
679 def notify_msg(self, msg, log=False):
679 def notify_msg(self, msg, log=False):
680 """Make a notification to user, additionally if logger is passed
680 """Make a notification to user, additionally if logger is passed
681 it logs this action using given logger
681 it logs this action using given logger
682
682
683 :param msg: message that will be printed to user
683 :param msg: message that will be printed to user
684 :param log: logging instance, to use to additionally log this message
684 :param log: logging instance, to use to additionally log this message
685
685
686 """
686 """
687 if log and callable(log):
687 if log and callable(log):
688 log(msg)
688 log(msg)
689
689
690 def run(self, args):
690 def run(self, args):
691 """
691 """
692 Overrides Command.run
692 Overrides Command.run
693
693
694 Checks for a config file argument and loads it.
694 Checks for a config file argument and loads it.
695 """
695 """
696 if len(args) < self.min_args:
696 if len(args) < self.min_args:
697 raise BadCommand(
697 raise BadCommand(
698 self.min_args_error % {'min_args': self.min_args,
698 self.min_args_error % {'min_args': self.min_args,
699 'actual_args': len(args)})
699 'actual_args': len(args)})
700
700
701 # Decrement because we're going to lob off the first argument.
701 # Decrement because we're going to lob off the first argument.
702 # @@ This is hacky
702 # @@ This is hacky
703 self.min_args -= 1
703 self.min_args -= 1
704 self.bootstrap_config(args[0])
704 self.bootstrap_config(args[0])
705 self.update_parser()
705 self.update_parser()
706 return super(BasePasterCommand, self).run(args[1:])
706 return super(BasePasterCommand, self).run(args[1:])
707
707
708 def update_parser(self):
708 def update_parser(self):
709 """
709 """
710 Abstract method. Allows for the class's parser to be updated
710 Abstract method. Allows for the class's parser to be updated
711 before the superclass's `run` method is called. Necessary to
711 before the superclass's `run` method is called. Necessary to
712 allow options/arguments to be passed through to the underlying
712 allow options/arguments to be passed through to the underlying
713 celery command.
713 celery command.
714 """
714 """
715 raise NotImplementedError("Abstract Method.")
715 raise NotImplementedError("Abstract Method.")
716
716
717 def bootstrap_config(self, conf):
717 def bootstrap_config(self, conf):
718 """
718 """
719 Loads the pylons configuration.
719 Loads the pylons configuration.
720 """
720 """
721 from pylons import config as pylonsconfig
721 from pylons import config as pylonsconfig
722
722
723 self.path_to_ini_file = os.path.realpath(conf)
723 self.path_to_ini_file = os.path.realpath(conf)
724 conf = paste.deploy.appconfig('config:' + self.path_to_ini_file)
724 conf = paste.deploy.appconfig('config:' + self.path_to_ini_file)
725 pylonsconfig.init_app(conf.global_conf, conf.local_conf)
725 pylonsconfig.init_app(conf.global_conf, conf.local_conf)
726
726
727 def _init_session(self):
727 def _init_session(self):
728 """
728 """
729 Inits SqlAlchemy Session
729 Inits SqlAlchemy Session
730 """
730 """
731 logging.config.fileConfig(self.path_to_ini_file)
731 logging.config.fileConfig(self.path_to_ini_file)
732 from pylons import config
732 from pylons import config
733 from rhodecode.model import init_model
733 from rhodecode.model import init_model
734 from rhodecode.lib.utils2 import engine_from_config
734 from rhodecode.lib.utils2 import engine_from_config
735
735
736 #get to remove repos !!
736 #get to remove repos !!
737 add_cache(config)
737 add_cache(config)
738 engine = engine_from_config(config, 'sqlalchemy.db1.')
738 engine = engine_from_config(config, 'sqlalchemy.db1.')
739 init_model(engine)
739 init_model(engine)
740
740
741
741
742 def check_git_version():
742 def check_git_version():
743 """
743 """
744 Checks what version of git is installed on the system, and issues a warning
744 Checks what version of git is installed on the system, and issues a warning
745 if it's too old for RhodeCode to work properly.
745 if it's too old for RhodeCode to work properly.
746 """
746 """
747 import subprocess
747 from rhodecode import BACKENDS
748 from rhodecode.lib.vcs.backends.git.repository import GitRepository
748 from distutils.version import StrictVersion
749 from distutils.version import StrictVersion
749 from rhodecode import BACKENDS
750
750
751 p = subprocess.Popen('git --version', shell=True,
751 stdout, stderr = GitRepository._run_git_command('--version')
752 stdout=subprocess.PIPE, stderr=subprocess.PIPE)
752
753 stdout, stderr = p.communicate()
754 ver = (stdout.split(' ')[-1] or '').strip() or '0.0.0'
753 ver = (stdout.split(' ')[-1] or '').strip() or '0.0.0'
755 if len(ver.split('.')) > 3:
754 if len(ver.split('.')) > 3:
756 #StrictVersion needs to be only 3 element type
755 #StrictVersion needs to be only 3 element type
757 ver = '.'.join(ver.split('.')[:3])
756 ver = '.'.join(ver.split('.')[:3])
758 try:
757 try:
759 _ver = StrictVersion(ver)
758 _ver = StrictVersion(ver)
760 except:
759 except:
761 _ver = StrictVersion('0.0.0')
760 _ver = StrictVersion('0.0.0')
762 stderr = traceback.format_exc()
761 stderr = traceback.format_exc()
763
762
764 req_ver = '1.7.4'
763 req_ver = '1.7.4'
765 to_old_git = False
764 to_old_git = False
766 if _ver < StrictVersion(req_ver):
765 if _ver < StrictVersion(req_ver):
767 to_old_git = True
766 to_old_git = True
768
767
769 if 'git' in BACKENDS:
768 if 'git' in BACKENDS:
770 log.debug('GIT version detected: %s' % stdout)
769 log.debug('GIT version detected: %s' % stdout)
771 if stderr:
770 if stderr:
772 log.warning('Unable to detect git version, original error was: %r' % stderr)
771 log.warning('Unable to detect git version, original error was: %r' % stderr)
773 elif to_old_git:
772 elif to_old_git:
774 log.warning('RhodeCode detected git version %s, which is too old '
773 log.warning('RhodeCode detected git version %s, which is too old '
775 'for the system to function properly. Make sure '
774 'for the system to function properly. Make sure '
776 'its version is at least %s' % (ver, req_ver))
775 'its version is at least %s' % (ver, req_ver))
777 return _ver
776 return _ver
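With this change the version check goes through GitRepository._run_git_command instead of a hard-coded 'git' call, so the executable can be configured; a hedged sketch of how the configured binary is looked up, following the same pattern GitChangeset.fill_archive uses later in this changeset:

    import rhodecode
    # falls back to plain 'git' when no git_path setting is present
    _git_path = rhodecode.CONFIG.get('git_path', 'git')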
778
777
779
778
780 @decorator.decorator
779 @decorator.decorator
781 def jsonify(func, *args, **kwargs):
780 def jsonify(func, *args, **kwargs):
782 """Action decorator that formats output for JSON
781 """Action decorator that formats output for JSON
783
782
784 Given a function that will return content, this decorator will turn
783 Given a function that will return content, this decorator will turn
785 the result into JSON, with a content-type of 'application/json' and
784 the result into JSON, with a content-type of 'application/json' and
786 output it.
785 output it.
787
786
788 """
787 """
789 from pylons.decorators.util import get_pylons
788 from pylons.decorators.util import get_pylons
790 from rhodecode.lib.ext_json import json
789 from rhodecode.lib.ext_json import json
791 pylons = get_pylons(args)
790 pylons = get_pylons(args)
792 pylons.response.headers['Content-Type'] = 'application/json; charset=utf-8'
791 pylons.response.headers['Content-Type'] = 'application/json; charset=utf-8'
793 data = func(*args, **kwargs)
792 data = func(*args, **kwargs)
794 if isinstance(data, (list, tuple)):
793 if isinstance(data, (list, tuple)):
795 msg = "JSON responses with Array envelopes are susceptible to " \
794 msg = "JSON responses with Array envelopes are susceptible to " \
796 "cross-site data leak attacks, see " \
795 "cross-site data leak attacks, see " \
797 "http://wiki.pylonshq.com/display/pylonsfaq/Warnings"
796 "http://wiki.pylonshq.com/display/pylonsfaq/Warnings"
798 warnings.warn(msg, Warning, 2)
797 warnings.warn(msg, Warning, 2)
799 log.warning(msg)
798 log.warning(msg)
800 log.debug("Returning JSON wrapped action output")
799 log.debug("Returning JSON wrapped action output")
801 return json.dumps(data, encoding='utf-8')
800 return json.dumps(data, encoding='utf-8')
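A hedged usage sketch of the decorator on a Pylons controller action (the controller and returned data are illustrative):

    class ExampleController(BaseController):  # hypothetical controller class
        @jsonify
        def repo_info(self):
            # a dict envelope avoids the array-envelope warning issued above
            return {'name': 'myrepo', 'type': 'git'}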
@@ -1,542 +1,544 b''
1 import re
1 import re
2 from itertools import chain
2 from itertools import chain
3 from dulwich import objects
3 from dulwich import objects
4 from subprocess import Popen, PIPE
4 from subprocess import Popen, PIPE
5 import rhodecode
5 from rhodecode.lib.vcs.conf import settings
6 from rhodecode.lib.vcs.conf import settings
6 from rhodecode.lib.vcs.exceptions import RepositoryError
7 from rhodecode.lib.vcs.exceptions import RepositoryError
7 from rhodecode.lib.vcs.exceptions import ChangesetError
8 from rhodecode.lib.vcs.exceptions import ChangesetError
8 from rhodecode.lib.vcs.exceptions import NodeDoesNotExistError
9 from rhodecode.lib.vcs.exceptions import NodeDoesNotExistError
9 from rhodecode.lib.vcs.exceptions import VCSError
10 from rhodecode.lib.vcs.exceptions import VCSError
10 from rhodecode.lib.vcs.exceptions import ChangesetDoesNotExistError
11 from rhodecode.lib.vcs.exceptions import ChangesetDoesNotExistError
11 from rhodecode.lib.vcs.exceptions import ImproperArchiveTypeError
12 from rhodecode.lib.vcs.exceptions import ImproperArchiveTypeError
12 from rhodecode.lib.vcs.backends.base import BaseChangeset, EmptyChangeset
13 from rhodecode.lib.vcs.backends.base import BaseChangeset, EmptyChangeset
13 from rhodecode.lib.vcs.nodes import FileNode, DirNode, NodeKind, RootNode, \
14 from rhodecode.lib.vcs.nodes import FileNode, DirNode, NodeKind, RootNode, \
14 RemovedFileNode, SubModuleNode, ChangedFileNodesGenerator,\
15 RemovedFileNode, SubModuleNode, ChangedFileNodesGenerator,\
15 AddedFileNodesGenerator, RemovedFileNodesGenerator
16 AddedFileNodesGenerator, RemovedFileNodesGenerator
16 from rhodecode.lib.vcs.utils import safe_unicode
17 from rhodecode.lib.vcs.utils import safe_unicode
17 from rhodecode.lib.vcs.utils import date_fromtimestamp
18 from rhodecode.lib.vcs.utils import date_fromtimestamp
18 from rhodecode.lib.vcs.utils.lazy import LazyProperty
19 from rhodecode.lib.vcs.utils.lazy import LazyProperty
19
20
20
21
21 class GitChangeset(BaseChangeset):
22 class GitChangeset(BaseChangeset):
22 """
23 """
23 Represents state of the repository at single revision.
24 Represents state of the repository at single revision.
24 """
25 """
25
26
26 def __init__(self, repository, revision):
27 def __init__(self, repository, revision):
27 self._stat_modes = {}
28 self._stat_modes = {}
28 self.repository = repository
29 self.repository = repository
29
30
30 try:
31 try:
31 commit = self.repository._repo.get_object(revision)
32 commit = self.repository._repo.get_object(revision)
32 if isinstance(commit, objects.Tag):
33 if isinstance(commit, objects.Tag):
33 revision = commit.object[1]
34 revision = commit.object[1]
34 commit = self.repository._repo.get_object(commit.object[1])
35 commit = self.repository._repo.get_object(commit.object[1])
35 except KeyError:
36 except KeyError:
36 raise RepositoryError("Cannot get object with id %s" % revision)
37 raise RepositoryError("Cannot get object with id %s" % revision)
37 self.raw_id = revision
38 self.raw_id = revision
38 self.id = self.raw_id
39 self.id = self.raw_id
39 self.short_id = self.raw_id[:12]
40 self.short_id = self.raw_id[:12]
40 self._commit = commit
41 self._commit = commit
41
42
42 self._tree_id = commit.tree
43 self._tree_id = commit.tree
43 self._commiter_property = 'committer'
44 self._commiter_property = 'committer'
44 self._author_property = 'author'
45 self._author_property = 'author'
45 self._date_property = 'commit_time'
46 self._date_property = 'commit_time'
46 self._date_tz_property = 'commit_timezone'
47 self._date_tz_property = 'commit_timezone'
47 self.revision = repository.revisions.index(revision)
48 self.revision = repository.revisions.index(revision)
48
49
49 self.message = safe_unicode(commit.message)
50 self.message = safe_unicode(commit.message)
50
51
51 self.nodes = {}
52 self.nodes = {}
52 self._paths = {}
53 self._paths = {}
53
54
54 @LazyProperty
55 @LazyProperty
55 def commiter(self):
56 def commiter(self):
56 return safe_unicode(getattr(self._commit, self._commiter_property))
57 return safe_unicode(getattr(self._commit, self._commiter_property))
57
58
58 @LazyProperty
59 @LazyProperty
59 def author(self):
60 def author(self):
60 return safe_unicode(getattr(self._commit, self._author_property))
61 return safe_unicode(getattr(self._commit, self._author_property))
61
62
62 @LazyProperty
63 @LazyProperty
63 def date(self):
64 def date(self):
64 return date_fromtimestamp(getattr(self._commit, self._date_property),
65 return date_fromtimestamp(getattr(self._commit, self._date_property),
65 getattr(self._commit, self._date_tz_property))
66 getattr(self._commit, self._date_tz_property))
66
67
67 @LazyProperty
68 @LazyProperty
68 def _timestamp(self):
69 def _timestamp(self):
69 return getattr(self._commit, self._date_property)
70 return getattr(self._commit, self._date_property)
70
71
71 @LazyProperty
72 @LazyProperty
72 def status(self):
73 def status(self):
73 """
74 """
74 Returns modified, added and removed files for the current changeset
75 Returns modified, added and removed files for the current changeset
75 """
76 """
76 return self.changed, self.added, self.removed
77 return self.changed, self.added, self.removed
77
78
78 @LazyProperty
79 @LazyProperty
79 def tags(self):
80 def tags(self):
80 _tags = []
81 _tags = []
81 for tname, tsha in self.repository.tags.iteritems():
82 for tname, tsha in self.repository.tags.iteritems():
82 if tsha == self.raw_id:
83 if tsha == self.raw_id:
83 _tags.append(tname)
84 _tags.append(tname)
84 return _tags
85 return _tags
85
86
86 @LazyProperty
87 @LazyProperty
87 def branch(self):
88 def branch(self):
88
89
89 heads = self.repository._heads(reverse=False)
90 heads = self.repository._heads(reverse=False)
90
91
91 ref = heads.get(self.raw_id)
92 ref = heads.get(self.raw_id)
92 if ref:
93 if ref:
93 return safe_unicode(ref)
94 return safe_unicode(ref)
94
95
95 def _fix_path(self, path):
96 def _fix_path(self, path):
96 """
97 """
97 Paths are stored without a trailing slash so we need to get rid of it if
98 Paths are stored without a trailing slash so we need to get rid of it if
98 needed.
99 needed.
99 """
100 """
100 if path.endswith('/'):
101 if path.endswith('/'):
101 path = path.rstrip('/')
102 path = path.rstrip('/')
102 return path
103 return path
103
104
104 def _get_id_for_path(self, path):
105 def _get_id_for_path(self, path):
105
106
106 # FIXME: Please, spare a couple of minutes and make those codes cleaner;
107 # FIXME: Please, spare a couple of minutes and make those codes cleaner;
107 if not path in self._paths:
108 if not path in self._paths:
108 path = path.strip('/')
109 path = path.strip('/')
109 # set root tree
110 # set root tree
110 tree = self.repository._repo[self._tree_id]
111 tree = self.repository._repo[self._tree_id]
111 if path == '':
112 if path == '':
112 self._paths[''] = tree.id
113 self._paths[''] = tree.id
113 return tree.id
114 return tree.id
114 splitted = path.split('/')
115 splitted = path.split('/')
115 dirs, name = splitted[:-1], splitted[-1]
116 dirs, name = splitted[:-1], splitted[-1]
116 curdir = ''
117 curdir = ''
117
118
118 # initially extract things from root dir
119 # initially extract things from root dir
119 for item, stat, id in tree.iteritems():
120 for item, stat, id in tree.iteritems():
120 if curdir:
121 if curdir:
121 name = '/'.join((curdir, item))
122 name = '/'.join((curdir, item))
122 else:
123 else:
123 name = item
124 name = item
124 self._paths[name] = id
125 self._paths[name] = id
125 self._stat_modes[name] = stat
126 self._stat_modes[name] = stat
126
127
127 for dir in dirs:
128 for dir in dirs:
128 if curdir:
129 if curdir:
129 curdir = '/'.join((curdir, dir))
130 curdir = '/'.join((curdir, dir))
130 else:
131 else:
131 curdir = dir
132 curdir = dir
132 dir_id = None
133 dir_id = None
133 for item, stat, id in tree.iteritems():
134 for item, stat, id in tree.iteritems():
134 if dir == item:
135 if dir == item:
135 dir_id = id
136 dir_id = id
136 if dir_id:
137 if dir_id:
137 # Update tree
138 # Update tree
138 tree = self.repository._repo[dir_id]
139 tree = self.repository._repo[dir_id]
139 if not isinstance(tree, objects.Tree):
140 if not isinstance(tree, objects.Tree):
140 raise ChangesetError('%s is not a directory' % curdir)
141 raise ChangesetError('%s is not a directory' % curdir)
141 else:
142 else:
142 raise ChangesetError('%s has not been found' % curdir)
143 raise ChangesetError('%s has not been found' % curdir)
143
144
144 # cache all items from the given traversed tree
145 # cache all items from the given traversed tree
145 for item, stat, id in tree.iteritems():
146 for item, stat, id in tree.iteritems():
146 if curdir:
147 if curdir:
147 name = '/'.join((curdir, item))
148 name = '/'.join((curdir, item))
148 else:
149 else:
149 name = item
150 name = item
150 self._paths[name] = id
151 self._paths[name] = id
151 self._stat_modes[name] = stat
152 self._stat_modes[name] = stat
152 if not path in self._paths:
153 if not path in self._paths:
153 raise NodeDoesNotExistError("There is no file nor directory "
154 raise NodeDoesNotExistError("There is no file nor directory "
154 "at the given path %r at revision %r"
155 "at the given path %r at revision %r"
155 % (path, self.short_id))
156 % (path, self.short_id))
156 return self._paths[path]
157 return self._paths[path]
157
158
158 def _get_kind(self, path):
159 def _get_kind(self, path):
159 obj = self.repository._repo[self._get_id_for_path(path)]
160 obj = self.repository._repo[self._get_id_for_path(path)]
160 if isinstance(obj, objects.Blob):
161 if isinstance(obj, objects.Blob):
161 return NodeKind.FILE
162 return NodeKind.FILE
162 elif isinstance(obj, objects.Tree):
163 elif isinstance(obj, objects.Tree):
163 return NodeKind.DIR
164 return NodeKind.DIR
164
165
165 def _get_filectx(self, path):
166 def _get_filectx(self, path):
166 path = self._fix_path(path)
167 path = self._fix_path(path)
167 if self._get_kind(path) != NodeKind.FILE:
168 if self._get_kind(path) != NodeKind.FILE:
168 raise ChangesetError("File does not exist for revision %r at "
169 raise ChangesetError("File does not exist for revision %r at "
169 " %r" % (self.raw_id, path))
170 " %r" % (self.raw_id, path))
170 return path
171 return path
171
172
172 def _get_file_nodes(self):
173 def _get_file_nodes(self):
173 return chain(*(t[2] for t in self.walk()))
174 return chain(*(t[2] for t in self.walk()))
174
175
175 @LazyProperty
176 @LazyProperty
176 def parents(self):
177 def parents(self):
177 """
178 """
178 Returns list of parent changesets.
179 Returns list of parent changesets.
179 """
180 """
180 return [self.repository.get_changeset(parent)
181 return [self.repository.get_changeset(parent)
181 for parent in self._commit.parents]
182 for parent in self._commit.parents]
182
183
183 @LazyProperty
184 @LazyProperty
184 def children(self):
185 def children(self):
185 """
186 """
186 Returns list of children changesets.
187 Returns list of children changesets.
187 """
188 """
188 so, se = self.repository.run_git_command(
189 so, se = self.repository.run_git_command(
189 "rev-list --all --children | grep '^%s'" % self.raw_id
190 "rev-list --all --children | grep '^%s'" % self.raw_id
190 )
191 )
191
192
192 children = []
193 children = []
193 for l in so.splitlines():
194 for l in so.splitlines():
194 childs = l.split(' ')[1:]
195 childs = l.split(' ')[1:]
195 children.extend(childs)
196 children.extend(childs)
196 return [self.repository.get_changeset(cs) for cs in children]
197 return [self.repository.get_changeset(cs) for cs in children]
197
198
198 def next(self, branch=None):
199 def next(self, branch=None):
199
200
200 if branch and self.branch != branch:
201 if branch and self.branch != branch:
201 raise VCSError('Branch option used on changeset not belonging '
202 raise VCSError('Branch option used on changeset not belonging '
202 'to that branch')
203 'to that branch')
203
204
204 def _next(changeset, branch):
205 def _next(changeset, branch):
205 try:
206 try:
206 next_ = changeset.revision + 1
207 next_ = changeset.revision + 1
207 next_rev = changeset.repository.revisions[next_]
208 next_rev = changeset.repository.revisions[next_]
208 except IndexError:
209 except IndexError:
209 raise ChangesetDoesNotExistError
210 raise ChangesetDoesNotExistError
210 cs = changeset.repository.get_changeset(next_rev)
211 cs = changeset.repository.get_changeset(next_rev)
211
212
212 if branch and branch != cs.branch:
213 if branch and branch != cs.branch:
213 return _next(cs, branch)
214 return _next(cs, branch)
214
215
215 return cs
216 return cs
216
217
217 return _next(self, branch)
218 return _next(self, branch)
218
219
219 def prev(self, branch=None):
220 def prev(self, branch=None):
220 if branch and self.branch != branch:
221 if branch and self.branch != branch:
221 raise VCSError('Branch option used on changeset not belonging '
222 raise VCSError('Branch option used on changeset not belonging '
222 'to that branch')
223 'to that branch')
223
224
224 def _prev(changeset, branch):
225 def _prev(changeset, branch):
225 try:
226 try:
226 prev_ = changeset.revision - 1
227 prev_ = changeset.revision - 1
227 if prev_ < 0:
228 if prev_ < 0:
228 raise IndexError
229 raise IndexError
229 prev_rev = changeset.repository.revisions[prev_]
230 prev_rev = changeset.repository.revisions[prev_]
230 except IndexError:
231 except IndexError:
231 raise ChangesetDoesNotExistError
232 raise ChangesetDoesNotExistError
232
233
233 cs = changeset.repository.get_changeset(prev_rev)
234 cs = changeset.repository.get_changeset(prev_rev)
234
235
235 if branch and branch != cs.branch:
236 if branch and branch != cs.branch:
236 return _prev(cs, branch)
237 return _prev(cs, branch)
237
238
238 return cs
239 return cs
239
240
240 return _prev(self, branch)
241 return _prev(self, branch)
241
242
242 def diff(self, ignore_whitespace=True, context=3):
243 def diff(self, ignore_whitespace=True, context=3):
243 rev1 = self.parents[0] if self.parents else self.repository.EMPTY_CHANGESET
244 rev1 = self.parents[0] if self.parents else self.repository.EMPTY_CHANGESET
244 rev2 = self
245 rev2 = self
245 return ''.join(self.repository.get_diff(rev1, rev2,
246 return ''.join(self.repository.get_diff(rev1, rev2,
246 ignore_whitespace=ignore_whitespace,
247 ignore_whitespace=ignore_whitespace,
247 context=context))
248 context=context))
248
249
249 def get_file_mode(self, path):
250 def get_file_mode(self, path):
250 """
251 """
251 Returns stat mode of the file at the given ``path``.
252 Returns stat mode of the file at the given ``path``.
252 """
253 """
253 # ensure path is traversed
254 # ensure path is traversed
254 self._get_id_for_path(path)
255 self._get_id_for_path(path)
255 return self._stat_modes[path]
256 return self._stat_modes[path]
256
257
257 def get_file_content(self, path):
258 def get_file_content(self, path):
258 """
259 """
259 Returns content of the file at given ``path``.
260 Returns content of the file at given ``path``.
260 """
261 """
261 id = self._get_id_for_path(path)
262 id = self._get_id_for_path(path)
262 blob = self.repository._repo[id]
263 blob = self.repository._repo[id]
263 return blob.as_pretty_string()
264 return blob.as_pretty_string()
264
265
265 def get_file_size(self, path):
266 def get_file_size(self, path):
266 """
267 """
267 Returns size of the file at given ``path``.
268 Returns size of the file at given ``path``.
268 """
269 """
269 id = self._get_id_for_path(path)
270 id = self._get_id_for_path(path)
270 blob = self.repository._repo[id]
271 blob = self.repository._repo[id]
271 return blob.raw_length()
272 return blob.raw_length()
272
273
273 def get_file_changeset(self, path):
274 def get_file_changeset(self, path):
274 """
275 """
275 Returns last commit of the file at the given ``path``.
276 Returns last commit of the file at the given ``path``.
276 """
277 """
277 node = self.get_node(path)
278 node = self.get_node(path)
278 return node.history[0]
279 return node.history[0]
279
280
280 def get_file_history(self, path):
281 def get_file_history(self, path):
281 """
282 """
282 Returns history of file as reversed list of ``Changeset`` objects for
283 Returns history of file as reversed list of ``Changeset`` objects for
283 which file at given ``path`` has been modified.
284 which file at given ``path`` has been modified.
284
285
285 TODO: This function now uses the underlying os 'git' and 'grep' commands,
286 TODO: This function now uses the underlying os 'git' and 'grep' commands,
286 which is generally not good. It should be replaced with an algorithm
287 which is generally not good. It should be replaced with an algorithm
287 iterating over commits.
288 iterating over commits.
288 """
289 """
289 self._get_filectx(path)
290 self._get_filectx(path)
290
291
291 cmd = 'log --pretty="format: %%H" -s -p %s -- "%s"' % (
292 cmd = 'log --pretty="format: %%H" -s -p %s -- "%s"' % (
292 self.id, path
293 self.id, path
293 )
294 )
294 so, se = self.repository.run_git_command(cmd)
295 so, se = self.repository.run_git_command(cmd)
295 ids = re.findall(r'[0-9a-fA-F]{40}', so)
296 ids = re.findall(r'[0-9a-fA-F]{40}', so)
296 return [self.repository.get_changeset(id) for id in ids]
297 return [self.repository.get_changeset(id) for id in ids]
297
298
298 def get_file_history_2(self, path):
299 def get_file_history_2(self, path):
299 """
300 """
300 Returns history of file as reversed list of ``Changeset`` objects for
301 Returns history of file as reversed list of ``Changeset`` objects for
301 which file at given ``path`` has been modified.
302 which file at given ``path`` has been modified.
302
303
303 """
304 """
304 self._get_filectx(path)
305 self._get_filectx(path)
305 from dulwich.walk import Walker
306 from dulwich.walk import Walker
306 include = [self.id]
307 include = [self.id]
307 walker = Walker(self.repository._repo.object_store, include,
308 walker = Walker(self.repository._repo.object_store, include,
308 paths=[path], max_entries=1)
309 paths=[path], max_entries=1)
309 return [self.repository.get_changeset(sha)
310 return [self.repository.get_changeset(sha)
310 for sha in (x.commit.id for x in walker)]
311 for sha in (x.commit.id for x in walker)]
311
312
312 def get_file_annotate(self, path):
313 def get_file_annotate(self, path):
313 """
314 """
314 Returns a generator of four element tuples with
315 Returns a generator of four element tuples with
315 lineno, sha, changeset lazy loader and line
316 lineno, sha, changeset lazy loader and line
316
317
317 TODO: This function now uses the underlying os 'git' command, which is
318 TODO: This function now uses the underlying os 'git' command, which is
318 generally not good. It should be replaced with an algorithm iterating
319 generally not good. It should be replaced with an algorithm iterating
319 over commits.
320 over commits.
320 """
321 """
321 cmd = 'blame -l --root -r %s -- "%s"' % (self.id, path)
322 cmd = 'blame -l --root -r %s -- "%s"' % (self.id, path)
322 # -l ==> outputs long shas (and we need all 40 characters)
323 # -l ==> outputs long shas (and we need all 40 characters)
323 # --root ==> doesn't put '^' character for boundaries
324 # --root ==> doesn't put '^' character for boundaries
324 # -r sha ==> blames for the given revision
325 # -r sha ==> blames for the given revision
325 so, se = self.repository.run_git_command(cmd)
326 so, se = self.repository.run_git_command(cmd)
326
327
327 for i, blame_line in enumerate(so.split('\n')[:-1]):
328 for i, blame_line in enumerate(so.split('\n')[:-1]):
328 ln_no = i + 1
329 ln_no = i + 1
329 sha, line = re.split(r' ', blame_line, 1)
330 sha, line = re.split(r' ', blame_line, 1)
330 yield (ln_no, sha, lambda: self.repository.get_changeset(sha), line)
331 yield (ln_no, sha, lambda: self.repository.get_changeset(sha), line)
331
332
332 def fill_archive(self, stream=None, kind='tgz', prefix=None,
333 def fill_archive(self, stream=None, kind='tgz', prefix=None,
333 subrepos=False):
334 subrepos=False):
334 """
335 """
335 Fills up given stream.
336 Fills up given stream.
336
337
337 :param stream: file like object.
338 :param stream: file like object.
338 :param kind: one of following: ``zip``, ``tgz`` or ``tbz2``.
339 :param kind: one of following: ``zip``, ``tgz`` or ``tbz2``.
339 Default: ``tgz``.
340 Default: ``tgz``.
340 :param prefix: name of root directory in archive.
341 :param prefix: name of root directory in archive.
341 Default is repository name and changeset's raw_id joined with dash
342 Default is repository name and changeset's raw_id joined with dash
342 (``repo-tip.<KIND>``).
343 (``repo-tip.<KIND>``).
343 :param subrepos: include subrepos in this archive.
344 :param subrepos: include subrepos in this archive.
344
345
345 :raise ImproperArchiveTypeError: If given kind is wrong.
346 :raise ImproperArchiveTypeError: If given kind is wrong.
346 :raise VcsError: If given stream is None
347 :raise VcsError: If given stream is None
347
348
348 """
349 """
349 allowed_kinds = settings.ARCHIVE_SPECS.keys()
350 allowed_kinds = settings.ARCHIVE_SPECS.keys()
350 if kind not in allowed_kinds:
351 if kind not in allowed_kinds:
351 raise ImproperArchiveTypeError('Archive kind not supported, use one'
352 raise ImproperArchiveTypeError('Archive kind not supported, use one'
352 ' of %s', allowed_kinds)
353 ' of %s', allowed_kinds)
353
354
354 if prefix is None:
355 if prefix is None:
355 prefix = '%s-%s' % (self.repository.name, self.short_id)
356 prefix = '%s-%s' % (self.repository.name, self.short_id)
356 elif prefix.startswith('/'):
357 elif prefix.startswith('/'):
357 raise VCSError("Prefix cannot start with leading slash")
358 raise VCSError("Prefix cannot start with leading slash")
358 elif prefix.strip() == '':
359 elif prefix.strip() == '':
359 raise VCSError("Prefix cannot be empty")
360 raise VCSError("Prefix cannot be empty")
360
361
361 if kind == 'zip':
362 if kind == 'zip':
362 frmt = 'zip'
363 frmt = 'zip'
363 else:
364 else:
364 frmt = 'tar'
365 frmt = 'tar'
365 cmd = 'git archive --format=%s --prefix=%s/ %s' % (frmt, prefix,
366 _git_path = rhodecode.CONFIG.get('git_path', 'git')
366 self.raw_id)
367 cmd = '%s archive --format=%s --prefix=%s/ %s' % (_git_path,
368 frmt, prefix, self.raw_id)
367 if kind == 'tgz':
369 if kind == 'tgz':
368 cmd += ' | gzip -9'
370 cmd += ' | gzip -9'
369 elif kind == 'tbz2':
371 elif kind == 'tbz2':
370 cmd += ' | bzip2 -9'
372 cmd += ' | bzip2 -9'
371
373
372 if stream is None:
374 if stream is None:
373 raise VCSError('You need to pass in a valid stream for filling'
375 raise VCSError('You need to pass in a valid stream for filling'
374 ' with archival data')
376 ' with archival data')
375 popen = Popen(cmd, stdout=PIPE, stderr=PIPE, shell=True,
377 popen = Popen(cmd, stdout=PIPE, stderr=PIPE, shell=True,
376 cwd=self.repository.path)
378 cwd=self.repository.path)
377
379
378 buffer_size = 1024 * 8
380 buffer_size = 1024 * 8
379 chunk = popen.stdout.read(buffer_size)
381 chunk = popen.stdout.read(buffer_size)
380 while chunk:
382 while chunk:
381 stream.write(chunk)
383 stream.write(chunk)
382 chunk = popen.stdout.read(buffer_size)
384 chunk = popen.stdout.read(buffer_size)
383 # Make sure all descriptors would be read
385 # Make sure all descriptors would be read
384 popen.communicate()
386 popen.communicate()
385
387
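A minimal sketch of driving fill_archive; the output path is a placeholder and cs is an assumed GitChangeset. A real stream must be passed in, otherwise VCSError is raised:

archive = open('/tmp/myrepo-tip.tar.gz', 'wb')   # placeholder output path
try:
    cs.fill_archive(stream=archive, kind='tgz')
finally:
    archive.close()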
386 def get_nodes(self, path):
388 def get_nodes(self, path):
387 if self._get_kind(path) != NodeKind.DIR:
389 if self._get_kind(path) != NodeKind.DIR:
388 raise ChangesetError("Directory does not exist for revision %r at "
390 raise ChangesetError("Directory does not exist for revision %r at "
389 " %r" % (self.revision, path))
391 " %r" % (self.revision, path))
390 path = self._fix_path(path)
392 path = self._fix_path(path)
391 id = self._get_id_for_path(path)
393 id = self._get_id_for_path(path)
392 tree = self.repository._repo[id]
394 tree = self.repository._repo[id]
393 dirnodes = []
395 dirnodes = []
394 filenodes = []
396 filenodes = []
395 als = self.repository.alias
397 als = self.repository.alias
396 for name, stat, id in tree.iteritems():
398 for name, stat, id in tree.iteritems():
397 if objects.S_ISGITLINK(stat):
399 if objects.S_ISGITLINK(stat):
398 dirnodes.append(SubModuleNode(name, url=None, changeset=id,
400 dirnodes.append(SubModuleNode(name, url=None, changeset=id,
399 alias=als))
401 alias=als))
400 continue
402 continue
401
403
402 obj = self.repository._repo.get_object(id)
404 obj = self.repository._repo.get_object(id)
403 if path != '':
405 if path != '':
404 obj_path = '/'.join((path, name))
406 obj_path = '/'.join((path, name))
405 else:
407 else:
406 obj_path = name
408 obj_path = name
407 if obj_path not in self._stat_modes:
409 if obj_path not in self._stat_modes:
408 self._stat_modes[obj_path] = stat
410 self._stat_modes[obj_path] = stat
409 if isinstance(obj, objects.Tree):
411 if isinstance(obj, objects.Tree):
410 dirnodes.append(DirNode(obj_path, changeset=self))
412 dirnodes.append(DirNode(obj_path, changeset=self))
411 elif isinstance(obj, objects.Blob):
413 elif isinstance(obj, objects.Blob):
412 filenodes.append(FileNode(obj_path, changeset=self, mode=stat))
414 filenodes.append(FileNode(obj_path, changeset=self, mode=stat))
413 else:
415 else:
414 raise ChangesetError("Requested object should be Tree "
416 raise ChangesetError("Requested object should be Tree "
415 "or Blob, is %r" % type(obj))
417 "or Blob, is %r" % type(obj))
416 nodes = dirnodes + filenodes
418 nodes = dirnodes + filenodes
417 for node in nodes:
419 for node in nodes:
418 if not node.path in self.nodes:
420 if not node.path in self.nodes:
419 self.nodes[node.path] = node
421 self.nodes[node.path] = node
420 nodes.sort()
422 nodes.sort()
421 return nodes
423 return nodes
422
424
423 def get_node(self, path):
425 def get_node(self, path):
424 if isinstance(path, unicode):
426 if isinstance(path, unicode):
425 path = path.encode('utf-8')
427 path = path.encode('utf-8')
426 path = self._fix_path(path)
428 path = self._fix_path(path)
427 if not path in self.nodes:
429 if not path in self.nodes:
428 try:
430 try:
429 id_ = self._get_id_for_path(path)
431 id_ = self._get_id_for_path(path)
430 except ChangesetError:
432 except ChangesetError:
431 raise NodeDoesNotExistError("Cannot find one of parents' "
433 raise NodeDoesNotExistError("Cannot find one of parents' "
432 "directories for a given path: %s" % path)
434 "directories for a given path: %s" % path)
433
435
434 _GL = lambda m: m and objects.S_ISGITLINK(m)
436 _GL = lambda m: m and objects.S_ISGITLINK(m)
435 if _GL(self._stat_modes.get(path)):
437 if _GL(self._stat_modes.get(path)):
436 node = SubModuleNode(path, url=None, changeset=id_,
438 node = SubModuleNode(path, url=None, changeset=id_,
437 alias=self.repository.alias)
439 alias=self.repository.alias)
438 else:
440 else:
439 obj = self.repository._repo.get_object(id_)
441 obj = self.repository._repo.get_object(id_)
440
442
441 if isinstance(obj, objects.Tree):
443 if isinstance(obj, objects.Tree):
442 if path == '':
444 if path == '':
443 node = RootNode(changeset=self)
445 node = RootNode(changeset=self)
444 else:
446 else:
445 node = DirNode(path, changeset=self)
447 node = DirNode(path, changeset=self)
446 node._tree = obj
448 node._tree = obj
447 elif isinstance(obj, objects.Blob):
449 elif isinstance(obj, objects.Blob):
448 node = FileNode(path, changeset=self)
450 node = FileNode(path, changeset=self)
449 node._blob = obj
451 node._blob = obj
450 else:
452 else:
451 raise NodeDoesNotExistError("There is no file nor directory "
453 raise NodeDoesNotExistError("There is no file nor directory "
452 "at the given path %r at revision %r"
454 "at the given path %r at revision %r"
453 % (path, self.short_id))
455 % (path, self.short_id))
454 # cache node
456 # cache node
455 self.nodes[path] = node
457 self.nodes[path] = node
456 return self.nodes[path]
458 return self.nodes[path]
457
459
458 @LazyProperty
460 @LazyProperty
459 def affected_files(self):
461 def affected_files(self):
460 """
462 """
461 Gets fast-accessible file changes for the given changeset
463 Gets fast-accessible file changes for the given changeset
462 """
464 """
463 a, m, d = self._changes_cache
465 a, m, d = self._changes_cache
464 return list(a.union(m).union(d))
466 return list(a.union(m).union(d))
465
467
466 @LazyProperty
468 @LazyProperty
467 def _diff_name_status(self):
469 def _diff_name_status(self):
468 output = []
470 output = []
469 for parent in self.parents:
471 for parent in self.parents:
470 cmd = 'diff --name-status %s %s --encoding=utf8' % (parent.raw_id,
472 cmd = 'diff --name-status %s %s --encoding=utf8' % (parent.raw_id,
471 self.raw_id)
473 self.raw_id)
472 so, se = self.repository.run_git_command(cmd)
474 so, se = self.repository.run_git_command(cmd)
473 output.append(so.strip())
475 output.append(so.strip())
474 return '\n'.join(output)
476 return '\n'.join(output)
475
477
476 @LazyProperty
478 @LazyProperty
477 def _changes_cache(self):
479 def _changes_cache(self):
478 added = set()
480 added = set()
479 modified = set()
481 modified = set()
480 deleted = set()
482 deleted = set()
481 _r = self.repository._repo
483 _r = self.repository._repo
482
484
483 parents = self.parents
485 parents = self.parents
484 if not self.parents:
486 if not self.parents:
485 parents = [EmptyChangeset()]
487 parents = [EmptyChangeset()]
486 for parent in parents:
488 for parent in parents:
487 if isinstance(parent, EmptyChangeset):
489 if isinstance(parent, EmptyChangeset):
488 oid = None
490 oid = None
489 else:
491 else:
490 oid = _r[parent.raw_id].tree
492 oid = _r[parent.raw_id].tree
491 changes = _r.object_store.tree_changes(oid, _r[self.raw_id].tree)
493 changes = _r.object_store.tree_changes(oid, _r[self.raw_id].tree)
492 for (oldpath, newpath), (_, _), (_, _) in changes:
494 for (oldpath, newpath), (_, _), (_, _) in changes:
493 if newpath and oldpath:
495 if newpath and oldpath:
494 modified.add(newpath)
496 modified.add(newpath)
495 elif newpath and not oldpath:
497 elif newpath and not oldpath:
496 added.add(newpath)
498 added.add(newpath)
497 elif not newpath and oldpath:
499 elif not newpath and oldpath:
498 deleted.add(oldpath)
500 deleted.add(oldpath)
499 return added, modified, deleted
501 return added, modified, deleted
500
502
501 def _get_paths_for_status(self, status):
503 def _get_paths_for_status(self, status):
502 """
504 """
503 Returns sorted list of paths for given ``status``.
505 Returns sorted list of paths for given ``status``.
504
506
505 :param status: one of: *added*, *modified* or *deleted*
507 :param status: one of: *added*, *modified* or *deleted*
506 """
508 """
507 a, m, d = self._changes_cache
509 a, m, d = self._changes_cache
508 return sorted({
510 return sorted({
509 'added': list(a),
511 'added': list(a),
510 'modified': list(m),
512 'modified': list(m),
511 'deleted': list(d)}[status]
513 'deleted': list(d)}[status]
512 )
514 )
513
515
514 @LazyProperty
516 @LazyProperty
515 def added(self):
517 def added(self):
516 """
518 """
517 Returns list of added ``FileNode`` objects.
519 Returns list of added ``FileNode`` objects.
518 """
520 """
519 if not self.parents:
521 if not self.parents:
520 return list(self._get_file_nodes())
522 return list(self._get_file_nodes())
521 return AddedFileNodesGenerator([n for n in
523 return AddedFileNodesGenerator([n for n in
522 self._get_paths_for_status('added')], self)
524 self._get_paths_for_status('added')], self)
523
525
524 @LazyProperty
526 @LazyProperty
525 def changed(self):
527 def changed(self):
526 """
528 """
527 Returns list of modified ``FileNode`` objects.
529 Returns list of modified ``FileNode`` objects.
528 """
530 """
529 if not self.parents:
531 if not self.parents:
530 return []
532 return []
531 return ChangedFileNodesGenerator([n for n in
533 return ChangedFileNodesGenerator([n for n in
532 self._get_paths_for_status('modified')], self)
534 self._get_paths_for_status('modified')], self)
533
535
534 @LazyProperty
536 @LazyProperty
535 def removed(self):
537 def removed(self):
536 """
538 """
537 Returns list of removed ``FileNode`` objects.
539 Returns list of removed ``FileNode`` objects.
538 """
540 """
539 if not self.parents:
541 if not self.parents:
540 return []
542 return []
541 return RemovedFileNodesGenerator([n for n in
543 return RemovedFileNodesGenerator([n for n in
542 self._get_paths_for_status('deleted')], self)
544 self._get_paths_for_status('deleted')], self)
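A short sketch of consuming the three properties above together with affected_files; cs is again an assumed GitChangeset instance:

print '%d added, %d changed, %d removed' % (
    len(list(cs.added)), len(list(cs.changed)), len(list(cs.removed)))
for path in cs.affected_files:
    print path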
@@ -1,669 +1,673 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2 """
2 """
3 vcs.backends.git
3 vcs.backends.git
4 ~~~~~~~~~~~~~~~~
4 ~~~~~~~~~~~~~~~~
5
5
6 Git backend implementation.
6 Git backend implementation.
7
7
8 :created_on: Apr 8, 2010
8 :created_on: Apr 8, 2010
9 :copyright: (c) 2010-2011 by Marcin Kuzminski, Lukasz Balcerzak.
9 :copyright: (c) 2010-2011 by Marcin Kuzminski, Lukasz Balcerzak.
10 """
10 """
11
11
12 import os
12 import os
13 import re
13 import re
14 import time
14 import time
15 import posixpath
15 import posixpath
16 import logging
16 import logging
17 import traceback
17 import traceback
18 import urllib
18 import urllib
19 import urllib2
19 import urllib2
20 from dulwich.repo import Repo, NotGitRepository
20 from dulwich.repo import Repo, NotGitRepository
21 from dulwich.objects import Tag
21 from dulwich.objects import Tag
22 from string import Template
22 from string import Template
23 from subprocess import Popen, PIPE
23
24 import rhodecode
24 from rhodecode.lib.vcs.backends.base import BaseRepository
25 from rhodecode.lib.vcs.backends.base import BaseRepository
25 from rhodecode.lib.vcs.exceptions import BranchDoesNotExistError
26 from rhodecode.lib.vcs.exceptions import BranchDoesNotExistError
26 from rhodecode.lib.vcs.exceptions import ChangesetDoesNotExistError
27 from rhodecode.lib.vcs.exceptions import ChangesetDoesNotExistError
27 from rhodecode.lib.vcs.exceptions import EmptyRepositoryError
28 from rhodecode.lib.vcs.exceptions import EmptyRepositoryError
28 from rhodecode.lib.vcs.exceptions import RepositoryError
29 from rhodecode.lib.vcs.exceptions import RepositoryError
29 from rhodecode.lib.vcs.exceptions import TagAlreadyExistError
30 from rhodecode.lib.vcs.exceptions import TagAlreadyExistError
30 from rhodecode.lib.vcs.exceptions import TagDoesNotExistError
31 from rhodecode.lib.vcs.exceptions import TagDoesNotExistError
31 from rhodecode.lib.vcs.utils import safe_unicode, makedate, date_fromtimestamp
32 from rhodecode.lib.vcs.utils import safe_unicode, makedate, date_fromtimestamp
32 from rhodecode.lib.vcs.utils.lazy import LazyProperty, ThreadLocalLazyProperty
33 from rhodecode.lib.vcs.utils.lazy import LazyProperty, ThreadLocalLazyProperty
33 from rhodecode.lib.vcs.utils.ordered_dict import OrderedDict
34 from rhodecode.lib.vcs.utils.ordered_dict import OrderedDict
34 from rhodecode.lib.vcs.utils.paths import abspath
35 from rhodecode.lib.vcs.utils.paths import abspath
35 from rhodecode.lib.vcs.utils.paths import get_user_home
36 from rhodecode.lib.vcs.utils.paths import get_user_home
36 from .workdir import GitWorkdir
37 from .workdir import GitWorkdir
37 from .changeset import GitChangeset
38 from .changeset import GitChangeset
38 from .inmemory import GitInMemoryChangeset
39 from .inmemory import GitInMemoryChangeset
39 from .config import ConfigFile
40 from .config import ConfigFile
40 from rhodecode.lib import subprocessio
41 from rhodecode.lib import subprocessio
41
42
42
43
43 log = logging.getLogger(__name__)
44 log = logging.getLogger(__name__)
44
45
45
46
46 class GitRepository(BaseRepository):
47 class GitRepository(BaseRepository):
47 """
48 """
48 Git repository backend.
49 Git repository backend.
49 """
50 """
50 DEFAULT_BRANCH_NAME = 'master'
51 DEFAULT_BRANCH_NAME = 'master'
51 scm = 'git'
52 scm = 'git'
52
53
53 def __init__(self, repo_path, create=False, src_url=None,
54 def __init__(self, repo_path, create=False, src_url=None,
54 update_after_clone=False, bare=False):
55 update_after_clone=False, bare=False):
55
56
56 self.path = abspath(repo_path)
57 self.path = abspath(repo_path)
57 repo = self._get_repo(create, src_url, update_after_clone, bare)
58 repo = self._get_repo(create, src_url, update_after_clone, bare)
58 self.bare = repo.bare
59 self.bare = repo.bare
59
60
60 self._config_files = [
61 self._config_files = [
61 bare and abspath(self.path, 'config')
62 bare and abspath(self.path, 'config')
62 or abspath(self.path, '.git', 'config'),
63 or abspath(self.path, '.git', 'config'),
63 abspath(get_user_home(), '.gitconfig'),
64 abspath(get_user_home(), '.gitconfig'),
64 ]
65 ]
65
66
66 @ThreadLocalLazyProperty
67 @ThreadLocalLazyProperty
67 def _repo(self):
68 def _repo(self):
68 repo = Repo(self.path)
69 repo = Repo(self.path)
69 # temporarily set to None here; later we will move it to the constructor
70 # temporarily set to None here; later we will move it to the constructor
70 baseui = None
71 baseui = None
71 if baseui is None:
72 if baseui is None:
72 from mercurial.ui import ui
73 from mercurial.ui import ui
73 baseui = ui()
74 baseui = ui()
74 # patch the instance of GitRepo with a "FAKE" ui object to add
75 # patch the instance of GitRepo with a "FAKE" ui object to add
75 # compatibility layer with Mercurial
76 # compatibility layer with Mercurial
76 setattr(repo, 'ui', baseui)
77 setattr(repo, 'ui', baseui)
77 return repo
78 return repo
78
79
79 @property
80 @property
80 def head(self):
81 def head(self):
81 try:
82 try:
82 return self._repo.head()
83 return self._repo.head()
83 except KeyError:
84 except KeyError:
84 return None
85 return None
85
86
86 @LazyProperty
87 @LazyProperty
87 def revisions(self):
88 def revisions(self):
88 """
89 """
89 Returns a list of revision ids, in ascending order. Being a lazy
90 Returns a list of revision ids, in ascending order. Being a lazy
90 attribute allows external tools to inject shas from a cache.
91 attribute allows external tools to inject shas from a cache.
91 """
92 """
92 return self._get_all_revisions()
93 return self._get_all_revisions()
93
94
94 def run_git_command(self, cmd):
95 @classmethod
96 def _run_git_command(cls, cmd, **opts):
95 """
97 """
96 Runs given ``cmd`` as git command and returns tuple
98 Runs given ``cmd`` as git command and returns tuple
97 (returncode, stdout, stderr).
99 (stdout, stderr).
98
99 .. note::
100 This method exists only until log/blame functionality is implemented
101 at Dulwich (see https://bugs.launchpad.net/bugs/645142). Parsing
102 os command's output is road to hell...
103
100
104 :param cmd: git command to be executed
101 :param cmd: git command to be executed
102 :param opts: env options to pass into Subprocess command
105 """
103 """
106
104
107 _copts = ['-c', 'core.quotepath=false', ]
105 _copts = ['-c', 'core.quotepath=false', ]
108 _str_cmd = False
106 _str_cmd = False
109 if isinstance(cmd, basestring):
107 if isinstance(cmd, basestring):
110 cmd = [cmd]
108 cmd = [cmd]
111 _str_cmd = True
109 _str_cmd = True
112
110
113 gitenv = os.environ
111 gitenv = os.environ
114 # need to clean/fix GIT_DIR!
112 # need to clean/fix GIT_DIR!
115 if 'GIT_DIR' in gitenv:
113 if 'GIT_DIR' in gitenv:
116 del gitenv['GIT_DIR']
114 del gitenv['GIT_DIR']
117 gitenv['GIT_CONFIG_NOGLOBAL'] = '1'
115 gitenv['GIT_CONFIG_NOGLOBAL'] = '1'
118
116
119 cmd = ['git'] + _copts + cmd
117 _git_path = rhodecode.CONFIG.get('git_path', 'git')
118 cmd = [_git_path] + _copts + cmd
120 if _str_cmd:
119 if _str_cmd:
121 cmd = ' '.join(cmd)
120 cmd = ' '.join(cmd)
122 try:
121 try:
123 opts = dict(
122 _opts = dict(
124 env=gitenv,
123 env=gitenv,
125 shell=False,
124 shell=False,
126 )
125 )
127 if os.path.isdir(self.path):
126 _opts.update(opts)
128 opts['cwd'] = self.path
127 p = subprocessio.SubprocessIOChunker(cmd, **_opts)
129 p = subprocessio.SubprocessIOChunker(cmd, **opts)
130 except (EnvironmentError, OSError), err:
128 except (EnvironmentError, OSError), err:
131 log.error(traceback.format_exc())
129 log.error(traceback.format_exc())
132 raise RepositoryError("Couldn't run git command (%s).\n"
130 raise RepositoryError("Couldn't run git command (%s).\n"
133 "Original error was:%s" % (cmd, err))
131 "Original error was:%s" % (cmd, err))
134
132
135 return ''.join(p.output), ''.join(p.error)
133 return ''.join(p.output), ''.join(p.error)
136
134
135 def run_git_command(self, cmd):
136 opts = {}
137 if os.path.isdir(self.path):
138 opts['cwd'] = self.path
139 return self._run_git_command(cmd, **opts)
140
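The refactoring above is what makes the git executable configurable: _run_git_command resolves the binary from rhodecode.CONFIG.get('git_path', 'git'), which is populated from the git_path key of the .ini file. A hedged sketch of the effect; the binary and repository paths are placeholders, and assigning to CONFIG directly is only an illustration of what the .ini setting amounts to:

import rhodecode
from rhodecode.lib.vcs.backends.git import GitRepository

# same effect as setting "git_path = /opt/git/bin/git" in the .ini file
rhodecode.CONFIG['git_path'] = '/opt/git/bin/git'

repo = GitRepository('/srv/repos/myrepo')
stdout, stderr = repo.run_git_command('rev-parse --short HEAD')
print stdout.strip()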
137 @classmethod
141 @classmethod
138 def _check_url(cls, url):
142 def _check_url(cls, url):
139 """
143 """
140 Function will check the given url and try to verify that it's a valid
144 Function will check the given url and try to verify that it's a valid
141 link. Sometimes it may happen that mercurial will issue a basic
145 link. Sometimes it may happen that mercurial will issue a basic
142 auth request that can cause the whole API to hang when used from
146 auth request that can cause the whole API to hang when used from
143 python or other external calls.
147 python or other external calls.
144
148
145 On failures it'll raise urllib2.HTTPError
149 On failures it'll raise urllib2.HTTPError
146 """
150 """
147 from mercurial.util import url as Url
151 from mercurial.util import url as Url
148
152
149 # those auth handlers are patched for a python 2.6.5 bug and
153 # those auth handlers are patched for a python 2.6.5 bug and
150 # infinite looping when given invalid resources
154 # infinite looping when given invalid resources
151 from mercurial.url import httpbasicauthhandler, httpdigestauthhandler
155 from mercurial.url import httpbasicauthhandler, httpdigestauthhandler
152
156
153 # check first if it's not a local url
157 # check first if it's not a local url
154 if os.path.isdir(url) or url.startswith('file:'):
158 if os.path.isdir(url) or url.startswith('file:'):
155 return True
159 return True
156
160
157 if('+' in url[:url.find('://')]):
161 if('+' in url[:url.find('://')]):
158 url = url[url.find('+') + 1:]
162 url = url[url.find('+') + 1:]
159
163
160 handlers = []
164 handlers = []
161 test_uri, authinfo = Url(url).authinfo()
165 test_uri, authinfo = Url(url).authinfo()
162 if not test_uri.endswith('info/refs'):
166 if not test_uri.endswith('info/refs'):
163 test_uri = test_uri.rstrip('/') + '/info/refs'
167 test_uri = test_uri.rstrip('/') + '/info/refs'
164 if authinfo:
168 if authinfo:
165 #create a password manager
169 #create a password manager
166 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
170 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
167 passmgr.add_password(*authinfo)
171 passmgr.add_password(*authinfo)
168
172
169 handlers.extend((httpbasicauthhandler(passmgr),
173 handlers.extend((httpbasicauthhandler(passmgr),
170 httpdigestauthhandler(passmgr)))
174 httpdigestauthhandler(passmgr)))
171
175
172 o = urllib2.build_opener(*handlers)
176 o = urllib2.build_opener(*handlers)
173 o.addheaders = [('User-Agent', 'git/1.7.8.0')] # fake some git
177 o.addheaders = [('User-Agent', 'git/1.7.8.0')] # fake some git
174
178
175 q = {"service": 'git-upload-pack'}
179 q = {"service": 'git-upload-pack'}
176 qs = '?%s' % urllib.urlencode(q)
180 qs = '?%s' % urllib.urlencode(q)
177 cu = "%s%s" % (test_uri, qs)
181 cu = "%s%s" % (test_uri, qs)
178 req = urllib2.Request(cu, None, {})
182 req = urllib2.Request(cu, None, {})
179
183
180 try:
184 try:
181 resp = o.open(req)
185 resp = o.open(req)
182 return resp.code == 200
186 return resp.code == 200
183 except Exception, e:
187 except Exception, e:
184 # means it cannot be cloned
188 # means it cannot be cloned
185 raise urllib2.URLError("[%s] %s" % (url, e))
189 raise urllib2.URLError("[%s] %s" % (url, e))
186
190
187 def _get_repo(self, create, src_url=None, update_after_clone=False,
191 def _get_repo(self, create, src_url=None, update_after_clone=False,
188 bare=False):
192 bare=False):
189 if create and os.path.exists(self.path):
193 if create and os.path.exists(self.path):
190 raise RepositoryError("Location already exists")
194 raise RepositoryError("Location already exists")
191 if src_url and not create:
195 if src_url and not create:
192 raise RepositoryError("Create should be set to True if src_url is "
196 raise RepositoryError("Create should be set to True if src_url is "
193 "given (clone operation creates repository)")
197 "given (clone operation creates repository)")
194 try:
198 try:
195 if create and src_url:
199 if create and src_url:
196 GitRepository._check_url(src_url)
200 GitRepository._check_url(src_url)
197 self.clone(src_url, update_after_clone, bare)
201 self.clone(src_url, update_after_clone, bare)
198 return Repo(self.path)
202 return Repo(self.path)
199 elif create:
203 elif create:
200 os.mkdir(self.path)
204 os.mkdir(self.path)
201 if bare:
205 if bare:
202 return Repo.init_bare(self.path)
206 return Repo.init_bare(self.path)
203 else:
207 else:
204 return Repo.init(self.path)
208 return Repo.init(self.path)
205 else:
209 else:
206 return Repo(self.path)
210 return Repo(self.path)
207 except (NotGitRepository, OSError), err:
211 except (NotGitRepository, OSError), err:
208 raise RepositoryError(err)
212 raise RepositoryError(err)
209
213
210 def _get_all_revisions(self):
214 def _get_all_revisions(self):
211 # we must check that this repo is not empty, since the later command
215 # we must check that this repo is not empty, since the later command
212 # fails if it is. And it's cheaper to ask than to handle the
216 # fails if it is. And it's cheaper to ask than to handle the
213 # subprocess errors
217 # subprocess errors
214 try:
218 try:
215 self._repo.head()
219 self._repo.head()
216 except KeyError:
220 except KeyError:
217 return []
221 return []
218 cmd = 'rev-list --all --reverse --date-order'
222 cmd = 'rev-list --all --reverse --date-order'
219 try:
223 try:
220 so, se = self.run_git_command(cmd)
224 so, se = self.run_git_command(cmd)
221 except RepositoryError:
225 except RepositoryError:
222 # Can be raised for empty repositories
226 # Can be raised for empty repositories
223 return []
227 return []
224 return so.splitlines()
228 return so.splitlines()
225
229
226 def _get_all_revisions2(self):
230 def _get_all_revisions2(self):
227 #alternate implementation using dulwich
231 #alternate implementation using dulwich
228 includes = [x[1][0] for x in self._parsed_refs.iteritems()
232 includes = [x[1][0] for x in self._parsed_refs.iteritems()
229 if x[1][1] != 'T']
233 if x[1][1] != 'T']
230 return [c.commit.id for c in self._repo.get_walker(include=includes)]
234 return [c.commit.id for c in self._repo.get_walker(include=includes)]
231
235
232 def _get_revision(self, revision):
236 def _get_revision(self, revision):
233 """
237 """
234 For the git backend we always return an integer here. This way we
238 For the git backend we always return an integer here. This way we
235 ensure that the changeset's revision attribute becomes an integer.
239 ensure that the changeset's revision attribute becomes an integer.
236 """
240 """
237 pattern = re.compile(r'^[[0-9a-fA-F]{12}|[0-9a-fA-F]{40}]$')
241 pattern = re.compile(r'^[[0-9a-fA-F]{12}|[0-9a-fA-F]{40}]$')
238 is_bstr = lambda o: isinstance(o, (str, unicode))
242 is_bstr = lambda o: isinstance(o, (str, unicode))
239 is_null = lambda o: len(o) == revision.count('0')
243 is_null = lambda o: len(o) == revision.count('0')
240
244
241 if len(self.revisions) == 0:
245 if len(self.revisions) == 0:
242 raise EmptyRepositoryError("There are no changesets yet")
246 raise EmptyRepositoryError("There are no changesets yet")
243
247
244 if revision in (None, '', 'tip', 'HEAD', 'head', -1):
248 if revision in (None, '', 'tip', 'HEAD', 'head', -1):
245 revision = self.revisions[-1]
249 revision = self.revisions[-1]
246
250
247 if ((is_bstr(revision) and revision.isdigit() and len(revision) < 12)
251 if ((is_bstr(revision) and revision.isdigit() and len(revision) < 12)
248 or isinstance(revision, int) or is_null(revision)):
252 or isinstance(revision, int) or is_null(revision)):
249 try:
253 try:
250 revision = self.revisions[int(revision)]
254 revision = self.revisions[int(revision)]
251 except:
255 except:
252 raise ChangesetDoesNotExistError("Revision %r does not exist "
256 raise ChangesetDoesNotExistError("Revision %r does not exist "
253 "for this repository %s" % (revision, self))
257 "for this repository %s" % (revision, self))
254
258
255 elif is_bstr(revision):
259 elif is_bstr(revision):
256 # get by branch/tag name
260 # get by branch/tag name
257 _ref_revision = self._parsed_refs.get(revision)
261 _ref_revision = self._parsed_refs.get(revision)
258 _tags_shas = self.tags.values()
262 _tags_shas = self.tags.values()
259 if _ref_revision: # and _ref_revision[1] in ['H', 'RH', 'T']:
263 if _ref_revision: # and _ref_revision[1] in ['H', 'RH', 'T']:
260 return _ref_revision[0]
264 return _ref_revision[0]
261
265
262 # maybe it's a tag ? we don't have them in self.revisions
266 # maybe it's a tag ? we don't have them in self.revisions
263 elif revision in _tags_shas:
267 elif revision in _tags_shas:
264 return _tags_shas[_tags_shas.index(revision)]
268 return _tags_shas[_tags_shas.index(revision)]
265
269
266 elif not pattern.match(revision) or revision not in self.revisions:
270 elif not pattern.match(revision) or revision not in self.revisions:
267 raise ChangesetDoesNotExistError("Revision %r does not exist "
271 raise ChangesetDoesNotExistError("Revision %r does not exist "
268 "for this repository %s" % (revision, self))
272 "for this repository %s" % (revision, self))
269
273
270 # Ensure we return full id
274 # Ensure we return full id
271 if not pattern.match(str(revision)):
275 if not pattern.match(str(revision)):
272 raise ChangesetDoesNotExistError("Given revision %r not recognized"
276 raise ChangesetDoesNotExistError("Given revision %r not recognized"
273 % revision)
277 % revision)
274 return revision
278 return revision
275
279
276 def _get_archives(self, archive_name='tip'):
280 def _get_archives(self, archive_name='tip'):
277
281
278 for i in [('zip', '.zip'), ('gz', '.tar.gz'), ('bz2', '.tar.bz2')]:
282 for i in [('zip', '.zip'), ('gz', '.tar.gz'), ('bz2', '.tar.bz2')]:
279 yield {"type": i[0], "extension": i[1], "node": archive_name}
283 yield {"type": i[0], "extension": i[1], "node": archive_name}
280
284
281 def _get_url(self, url):
285 def _get_url(self, url):
282 """
286 """
283 Returns a normalized url. If no scheme is given, it falls back to the
287 Returns a normalized url. If no scheme is given, it falls back to the
284 filesystem (``file:///``) scheme.
288 filesystem (``file:///``) scheme.
285 """
289 """
286 url = str(url)
290 url = str(url)
287 if url != 'default' and not '://' in url:
291 if url != 'default' and not '://' in url:
288 url = ':///'.join(('file', url))
292 url = ':///'.join(('file', url))
289 return url
293 return url
290
294
291 @LazyProperty
295 @LazyProperty
292 def name(self):
296 def name(self):
293 return os.path.basename(self.path)
297 return os.path.basename(self.path)
294
298
295 @LazyProperty
299 @LazyProperty
296 def last_change(self):
300 def last_change(self):
297 """
301 """
298 Returns last change made on this repository as datetime object
302 Returns last change made on this repository as datetime object
299 """
303 """
300 return date_fromtimestamp(self._get_mtime(), makedate()[1])
304 return date_fromtimestamp(self._get_mtime(), makedate()[1])
301
305
302 def _get_mtime(self):
306 def _get_mtime(self):
303 try:
307 try:
304 return time.mktime(self.get_changeset().date.timetuple())
308 return time.mktime(self.get_changeset().date.timetuple())
305 except RepositoryError:
309 except RepositoryError:
306 idx_loc = '' if self.bare else '.git'
310 idx_loc = '' if self.bare else '.git'
307 # fallback to filesystem
311 # fallback to filesystem
308 in_path = os.path.join(self.path, idx_loc, "index")
312 in_path = os.path.join(self.path, idx_loc, "index")
309 he_path = os.path.join(self.path, idx_loc, "HEAD")
313 he_path = os.path.join(self.path, idx_loc, "HEAD")
310 if os.path.exists(in_path):
314 if os.path.exists(in_path):
311 return os.stat(in_path).st_mtime
315 return os.stat(in_path).st_mtime
312 else:
316 else:
313 return os.stat(he_path).st_mtime
317 return os.stat(he_path).st_mtime
314
318
315 @LazyProperty
319 @LazyProperty
316 def description(self):
320 def description(self):
317 idx_loc = '' if self.bare else '.git'
321 idx_loc = '' if self.bare else '.git'
318 undefined_description = u'unknown'
322 undefined_description = u'unknown'
319 description_path = os.path.join(self.path, idx_loc, 'description')
323 description_path = os.path.join(self.path, idx_loc, 'description')
320 if os.path.isfile(description_path):
324 if os.path.isfile(description_path):
321 return safe_unicode(open(description_path).read())
325 return safe_unicode(open(description_path).read())
322 else:
326 else:
323 return undefined_description
327 return undefined_description
324
328
325 @LazyProperty
329 @LazyProperty
326 def contact(self):
330 def contact(self):
327 undefined_contact = u'Unknown'
331 undefined_contact = u'Unknown'
328 return undefined_contact
332 return undefined_contact
329
333
330 @property
334 @property
331 def branches(self):
335 def branches(self):
332 if not self.revisions:
336 if not self.revisions:
333 return {}
337 return {}
334 sortkey = lambda ctx: ctx[0]
338 sortkey = lambda ctx: ctx[0]
335 _branches = [(x[0], x[1][0])
339 _branches = [(x[0], x[1][0])
336 for x in self._parsed_refs.iteritems() if x[1][1] == 'H']
340 for x in self._parsed_refs.iteritems() if x[1][1] == 'H']
337 return OrderedDict(sorted(_branches, key=sortkey, reverse=False))
341 return OrderedDict(sorted(_branches, key=sortkey, reverse=False))
338
342
339 @LazyProperty
343 @LazyProperty
340 def tags(self):
344 def tags(self):
341 return self._get_tags()
345 return self._get_tags()
342
346
343 def _get_tags(self):
347 def _get_tags(self):
344 if not self.revisions:
348 if not self.revisions:
345 return {}
349 return {}
346
350
347 sortkey = lambda ctx: ctx[0]
351 sortkey = lambda ctx: ctx[0]
348 _tags = [(x[0], x[1][0])
352 _tags = [(x[0], x[1][0])
349 for x in self._parsed_refs.iteritems() if x[1][1] == 'T']
353 for x in self._parsed_refs.iteritems() if x[1][1] == 'T']
350 return OrderedDict(sorted(_tags, key=sortkey, reverse=True))
354 return OrderedDict(sorted(_tags, key=sortkey, reverse=True))
351
355
352 def tag(self, name, user, revision=None, message=None, date=None,
356 def tag(self, name, user, revision=None, message=None, date=None,
353 **kwargs):
357 **kwargs):
354 """
358 """
355 Creates and returns a tag for the given ``revision``.
359 Creates and returns a tag for the given ``revision``.
356
360
357 :param name: name for new tag
361 :param name: name for new tag
358 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
362 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
359 :param revision: changeset id for which new tag would be created
363 :param revision: changeset id for which new tag would be created
360 :param message: message of the tag's commit
364 :param message: message of the tag's commit
361 :param date: date of tag's commit
365 :param date: date of tag's commit
362
366
363 :raises TagAlreadyExistError: if tag with same name already exists
367 :raises TagAlreadyExistError: if tag with same name already exists
364 """
368 """
365 if name in self.tags:
369 if name in self.tags:
366 raise TagAlreadyExistError("Tag %s already exists" % name)
370 raise TagAlreadyExistError("Tag %s already exists" % name)
367 changeset = self.get_changeset(revision)
371 changeset = self.get_changeset(revision)
368 message = message or "Added tag %s for commit %s" % (name,
372 message = message or "Added tag %s for commit %s" % (name,
369 changeset.raw_id)
373 changeset.raw_id)
370 self._repo.refs["refs/tags/%s" % name] = changeset._commit.id
374 self._repo.refs["refs/tags/%s" % name] = changeset._commit.id
371
375
372 self._parsed_refs = self._get_parsed_refs()
376 self._parsed_refs = self._get_parsed_refs()
373 self.tags = self._get_tags()
377 self.tags = self._get_tags()
374 return changeset
378 return changeset
375
379
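A usage sketch for tag; the tag name and user string are placeholders and repo is an assumed GitRepository instance. TagAlreadyExistError is raised if the name is already taken:

changeset = repo.tag('v1.0.0', 'Joe Doe <joe.doe@example.com>',
                     revision=repo.revisions[-1],
                     message='Tagging the current tip')
print changeset.raw_id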
376 def remove_tag(self, name, user, message=None, date=None):
380 def remove_tag(self, name, user, message=None, date=None):
377 """
381 """
378 Removes tag with the given ``name``.
382 Removes tag with the given ``name``.
379
383
380 :param name: name of the tag to be removed
384 :param name: name of the tag to be removed
381 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
385 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
382 :param message: message of the tag's removal commit
386 :param message: message of the tag's removal commit
383 :param date: date of tag's removal commit
387 :param date: date of tag's removal commit
384
388
385 :raises TagDoesNotExistError: if tag with given name does not exist
389 :raises TagDoesNotExistError: if tag with given name does not exist
386 """
390 """
387 if name not in self.tags:
391 if name not in self.tags:
388 raise TagDoesNotExistError("Tag %s does not exist" % name)
392 raise TagDoesNotExistError("Tag %s does not exist" % name)
389 tagpath = posixpath.join(self._repo.refs.path, 'refs', 'tags', name)
393 tagpath = posixpath.join(self._repo.refs.path, 'refs', 'tags', name)
390 try:
394 try:
391 os.remove(tagpath)
395 os.remove(tagpath)
392 self._parsed_refs = self._get_parsed_refs()
396 self._parsed_refs = self._get_parsed_refs()
393 self.tags = self._get_tags()
397 self.tags = self._get_tags()
394 except OSError, e:
398 except OSError, e:
395 raise RepositoryError(e.strerror)
399 raise RepositoryError(e.strerror)
396
400
397 @LazyProperty
401 @LazyProperty
398 def _parsed_refs(self):
402 def _parsed_refs(self):
399 return self._get_parsed_refs()
403 return self._get_parsed_refs()
400
404
401 def _get_parsed_refs(self):
405 def _get_parsed_refs(self):
402 refs = self._repo.get_refs()
406 refs = self._repo.get_refs()
403 keys = [('refs/heads/', 'H'),
407 keys = [('refs/heads/', 'H'),
404 ('refs/remotes/origin/', 'RH'),
408 ('refs/remotes/origin/', 'RH'),
405 ('refs/tags/', 'T')]
409 ('refs/tags/', 'T')]
406 _refs = {}
410 _refs = {}
407 for ref, sha in refs.iteritems():
411 for ref, sha in refs.iteritems():
408 for k, type_ in keys:
412 for k, type_ in keys:
409 if ref.startswith(k):
413 if ref.startswith(k):
410 _key = ref[len(k):]
414 _key = ref[len(k):]
411 if type_ == 'T':
415 if type_ == 'T':
412 obj = self._repo.get_object(sha)
416 obj = self._repo.get_object(sha)
413 if isinstance(obj, Tag):
417 if isinstance(obj, Tag):
414 sha = self._repo.get_object(sha).object[1]
418 sha = self._repo.get_object(sha).object[1]
415 _refs[_key] = [sha, type_]
419 _refs[_key] = [sha, type_]
416 break
420 break
417 return _refs
421 return _refs
418
422
419 def _heads(self, reverse=False):
423 def _heads(self, reverse=False):
420 refs = self._repo.get_refs()
424 refs = self._repo.get_refs()
421 heads = {}
425 heads = {}
422
426
423 for key, val in refs.items():
427 for key, val in refs.items():
424 for ref_key in ['refs/heads/', 'refs/remotes/origin/']:
428 for ref_key in ['refs/heads/', 'refs/remotes/origin/']:
425 if key.startswith(ref_key):
429 if key.startswith(ref_key):
426 n = key[len(ref_key):]
430 n = key[len(ref_key):]
427 if n not in ['HEAD']:
431 if n not in ['HEAD']:
428 heads[n] = val
432 heads[n] = val
429
433
430 return heads if reverse else dict((y, x) for x, y in heads.iteritems())
434 return heads if reverse else dict((y, x) for x, y in heads.iteritems())
431
435
432 def get_changeset(self, revision=None):
436 def get_changeset(self, revision=None):
433 """
437 """
434 Returns ``GitChangeset`` object representing commit from git repository
438 Returns ``GitChangeset`` object representing commit from git repository
435 at the given revision or head (most recent commit) if None given.
439 at the given revision or head (most recent commit) if None given.
436 """
440 """
437 if isinstance(revision, GitChangeset):
441 if isinstance(revision, GitChangeset):
438 return revision
442 return revision
439 revision = self._get_revision(revision)
443 revision = self._get_revision(revision)
440 changeset = GitChangeset(repository=self, revision=revision)
444 changeset = GitChangeset(repository=self, revision=revision)
441 return changeset
445 return changeset
442
446
443 def get_changesets(self, start=None, end=None, start_date=None,
447 def get_changesets(self, start=None, end=None, start_date=None,
444 end_date=None, branch_name=None, reverse=False):
448 end_date=None, branch_name=None, reverse=False):
445 """
449 """
446 Returns iterator of ``GitChangeset`` objects from start to end (both
450 Returns iterator of ``GitChangeset`` objects from start to end (both
447 are inclusive), in ascending date order (unless ``reverse`` is set).
451 are inclusive), in ascending date order (unless ``reverse`` is set).
448
452
449 :param start: changeset ID, as str; first returned changeset
453 :param start: changeset ID, as str; first returned changeset
450 :param end: changeset ID, as str; last returned changeset
454 :param end: changeset ID, as str; last returned changeset
451 :param start_date: if specified, changesets with commit date less than
455 :param start_date: if specified, changesets with commit date less than
452 ``start_date`` would be filtered out from returned set
456 ``start_date`` would be filtered out from returned set
453 :param end_date: if specified, changesets with commit date greater than
457 :param end_date: if specified, changesets with commit date greater than
454 ``end_date`` would be filtered out from returned set
458 ``end_date`` would be filtered out from returned set
455 :param branch_name: if specified, changesets not reachable from given
459 :param branch_name: if specified, changesets not reachable from given
456 branch would be filtered out from returned set
460 branch would be filtered out from returned set
457 :param reverse: if ``True``, returned generator would be reversed
461 :param reverse: if ``True``, returned generator would be reversed
458 (meaning that returned changesets would have descending date order)
462 (meaning that returned changesets would have descending date order)
459
463
460 :raise BranchDoesNotExistError: If given ``branch_name`` does not
464 :raise BranchDoesNotExistError: If given ``branch_name`` does not
461 exist.
465 exist.
462 :raise ChangesetDoesNotExistError: If changeset for given ``start`` or
466 :raise ChangesetDoesNotExistError: If changeset for given ``start`` or
463 ``end`` could not be found.
467 ``end`` could not be found.
464
468
465 """
469 """
466 if branch_name and branch_name not in self.branches:
470 if branch_name and branch_name not in self.branches:
467 raise BranchDoesNotExistError("Branch '%s' not found" \
471 raise BranchDoesNotExistError("Branch '%s' not found" \
468 % branch_name)
472 % branch_name)
469 # %H in the format string means the (full) commit hash; initial hashes
473 # %H in the format string means the (full) commit hash; initial hashes
470 # are retrieved in ascending date order
474 # are retrieved in ascending date order
471 cmd_template = 'log --date-order --reverse --pretty=format:"%H"'
475 cmd_template = 'log --date-order --reverse --pretty=format:"%H"'
472 cmd_params = {}
476 cmd_params = {}
473 if start_date:
477 if start_date:
474 cmd_template += ' --since "$since"'
478 cmd_template += ' --since "$since"'
475 cmd_params['since'] = start_date.strftime('%m/%d/%y %H:%M:%S')
479 cmd_params['since'] = start_date.strftime('%m/%d/%y %H:%M:%S')
476 if end_date:
480 if end_date:
477 cmd_template += ' --until "$until"'
481 cmd_template += ' --until "$until"'
478 cmd_params['until'] = end_date.strftime('%m/%d/%y %H:%M:%S')
482 cmd_params['until'] = end_date.strftime('%m/%d/%y %H:%M:%S')
479 if branch_name:
483 if branch_name:
480 cmd_template += ' $branch_name'
484 cmd_template += ' $branch_name'
481 cmd_params['branch_name'] = branch_name
485 cmd_params['branch_name'] = branch_name
482 else:
486 else:
483 cmd_template += ' --all'
487 cmd_template += ' --all'
484
488
485 cmd = Template(cmd_template).safe_substitute(**cmd_params)
489 cmd = Template(cmd_template).safe_substitute(**cmd_params)
486 revs = self.run_git_command(cmd)[0].splitlines()
490 revs = self.run_git_command(cmd)[0].splitlines()
487 start_pos = 0
491 start_pos = 0
488 end_pos = len(revs)
492 end_pos = len(revs)
489 if start:
493 if start:
490 _start = self._get_revision(start)
494 _start = self._get_revision(start)
491 try:
495 try:
492 start_pos = revs.index(_start)
496 start_pos = revs.index(_start)
493 except ValueError:
497 except ValueError:
494 pass
498 pass
495
499
496 if end is not None:
500 if end is not None:
497 _end = self._get_revision(end)
501 _end = self._get_revision(end)
498 try:
502 try:
499 end_pos = revs.index(_end)
503 end_pos = revs.index(_end)
500 except ValueError:
504 except ValueError:
501 pass
505 pass
502
506
503 if None not in [start, end] and start_pos > end_pos:
507 if None not in [start, end] and start_pos > end_pos:
504 raise RepositoryError('start cannot be after end')
508 raise RepositoryError('start cannot be after end')
505
509
506 if end_pos is not None:
510 if end_pos is not None:
507 end_pos += 1
511 end_pos += 1
508
512
509 revs = revs[start_pos:end_pos]
513 revs = revs[start_pos:end_pos]
510 if reverse:
514 if reverse:
511 revs = reversed(revs)
515 revs = reversed(revs)
512 for rev in revs:
516 for rev in revs:
513 yield self.get_changeset(rev)
517 yield self.get_changeset(rev)
514
518
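A sketch of iterating changesets with the date and branch filters described above; the dates and branch name are placeholders and repo is an assumed GitRepository instance:

from datetime import datetime

for cs in repo.get_changesets(start_date=datetime(2012, 1, 1),
                              end_date=datetime(2012, 12, 31),
                              branch_name='master'):
    print cs.raw_id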
515 def get_diff(self, rev1, rev2, path=None, ignore_whitespace=False,
519 def get_diff(self, rev1, rev2, path=None, ignore_whitespace=False,
516 context=3):
520 context=3):
517 """
521 """
518 Returns (git like) *diff*, as plain text. Shows changes introduced by
522 Returns (git like) *diff*, as plain text. Shows changes introduced by
519 ``rev2`` since ``rev1``.
523 ``rev2`` since ``rev1``.
520
524
521 :param rev1: Entry point from which diff is shown. Can be
525 :param rev1: Entry point from which diff is shown. Can be
522 ``self.EMPTY_CHANGESET`` - in this case, patch showing all
526 ``self.EMPTY_CHANGESET`` - in this case, patch showing all
523 the changes since empty state of the repository until ``rev2``
527 the changes since empty state of the repository until ``rev2``
524 :param rev2: Until which revision changes should be shown.
528 :param rev2: Until which revision changes should be shown.
525 :param ignore_whitespace: If set to ``True``, would not show whitespace
529 :param ignore_whitespace: If set to ``True``, would not show whitespace
526 changes. Defaults to ``False``.
530 changes. Defaults to ``False``.
527 :param context: How many lines before/after changed lines should be
531 :param context: How many lines before/after changed lines should be
528 shown. Defaults to ``3``.
532 shown. Defaults to ``3``.
529 """
533 """
530 flags = ['-U%s' % context, '--full-index', '--binary', '-p', '-M', '--abbrev=40']
534 flags = ['-U%s' % context, '--full-index', '--binary', '-p', '-M', '--abbrev=40']
531 if ignore_whitespace:
535 if ignore_whitespace:
532 flags.append('-w')
536 flags.append('-w')
533
537
534 if hasattr(rev1, 'raw_id'):
538 if hasattr(rev1, 'raw_id'):
535 rev1 = getattr(rev1, 'raw_id')
539 rev1 = getattr(rev1, 'raw_id')
536
540
537 if hasattr(rev2, 'raw_id'):
541 if hasattr(rev2, 'raw_id'):
538 rev2 = getattr(rev2, 'raw_id')
542 rev2 = getattr(rev2, 'raw_id')
539
543
540 if rev1 == self.EMPTY_CHANGESET:
544 if rev1 == self.EMPTY_CHANGESET:
541 rev2 = self.get_changeset(rev2).raw_id
545 rev2 = self.get_changeset(rev2).raw_id
542 cmd = ' '.join(['show'] + flags + [rev2])
546 cmd = ' '.join(['show'] + flags + [rev2])
543 else:
547 else:
544 rev1 = self.get_changeset(rev1).raw_id
548 rev1 = self.get_changeset(rev1).raw_id
545 rev2 = self.get_changeset(rev2).raw_id
549 rev2 = self.get_changeset(rev2).raw_id
546 cmd = ' '.join(['diff'] + flags + [rev1, rev2])
550 cmd = ' '.join(['diff'] + flags + [rev1, rev2])
547
551
548 if path:
552 if path:
549 cmd += ' -- "%s"' % path
553 cmd += ' -- "%s"' % path
550
554
551 stdout, stderr = self.run_git_command(cmd)
555 stdout, stderr = self.run_git_command(cmd)
552 # If we used 'show' command, strip first few lines (until actual diff
556 # If we used 'show' command, strip first few lines (until actual diff
553 # starts)
557 # starts)
554 if rev1 == self.EMPTY_CHANGESET:
558 if rev1 == self.EMPTY_CHANGESET:
555 lines = stdout.splitlines()
559 lines = stdout.splitlines()
556 x = 0
560 x = 0
557 for line in lines:
561 for line in lines:
558 if line.startswith('diff'):
562 if line.startswith('diff'):
559 break
563 break
560 x += 1
564 x += 1
561 # Append a newline just like the 'diff' command does
565 # Append a newline just like the 'diff' command does
562 stdout = '\n'.join(lines[x:]) + '\n'
566 stdout = '\n'.join(lines[x:]) + '\n'
563 return stdout
567 return stdout
564
568
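A sketch for get_diff; the revisions come from repo.revisions and the path is a placeholder. Passing EMPTY_CHANGESET as rev1 instead would show the full content introduced by rev2:

rev1, rev2 = repo.revisions[0], repo.revisions[-1]
patch = repo.get_diff(rev1, rev2, path='setup.py',
                      ignore_whitespace=True, context=5)
print patch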
565 @LazyProperty
569 @LazyProperty
566 def in_memory_changeset(self):
570 def in_memory_changeset(self):
567 """
571 """
568 Returns ``GitInMemoryChangeset`` object for this repository.
572 Returns ``GitInMemoryChangeset`` object for this repository.
569 """
573 """
570 return GitInMemoryChangeset(self)
574 return GitInMemoryChangeset(self)
571
575
572 def clone(self, url, update_after_clone=True, bare=False):
576 def clone(self, url, update_after_clone=True, bare=False):
573 """
577 """
574 Tries to clone changes from external location.
578 Tries to clone changes from external location.
575
579
576 :param update_after_clone: If set to ``False``, git won't checkout
580 :param update_after_clone: If set to ``False``, git won't checkout
577 working directory
581 working directory
578 :param bare: If set to ``True``, repository would be cloned into
582 :param bare: If set to ``True``, repository would be cloned into
579 *bare* git repository (no working directory at all).
583 *bare* git repository (no working directory at all).
580 """
584 """
581 url = self._get_url(url)
585 url = self._get_url(url)
582 cmd = ['clone']
586 cmd = ['clone']
583 if bare:
587 if bare:
584 cmd.append('--bare')
588 cmd.append('--bare')
585 elif not update_after_clone:
589 elif not update_after_clone:
586 cmd.append('--no-checkout')
590 cmd.append('--no-checkout')
587 cmd += ['--', '"%s"' % url, '"%s"' % self.path]
591 cmd += ['--', '"%s"' % url, '"%s"' % self.path]
588 cmd = ' '.join(cmd)
592 cmd = ' '.join(cmd)
589 # If error occurs run_git_command raises RepositoryError already
593 # If error occurs run_git_command raises RepositoryError already
590 self.run_git_command(cmd)
594 self.run_git_command(cmd)
591
595
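A sketch of creating a repository by cloning; the URL and target path are placeholders and GitRepository is the class defined in this module. Per _get_repo above, create=True is required whenever src_url is given:

mirror = GitRepository('/srv/repos/mirror.git', create=True,
                       src_url='https://example.com/project.git',
                       bare=True)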
592 def pull(self, url):
596 def pull(self, url):
593 """
597 """
594 Tries to pull changes from external location.
598 Tries to pull changes from external location.
595 """
599 """
596 url = self._get_url(url)
600 url = self._get_url(url)
597 cmd = ['pull']
601 cmd = ['pull']
598 cmd.append("--ff-only")
602 cmd.append("--ff-only")
599 cmd.append(url)
603 cmd.append(url)
600 cmd = ' '.join(cmd)
604 cmd = ' '.join(cmd)
601 # If error occurs run_git_command raises RepositoryError already
605 # If error occurs run_git_command raises RepositoryError already
602 self.run_git_command(cmd)
606 self.run_git_command(cmd)
603
607
604 def fetch(self, url):
608 def fetch(self, url):
605 """
609 """
606 Tries to fetch changes from an external location.
610 Tries to fetch changes from an external location.
607 """
611 """
608 url = self._get_url(url)
612 url = self._get_url(url)
609 so, se = self.run_git_command('ls-remote -h %s' % url)
613 so, se = self.run_git_command('ls-remote -h %s' % url)
610 refs = []
614 refs = []
611 for line in (x for x in so.splitlines()):
615 for line in (x for x in so.splitlines()):
612 sha, ref = line.split('\t')
616 sha, ref = line.split('\t')
613 refs.append(ref)
617 refs.append(ref)
614 refs = ' '.join(('+%s:%s' % (r, r) for r in refs))
618 refs = ' '.join(('+%s:%s' % (r, r) for r in refs))
615 cmd = '''fetch %s -- %s''' % (url, refs)
619 cmd = '''fetch %s -- %s''' % (url, refs)
616 self.run_git_command(cmd)
620 self.run_git_command(cmd)
617
621
618 @LazyProperty
622 @LazyProperty
619 def workdir(self):
623 def workdir(self):
620 """
624 """
621 Returns ``Workdir`` instance for this repository.
625 Returns ``Workdir`` instance for this repository.
622 """
626 """
623 return GitWorkdir(self)
627 return GitWorkdir(self)
624
628
625 def get_config_value(self, section, name, config_file=None):
629 def get_config_value(self, section, name, config_file=None):
626 """
630 """
627 Returns configuration value for a given [``section``] and ``name``.
631 Returns configuration value for a given [``section``] and ``name``.
628
632
629 :param section: Section we want to retrieve value from
633 :param section: Section we want to retrieve value from
630 :param name: Name of configuration we want to retrieve
634 :param name: Name of configuration we want to retrieve
631 :param config_file: A path to file which should be used to retrieve
635 :param config_file: A path to file which should be used to retrieve
632 configuration from (might also be a list of file paths)
636 configuration from (might also be a list of file paths)
633 """
637 """
634 if config_file is None:
638 if config_file is None:
635 config_file = []
639 config_file = []
636 elif isinstance(config_file, basestring):
640 elif isinstance(config_file, basestring):
637 config_file = [config_file]
641 config_file = [config_file]
638
642
639 def gen_configs():
643 def gen_configs():
640 for path in config_file + self._config_files:
644 for path in config_file + self._config_files:
641 try:
645 try:
642 yield ConfigFile.from_path(path)
646 yield ConfigFile.from_path(path)
643 except (IOError, OSError, ValueError):
647 except (IOError, OSError, ValueError):
644 continue
648 continue
645
649
646 for config in gen_configs():
650 for config in gen_configs():
647 try:
651 try:
648 return config.get(section, name)
652 return config.get(section, name)
649 except KeyError:
653 except KeyError:
650 continue
654 continue
651 return None
655 return None
652
656
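A sketch for the configuration lookup above; the section/name pairs follow gitconfig conventions, repo is an assumed GitRepository instance and the extra config path is a placeholder:

email = repo.get_config_value('user', 'email')
compression = repo.get_config_value('core', 'compression',
                                    config_file='/etc/gitconfig')
print email, compression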
653 def get_user_name(self, config_file=None):
657 def get_user_name(self, config_file=None):
654 """
658 """
655 Returns user's name from global configuration file.
659 Returns user's name from global configuration file.
656
660
657 :param config_file: A path to file which should be used to retrieve
661 :param config_file: A path to file which should be used to retrieve
658 configuration from (might also be a list of file paths)
662 configuration from (might also be a list of file paths)
659 """
663 """
660 return self.get_config_value('user', 'name', config_file)
664 return self.get_config_value('user', 'name', config_file)
661
665
662 def get_user_email(self, config_file=None):
666 def get_user_email(self, config_file=None):
663 """
667 """
664 Returns user's email from global configuration file.
668 Returns user's email from global configuration file.
665
669
666 :param config_file: A path to file which should be used to retrieve
670 :param config_file: A path to file which should be used to retrieve
667 configuration from (might also be a list of file paths)
671 configuration from (might also be a list of file paths)
668 """
672 """
669 return self.get_config_value('user', 'email', config_file)
673 return self.get_config_value('user', 'email', config_file)