##// END OF EJS Templates
git executable is now configurable via .ini files
marcink -
r3376:e67b2ef0 beta
parent child Browse files
Show More
@@ -1,442 +1,447 b''
1 1 ################################################################################
2 2 ################################################################################
3 3 # RhodeCode - Pylons environment configuration #
4 4 # #
5 5 # The %(here)s variable will be replaced with the parent directory of this file#
6 6 ################################################################################
7 7
8 8 [DEFAULT]
9 9 debug = true
10 10 pdebug = false
11 11 ################################################################################
12 12 ## Uncomment and replace with the address which should receive ##
13 13 ## any error reports after application crash ##
14 14 ## Additionally those settings will be used by RhodeCode mailing system ##
15 15 ################################################################################
16 16 #email_to = admin@localhost
17 17 #error_email_from = paste_error@localhost
18 18 #app_email_from = rhodecode-noreply@localhost
19 19 #error_message =
20 20 #email_prefix = [RhodeCode]
21 21
22 22 #smtp_server = mail.server.com
23 23 #smtp_username =
24 24 #smtp_password =
25 25 #smtp_port =
26 26 #smtp_use_tls = false
27 27 #smtp_use_ssl = true
28 28 # Specify available auth parameters here (e.g. LOGIN PLAIN CRAM-MD5, etc.)
29 29 #smtp_auth =
30 30
31 31 [server:main]
32 32 ## PASTE
33 33 ##nr of threads to spawn
34 34 #threadpool_workers = 5
35 35
36 36 ##max request before thread respawn
37 37 #threadpool_max_requests = 10
38 38
39 39 ##option to use threads instead of processes
40 40 #use_threadpool = true
41 41
42 42 #use = egg:Paste#http
43 43
44 44 #WAITRESS
45 45 threads = 5
46 #100GB
47 max_request_body_size = 107374182400
46 48 use = egg:waitress#main
47 49
48 50 host = 0.0.0.0
49 51 port = 5000
50 52
51 53 [filter:proxy-prefix]
52 54 # prefix middleware for rc
53 55 use = egg:PasteDeploy#prefix
54 56 prefix = /<your-prefix>
55 57
56 58 [app:main]
57 59 use = egg:rhodecode
58 60 #filter-with = proxy-prefix
59 61 full_stack = true
60 62 static_files = true
61 63 # Optional Languages
62 64 # en, fr, ja, pt_BR, zh_CN, zh_TW, pl
63 65 lang = en
64 66 cache_dir = %(here)s/data
65 67 index_dir = %(here)s/data/index
66 68 app_instance_uuid = rc-develop
67 69 cut_off_limit = 256000
68 70 vcs_full_cache = True
69 71 # force https in RhodeCode, fixes https redirects, assumes it's always https
70 72 force_https = false
71 73 # use Strict-Transport-Security headers
72 74 use_htsts = false
73 75 commit_parse_limit = 25
74 76 # number of items displayed in lightweight dashboard before paginating
75 77 dashboard_items = 100
76 78 use_gravatar = true
77 79
80 # path to git executable
81 git_path = git
82
78 83 ## RSS feed options
79 84
80 85 rss_cut_off_limit = 256000
81 86 rss_items_per_page = 10
82 87 rss_include_diff = false
83 88
84 89
85 90 ## alternative_gravatar_url allows you to use your own avatar server application
86 91 ## the following parts of the URL will be replaced
87 92 ## {email} user email
88 93 ## {md5email} md5 hash of the user email (like at gravatar.com)
89 94 ## {size} size of the image that is expected from the server application
90 95 ## {scheme} http/https from RhodeCode server
91 96 ## {netloc} network location from RhodeCode server
92 97 #alternative_gravatar_url = http://myavatarserver.com/getbyemail/{email}/{size}
93 98 #alternative_gravatar_url = http://myavatarserver.com/getbymd5/{md5email}?s={size}
94 99
95 100 container_auth_enabled = false
96 101 proxypass_auth_enabled = false
97 102 ## default encoding used to convert from and to unicode
98 103 ## can also be a comma separated list of encodings in case of mixed encodings
99 104 default_encoding = utf8
100 105
101 106 ## overwrite schema of clone url
102 107 ## available vars:
103 108 ## scheme - http/https
104 109 ## user - current user
105 110 ## pass - password
106 111 ## netloc - network location
107 112 ## path - usually repo_name
108 113
109 114 #clone_uri = {scheme}://{user}{pass}{netloc}{path}
110 115
111 116 ## issue tracking mapping for commits messages
112 117 ## comment out issue_pat, issue_server, issue_prefix to enable
113 118
114 119 ## pattern to get the issues from commit messages
115 120 ## default one used here is #<numbers> with a regex passive group for `#`
116 121 ## {id} will be all groups matched from this pattern
117 122
118 123 issue_pat = (?:\s*#)(\d+)
119 124
120 125 ## server url to the issue, each {id} will be replaced with match
121 126 ## fetched from the regex and {repo} is replaced with full repository name
122 127 ## including groups {repo_name} is replaced with just name of repo
123 128
124 129 issue_server_link = https://myissueserver.com/{repo}/issue/{id}
125 130
126 131 ## prefix to add to link to indicate it's an url
127 132 ## #314 will be replaced by <issue_prefix><id>
128 133
129 134 issue_prefix = #
130 135
131 136 ## issue_pat, issue_server_link, issue_prefix can have suffixes to specify
132 137 ## multiple patterns, to other issues server, wiki or others
133 138 ## below an example how to create a wiki pattern
134 139 # #wiki-some-id -> https://mywiki.com/some-id
135 140
136 141 #issue_pat_wiki = (?:wiki-)(.+)
137 142 #issue_server_link_wiki = https://mywiki.com/{id}
138 143 #issue_prefix_wiki = WIKI-
139 144
140 145
141 146 ## instance-id prefix
142 147 ## a prefix key for this instance used for cache invalidation when running
143 148 ## multiple instances of rhodecode, make sure it's globally unique for
144 149 ## all running rhodecode instances. Leave empty if you don't use it
145 150 instance_id =
146 151
147 152 ## alternative return HTTP header for failed authentication. Default HTTP
148 153 ## response is 401 HTTPUnauthorized. Currently HG clients have troubles with
149 154 ## handling that. Set this variable to 403 to return HTTPForbidden
150 155 auth_ret_code =
151 156
152 157 ####################################
153 158 ### CELERY CONFIG ####
154 159 ####################################
155 160 use_celery = false
156 161 broker.host = localhost
157 162 broker.vhost = rabbitmqhost
158 163 broker.port = 5672
159 164 broker.user = rabbitmq
160 165 broker.password = qweqwe
161 166
162 167 celery.imports = rhodecode.lib.celerylib.tasks
163 168
164 169 celery.result.backend = amqp
165 170 celery.result.dburi = amqp://
166 171 celery.result.serialier = json
167 172
168 173 #celery.send.task.error.emails = true
169 174 #celery.amqp.task.result.expires = 18000
170 175
171 176 celeryd.concurrency = 2
172 177 #celeryd.log.file = celeryd.log
173 178 celeryd.log.level = debug
174 179 celeryd.max.tasks.per.child = 1
175 180
176 181 #tasks will never be sent to the queue, but executed locally instead.
177 182 celery.always.eager = false
178 183
179 184 ####################################
180 185 ### BEAKER CACHE ####
181 186 ####################################
182 187 beaker.cache.data_dir=%(here)s/data/cache/data
183 188 beaker.cache.lock_dir=%(here)s/data/cache/lock
184 189
185 190 beaker.cache.regions=super_short_term,short_term,long_term,sql_cache_short,sql_cache_med,sql_cache_long
186 191
187 192 beaker.cache.super_short_term.type=memory
188 193 beaker.cache.super_short_term.expire=10
189 194 beaker.cache.super_short_term.key_length = 256
190 195
191 196 beaker.cache.short_term.type=memory
192 197 beaker.cache.short_term.expire=60
193 198 beaker.cache.short_term.key_length = 256
194 199
195 200 beaker.cache.long_term.type=memory
196 201 beaker.cache.long_term.expire=36000
197 202 beaker.cache.long_term.key_length = 256
198 203
199 204 beaker.cache.sql_cache_short.type=memory
200 205 beaker.cache.sql_cache_short.expire=10
201 206 beaker.cache.sql_cache_short.key_length = 256
202 207
203 208 beaker.cache.sql_cache_med.type=memory
204 209 beaker.cache.sql_cache_med.expire=360
205 210 beaker.cache.sql_cache_med.key_length = 256
206 211
207 212 beaker.cache.sql_cache_long.type=file
208 213 beaker.cache.sql_cache_long.expire=3600
209 214 beaker.cache.sql_cache_long.key_length = 256
210 215
211 216 ####################################
212 217 ### BEAKER SESSION ####
213 218 ####################################
214 219 ## Type of storage used for the session, current types are
215 220 ## dbm, file, memcached, database, and memory.
216 221 ## The storage uses the Container API
217 222 ## that is also used by the cache system.
218 223
219 224 ## db session ##
220 225 #beaker.session.type = ext:database
221 226 #beaker.session.sa.url = postgresql://postgres:qwe@localhost/rhodecode
222 227 #beaker.session.table_name = db_session
223 228
224 229 ## encrypted cookie client side session, good for many instances ##
225 230 #beaker.session.type = cookie
226 231
227 232 ## file based cookies (default) ##
228 233 #beaker.session.type = file
229 234
230 235
231 236 beaker.session.key = rhodecode
232 237 ## secure cookie requires AES python libraries ##
233 238 #beaker.session.encrypt_key = g654dcno0-9873jhgfreyu
234 239 #beaker.session.validate_key = 9712sds2212c--zxc123
235 240 ## sets session as invalid if it hasn't been accessed for given amount of time
236 241 beaker.session.timeout = 2592000
237 242 beaker.session.httponly = true
238 243 #beaker.session.cookie_path = /<your-prefix>
239 244
240 245 ## uncomment for https secure cookie ##
241 246 beaker.session.secure = false
242 247
243 248 ## auto save the session to not to use .save() ##
244 249 beaker.session.auto = False
245 250
246 251 ## default cookie expiration time in seconds `true` expire at browser close ##
247 252 #beaker.session.cookie_expires = 3600
248 253
249 254
250 255 ############################
251 256 ## ERROR HANDLING SYSTEMS ##
252 257 ############################
253 258
254 259 ####################
255 260 ### [errormator] ###
256 261 ####################
257 262
258 263 # Errormator is tailored to work with RhodeCode, see
259 264 # http://errormator.com for details how to obtain an account
260 265 # you must install python package `errormator_client` to make it work
261 266
262 267 # errormator enabled
263 268 errormator = true
264 269
265 270 errormator.server_url = https://api.errormator.com
266 271 errormator.api_key = YOUR_API_KEY
267 272
268 273 # TWEAK AMOUNT OF INFO SENT HERE
269 274
270 275 # enables 404 error logging (default False)
271 276 errormator.report_404 = false
272 277
273 278 # time in seconds after request is considered being slow (default 1)
274 279 errormator.slow_request_time = 1
275 280
276 281 # record slow requests in application
277 282 # (needs to be enabled for slow datastore recording and time tracking)
278 283 errormator.slow_requests = true
279 284
280 285 # enable hooking to application loggers
281 286 # errormator.logging = true
282 287
283 288 # minimum log level for log capture
284 289 # errormator.logging.level = WARNING
285 290
286 291 # send logs only from erroneous/slow requests
287 292 # (saves API quota for intensive logging)
288 293 errormator.logging_on_error = false
289 294
290 295 # list of additional keywords that should be grabbed from environ object
291 296 # can be string with comma separated list of words in lowercase
292 297 # (by default client will always send following info:
293 298 # 'REMOTE_USER', 'REMOTE_ADDR', 'SERVER_NAME', 'CONTENT_TYPE' + all keys that
294 299 # start with HTTP* this list can be extended with additional keywords here
295 300 errormator.environ_keys_whitelist =
296 301
297 302
298 303 # list of keywords that should be blanked from request object
299 304 # can be string with comma separated list of words in lowercase
300 305 # (by default client will always blank keys that contain following words
301 306 # 'password', 'passwd', 'pwd', 'auth_tkt', 'secret', 'csrf'
302 307 # this list can be extended with additional keywords set here
303 308 errormator.request_keys_blacklist =
304 309
305 310
306 311 # list of namespaces that should be ignored when gathering log entries
307 312 # can be string with comma separated list of namespaces
308 313 # (by default the client ignores own entries: errormator_client.client)
309 314 errormator.log_namespace_blacklist =
310 315
311 316
312 317 ################
313 318 ### [sentry] ###
314 319 ################
315 320
316 321 # sentry is an alternative open source error aggregator
317 322 # you must install python packages `sentry` and `raven` to enable
318 323
319 324 sentry.dsn = YOUR_DNS
320 325 sentry.servers =
321 326 sentry.name =
322 327 sentry.key =
323 328 sentry.public_key =
324 329 sentry.secret_key =
325 330 sentry.project =
326 331 sentry.site =
327 332 sentry.include_paths =
328 333 sentry.exclude_paths =
329 334
330 335
331 336 ################################################################################
332 337 ## WARNING: *THE LINE BELOW MUST BE UNCOMMENTED ON A PRODUCTION ENVIRONMENT* ##
333 338 ## Debug mode will enable the interactive debugging tool, allowing ANYONE to ##
334 339 ## execute malicious code after an exception is raised. ##
335 340 ################################################################################
336 341 #set debug = false
337 342
338 343 ##################################
339 344 ### LOGVIEW CONFIG ###
340 345 ##################################
341 346 logview.sqlalchemy = #faa
342 347 logview.pylons.templating = #bfb
343 348 logview.pylons.util = #eee
344 349
345 350 #########################################################
346 351 ### DB CONFIGS - EACH DB WILL HAVE IT'S OWN CONFIG ###
347 352 #########################################################
348 353 #sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode.db
349 354 sqlalchemy.db1.url = postgresql://postgres:qwe@localhost/rhodecode
350 355 sqlalchemy.db1.echo = false
351 356 sqlalchemy.db1.pool_recycle = 3600
352 357 sqlalchemy.db1.convert_unicode = true
353 358
354 359 ################################
355 360 ### LOGGING CONFIGURATION ####
356 361 ################################
357 362 [loggers]
358 363 keys = root, routes, rhodecode, sqlalchemy, beaker, templates, whoosh_indexer
359 364
360 365 [handlers]
361 366 keys = console, console_sql
362 367
363 368 [formatters]
364 369 keys = generic, color_formatter, color_formatter_sql
365 370
366 371 #############
367 372 ## LOGGERS ##
368 373 #############
369 374 [logger_root]
370 375 level = NOTSET
371 376 handlers = console
372 377
373 378 [logger_routes]
374 379 level = DEBUG
375 380 handlers =
376 381 qualname = routes.middleware
377 382 # "level = DEBUG" logs the route matched and routing variables.
378 383 propagate = 1
379 384
380 385 [logger_beaker]
381 386 level = DEBUG
382 387 handlers =
383 388 qualname = beaker.container
384 389 propagate = 1
385 390
386 391 [logger_templates]
387 392 level = INFO
388 393 handlers =
389 394 qualname = pylons.templating
390 395 propagate = 1
391 396
392 397 [logger_rhodecode]
393 398 level = DEBUG
394 399 handlers =
395 400 qualname = rhodecode
396 401 propagate = 1
397 402
398 403 [logger_sqlalchemy]
399 404 level = INFO
400 405 handlers = console_sql
401 406 qualname = sqlalchemy.engine
402 407 propagate = 0
403 408
404 409 [logger_whoosh_indexer]
405 410 level = DEBUG
406 411 handlers =
407 412 qualname = whoosh_indexer
408 413 propagate = 1
409 414
410 415 ##############
411 416 ## HANDLERS ##
412 417 ##############
413 418
414 419 [handler_console]
415 420 class = StreamHandler
416 421 args = (sys.stderr,)
417 422 level = DEBUG
418 423 formatter = color_formatter
419 424
420 425 [handler_console_sql]
421 426 class = StreamHandler
422 427 args = (sys.stderr,)
423 428 level = DEBUG
424 429 formatter = color_formatter_sql
425 430
426 431 ################
427 432 ## FORMATTERS ##
428 433 ################
429 434
430 435 [formatter_generic]
431 436 format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
432 437 datefmt = %Y-%m-%d %H:%M:%S
433 438
434 439 [formatter_color_formatter]
435 440 class=rhodecode.lib.colored_formatter.ColorFormatter
436 441 format= %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
437 442 datefmt = %Y-%m-%d %H:%M:%S
438 443
439 444 [formatter_color_formatter_sql]
440 445 class=rhodecode.lib.colored_formatter.ColorFormatterSql
441 446 format= %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
442 447 datefmt = %Y-%m-%d %H:%M:%S
@@ -1,442 +1,447 b''
1 1 ################################################################################
2 2 ################################################################################
3 3 # RhodeCode - Pylons environment configuration #
4 4 # #
5 5 # The %(here)s variable will be replaced with the parent directory of this file#
6 6 ################################################################################
7 7
8 8 [DEFAULT]
9 9 debug = true
10 10 pdebug = false
11 11 ################################################################################
12 12 ## Uncomment and replace with the address which should receive ##
13 13 ## any error reports after application crash ##
14 14 ## Additionally those settings will be used by RhodeCode mailing system ##
15 15 ################################################################################
16 16 #email_to = admin@localhost
17 17 #error_email_from = paste_error@localhost
18 18 #app_email_from = rhodecode-noreply@localhost
19 19 #error_message =
20 20 #email_prefix = [RhodeCode]
21 21
22 22 #smtp_server = mail.server.com
23 23 #smtp_username =
24 24 #smtp_password =
25 25 #smtp_port =
26 26 #smtp_use_tls = false
27 27 #smtp_use_ssl = true
28 28 # Specify available auth parameters here (e.g. LOGIN PLAIN CRAM-MD5, etc.)
29 29 #smtp_auth =
30 30
31 31 [server:main]
32 32 ## PASTE
33 33 ##nr of threads to spawn
34 34 #threadpool_workers = 5
35 35
36 36 ##max request before thread respawn
37 37 #threadpool_max_requests = 10
38 38
39 39 ##option to use threads instead of processes
40 40 #use_threadpool = true
41 41
42 42 #use = egg:Paste#http
43 43
44 44 #WAITRESS
45 45 threads = 5
46 #100GB
47 max_request_body_size = 107374182400
46 48 use = egg:waitress#main
47 49
48 50 host = 127.0.0.1
49 51 port = 8001
50 52
51 53 [filter:proxy-prefix]
52 54 # prefix middleware for rc
53 55 use = egg:PasteDeploy#prefix
54 56 prefix = /<your-prefix>
55 57
56 58 [app:main]
57 59 use = egg:rhodecode
58 60 #filter-with = proxy-prefix
59 61 full_stack = true
60 62 static_files = true
61 63 # Optional Languages
62 64 # en, fr, ja, pt_BR, zh_CN, zh_TW, pl
63 65 lang = en
64 66 cache_dir = %(here)s/data
65 67 index_dir = %(here)s/data/index
66 68 app_instance_uuid = rc-production
67 69 cut_off_limit = 256000
68 70 vcs_full_cache = True
69 71 # force https in RhodeCode, fixes https redirects, assumes it's always https
70 72 force_https = false
71 73 # use Strict-Transport-Security headers
72 74 use_htsts = false
73 75 commit_parse_limit = 50
74 76 # number of items displayed in lightweight dashboard before paginating
75 77 dashboard_items = 100
76 78 use_gravatar = true
77 79
80 # path to git executable
81 git_path = git
82
78 83 ## RSS feed options
79 84
80 85 rss_cut_off_limit = 256000
81 86 rss_items_per_page = 10
82 87 rss_include_diff = false
83 88
84 89
85 90 ## alternative_gravatar_url allows you to use your own avatar server application
86 91 ## the following parts of the URL will be replaced
87 92 ## {email} user email
88 93 ## {md5email} md5 hash of the user email (like at gravatar.com)
89 94 ## {size} size of the image that is expected from the server application
90 95 ## {scheme} http/https from RhodeCode server
91 96 ## {netloc} network location from RhodeCode server
92 97 #alternative_gravatar_url = http://myavatarserver.com/getbyemail/{email}/{size}
93 98 #alternative_gravatar_url = http://myavatarserver.com/getbymd5/{md5email}?s={size}
94 99
95 100 container_auth_enabled = false
96 101 proxypass_auth_enabled = false
97 102 ## default encoding used to convert from and to unicode
98 103 ## can also be a comma separated list of encodings in case of mixed encodings
99 104 default_encoding = utf8
100 105
101 106 ## overwrite schema of clone url
102 107 ## available vars:
103 108 ## scheme - http/https
104 109 ## user - current user
105 110 ## pass - password
106 111 ## netloc - network location
107 112 ## path - usually repo_name
108 113
109 114 #clone_uri = {scheme}://{user}{pass}{netloc}{path}
110 115
111 116 ## issue tracking mapping for commits messages
112 117 ## comment out issue_pat, issue_server, issue_prefix to enable
113 118
114 119 ## pattern to get the issues from commit messages
115 120 ## default one used here is #<numbers> with a regex passive group for `#`
116 121 ## {id} will be all groups matched from this pattern
117 122
118 123 issue_pat = (?:\s*#)(\d+)
119 124
120 125 ## server url to the issue, each {id} will be replaced with match
121 126 ## fetched from the regex and {repo} is replaced with full repository name
122 127 ## including groups {repo_name} is replaced with just name of repo
123 128
124 129 issue_server_link = https://myissueserver.com/{repo}/issue/{id}
125 130
126 131 ## prefix to add to link to indicate it's an url
127 132 ## #314 will be replaced by <issue_prefix><id>
128 133
129 134 issue_prefix = #
130 135
131 136 ## issue_pat, issue_server_link, issue_prefix can have suffixes to specify
132 137 ## multiple patterns, to other issues server, wiki or others
133 138 ## below an example how to create a wiki pattern
134 139 # #wiki-some-id -> https://mywiki.com/some-id
135 140
136 141 #issue_pat_wiki = (?:wiki-)(.+)
137 142 #issue_server_link_wiki = https://mywiki.com/{id}
138 143 #issue_prefix_wiki = WIKI-
139 144
140 145
141 146 ## instance-id prefix
142 147 ## a prefix key for this instance used for cache invalidation when running
143 148 ## multiple instances of rhodecode, make sure it's globally unique for
144 149 ## all running rhodecode instances. Leave empty if you don't use it
145 150 instance_id =
146 151
147 152 ## alternative return HTTP header for failed authentication. Default HTTP
148 153 ## response is 401 HTTPUnauthorized. Currently HG clients have troubles with
149 154 ## handling that. Set this variable to 403 to return HTTPForbidden
150 155 auth_ret_code =
151 156
152 157 ####################################
153 158 ### CELERY CONFIG ####
154 159 ####################################
155 160 use_celery = false
156 161 broker.host = localhost
157 162 broker.vhost = rabbitmqhost
158 163 broker.port = 5672
159 164 broker.user = rabbitmq
160 165 broker.password = qweqwe
161 166
162 167 celery.imports = rhodecode.lib.celerylib.tasks
163 168
164 169 celery.result.backend = amqp
165 170 celery.result.dburi = amqp://
166 171 celery.result.serialier = json
167 172
168 173 #celery.send.task.error.emails = true
169 174 #celery.amqp.task.result.expires = 18000
170 175
171 176 celeryd.concurrency = 2
172 177 #celeryd.log.file = celeryd.log
173 178 celeryd.log.level = debug
174 179 celeryd.max.tasks.per.child = 1
175 180
176 181 #tasks will never be sent to the queue, but executed locally instead.
177 182 celery.always.eager = false
178 183
179 184 ####################################
180 185 ### BEAKER CACHE ####
181 186 ####################################
182 187 beaker.cache.data_dir=%(here)s/data/cache/data
183 188 beaker.cache.lock_dir=%(here)s/data/cache/lock
184 189
185 190 beaker.cache.regions=super_short_term,short_term,long_term,sql_cache_short,sql_cache_med,sql_cache_long
186 191
187 192 beaker.cache.super_short_term.type=memory
188 193 beaker.cache.super_short_term.expire=10
189 194 beaker.cache.super_short_term.key_length = 256
190 195
191 196 beaker.cache.short_term.type=memory
192 197 beaker.cache.short_term.expire=60
193 198 beaker.cache.short_term.key_length = 256
194 199
195 200 beaker.cache.long_term.type=memory
196 201 beaker.cache.long_term.expire=36000
197 202 beaker.cache.long_term.key_length = 256
198 203
199 204 beaker.cache.sql_cache_short.type=memory
200 205 beaker.cache.sql_cache_short.expire=10
201 206 beaker.cache.sql_cache_short.key_length = 256
202 207
203 208 beaker.cache.sql_cache_med.type=memory
204 209 beaker.cache.sql_cache_med.expire=360
205 210 beaker.cache.sql_cache_med.key_length = 256
206 211
207 212 beaker.cache.sql_cache_long.type=file
208 213 beaker.cache.sql_cache_long.expire=3600
209 214 beaker.cache.sql_cache_long.key_length = 256
210 215
211 216 ####################################
212 217 ### BEAKER SESSION ####
213 218 ####################################
214 219 ## Type of storage used for the session, current types are
215 220 ## dbm, file, memcached, database, and memory.
216 221 ## The storage uses the Container API
217 222 ## that is also used by the cache system.
218 223
219 224 ## db session ##
220 225 #beaker.session.type = ext:database
221 226 #beaker.session.sa.url = postgresql://postgres:qwe@localhost/rhodecode
222 227 #beaker.session.table_name = db_session
223 228
224 229 ## encrypted cookie client side session, good for many instances ##
225 230 #beaker.session.type = cookie
226 231
227 232 ## file based cookies (default) ##
228 233 #beaker.session.type = file
229 234
230 235
231 236 beaker.session.key = rhodecode
232 237 ## secure cookie requires AES python libraries ##
233 238 #beaker.session.encrypt_key = g654dcno0-9873jhgfreyu
234 239 #beaker.session.validate_key = 9712sds2212c--zxc123
235 240 ## sets session as invalid if it hasn't been accessed for given amount of time
236 241 beaker.session.timeout = 2592000
237 242 beaker.session.httponly = true
238 243 #beaker.session.cookie_path = /<your-prefix>
239 244
240 245 ## uncomment for https secure cookie ##
241 246 beaker.session.secure = false
242 247
243 248 ## auto save the session to not to use .save() ##
244 249 beaker.session.auto = False
245 250
246 251 ## default cookie expiration time in seconds `true` expire at browser close ##
247 252 #beaker.session.cookie_expires = 3600
248 253
249 254
250 255 ############################
251 256 ## ERROR HANDLING SYSTEMS ##
252 257 ############################
253 258
254 259 ####################
255 260 ### [errormator] ###
256 261 ####################
257 262
258 263 # Errormator is tailored to work with RhodeCode, see
259 264 # http://errormator.com for details how to obtain an account
260 265 # you must install python package `errormator_client` to make it work
261 266
262 267 # errormator enabled
263 268 errormator = true
264 269
265 270 errormator.server_url = https://api.errormator.com
266 271 errormator.api_key = YOUR_API_KEY
267 272
268 273 # TWEAK AMOUNT OF INFO SENT HERE
269 274
270 275 # enables 404 error logging (default False)
271 276 errormator.report_404 = false
272 277
273 278 # time in seconds after request is considered being slow (default 1)
274 279 errormator.slow_request_time = 1
275 280
276 281 # record slow requests in application
277 282 # (needs to be enabled for slow datastore recording and time tracking)
278 283 errormator.slow_requests = true
279 284
280 285 # enable hooking to application loggers
281 286 # errormator.logging = true
282 287
283 288 # minimum log level for log capture
284 289 # errormator.logging.level = WARNING
285 290
286 291 # send logs only from erroneous/slow requests
287 292 # (saves API quota for intensive logging)
288 293 errormator.logging_on_error = false
289 294
290 295 # list of additional keywords that should be grabbed from environ object
291 296 # can be string with comma separated list of words in lowercase
292 297 # (by default client will always send following info:
293 298 # 'REMOTE_USER', 'REMOTE_ADDR', 'SERVER_NAME', 'CONTENT_TYPE' + all keys that
294 299 # start with HTTP* this list can be extended with additional keywords here
295 300 errormator.environ_keys_whitelist =
296 301
297 302
298 303 # list of keywords that should be blanked from request object
299 304 # can be string with comma separated list of words in lowercase
300 305 # (by default client will always blank keys that contain following words
301 306 # 'password', 'passwd', 'pwd', 'auth_tkt', 'secret', 'csrf'
302 307 # this list can be extended with additional keywords set here
303 308 errormator.request_keys_blacklist =
304 309
305 310
306 311 # list of namespaces that should be ignored when gathering log entries
307 312 # can be string with comma separated list of namespaces
308 313 # (by default the client ignores own entries: errormator_client.client)
309 314 errormator.log_namespace_blacklist =
310 315
311 316
312 317 ################
313 318 ### [sentry] ###
314 319 ################
315 320
316 321 # sentry is an alternative open source error aggregator
317 322 # you must install python packages `sentry` and `raven` to enable
318 323
319 324 sentry.dsn = YOUR_DNS
320 325 sentry.servers =
321 326 sentry.name =
322 327 sentry.key =
323 328 sentry.public_key =
324 329 sentry.secret_key =
325 330 sentry.project =
326 331 sentry.site =
327 332 sentry.include_paths =
328 333 sentry.exclude_paths =
329 334
330 335
331 336 ################################################################################
332 337 ## WARNING: *THE LINE BELOW MUST BE UNCOMMENTED ON A PRODUCTION ENVIRONMENT* ##
333 338 ## Debug mode will enable the interactive debugging tool, allowing ANYONE to ##
334 339 ## execute malicious code after an exception is raised. ##
335 340 ################################################################################
336 341 set debug = false
337 342
338 343 ##################################
339 344 ### LOGVIEW CONFIG ###
340 345 ##################################
341 346 logview.sqlalchemy = #faa
342 347 logview.pylons.templating = #bfb
343 348 logview.pylons.util = #eee
344 349
345 350 #########################################################
346 351 ### DB CONFIGS - EACH DB WILL HAVE IT'S OWN CONFIG ###
347 352 #########################################################
348 353 #sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode.db
349 354 sqlalchemy.db1.url = postgresql://postgres:qwe@localhost/rhodecode
350 355 sqlalchemy.db1.echo = false
351 356 sqlalchemy.db1.pool_recycle = 3600
352 357 sqlalchemy.db1.convert_unicode = true
353 358
354 359 ################################
355 360 ### LOGGING CONFIGURATION ####
356 361 ################################
357 362 [loggers]
358 363 keys = root, routes, rhodecode, sqlalchemy, beaker, templates, whoosh_indexer
359 364
360 365 [handlers]
361 366 keys = console, console_sql
362 367
363 368 [formatters]
364 369 keys = generic, color_formatter, color_formatter_sql
365 370
366 371 #############
367 372 ## LOGGERS ##
368 373 #############
369 374 [logger_root]
370 375 level = NOTSET
371 376 handlers = console
372 377
373 378 [logger_routes]
374 379 level = DEBUG
375 380 handlers =
376 381 qualname = routes.middleware
377 382 # "level = DEBUG" logs the route matched and routing variables.
378 383 propagate = 1
379 384
380 385 [logger_beaker]
381 386 level = DEBUG
382 387 handlers =
383 388 qualname = beaker.container
384 389 propagate = 1
385 390
386 391 [logger_templates]
387 392 level = INFO
388 393 handlers =
389 394 qualname = pylons.templating
390 395 propagate = 1
391 396
392 397 [logger_rhodecode]
393 398 level = DEBUG
394 399 handlers =
395 400 qualname = rhodecode
396 401 propagate = 1
397 402
398 403 [logger_sqlalchemy]
399 404 level = INFO
400 405 handlers = console_sql
401 406 qualname = sqlalchemy.engine
402 407 propagate = 0
403 408
404 409 [logger_whoosh_indexer]
405 410 level = DEBUG
406 411 handlers =
407 412 qualname = whoosh_indexer
408 413 propagate = 1
409 414
410 415 ##############
411 416 ## HANDLERS ##
412 417 ##############
413 418
414 419 [handler_console]
415 420 class = StreamHandler
416 421 args = (sys.stderr,)
417 422 level = INFO
418 423 formatter = generic
419 424
420 425 [handler_console_sql]
421 426 class = StreamHandler
422 427 args = (sys.stderr,)
423 428 level = WARN
424 429 formatter = generic
425 430
426 431 ################
427 432 ## FORMATTERS ##
428 433 ################
429 434
430 435 [formatter_generic]
431 436 format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
432 437 datefmt = %Y-%m-%d %H:%M:%S
433 438
434 439 [formatter_color_formatter]
435 440 class=rhodecode.lib.colored_formatter.ColorFormatter
436 441 format= %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
437 442 datefmt = %Y-%m-%d %H:%M:%S
438 443
439 444 [formatter_color_formatter_sql]
440 445 class=rhodecode.lib.colored_formatter.ColorFormatterSql
441 446 format= %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
442 447 datefmt = %Y-%m-%d %H:%M:%S
@@ -1,452 +1,457 b''
1 1 ################################################################################
2 2 ################################################################################
3 3 # RhodeCode - Pylons environment configuration #
4 4 # #
5 5 # The %(here)s variable will be replaced with the parent directory of this file#
6 6 ################################################################################
7 7
8 8 [DEFAULT]
9 9 debug = true
10 10 pdebug = false
11 11 ################################################################################
12 12 ## Uncomment and replace with the address which should receive ##
13 13 ## any error reports after application crash ##
14 14 ## Additionally those settings will be used by RhodeCode mailing system ##
15 15 ################################################################################
16 16 #email_to = admin@localhost
17 17 #error_email_from = paste_error@localhost
18 18 #app_email_from = rhodecode-noreply@localhost
19 19 #error_message =
20 20 #email_prefix = [RhodeCode]
21 21
22 22 #smtp_server = mail.server.com
23 23 #smtp_username =
24 24 #smtp_password =
25 25 #smtp_port =
26 26 #smtp_use_tls = false
27 27 #smtp_use_ssl = true
28 28 # Specify available auth parameters here (e.g. LOGIN PLAIN CRAM-MD5, etc.)
29 29 #smtp_auth =
30 30
31 31 [server:main]
32 32 ## PASTE
33 33 ##nr of threads to spawn
34 34 #threadpool_workers = 5
35 35
36 36 ##max request before thread respawn
37 37 #threadpool_max_requests = 10
38 38
39 39 ##option to use threads of process
40 40 #use_threadpool = true
41 41
42 42 #use = egg:Paste#http
43 43
44 44 #WAITRESS
45 45 threads = 5
46 #100GB
47 max_request_body_size = 107374182400
46 48 use = egg:waitress#main
47 49
48 50 host = 127.0.0.1
49 51 port = 5000
50 52
51 53 [filter:proxy-prefix]
52 54 # prefix middleware for rc
53 55 use = egg:PasteDeploy#prefix
54 56 prefix = /<your-prefix>
55 57
56 58 [app:main]
57 59 use = egg:rhodecode
58 60 #filter-with = proxy-prefix
59 61 full_stack = true
60 62 static_files = true
61 63 # Optional Languages
62 64 # en, fr, ja, pt_BR, zh_CN, zh_TW, pl
63 65 lang = en
64 66 cache_dir = %(here)s/data
65 67 index_dir = %(here)s/data/index
66 68 app_instance_uuid = ${app_instance_uuid}
67 69 cut_off_limit = 256000
68 70 vcs_full_cache = True
69 71 # force https in RhodeCode, fixes https redirects, assumes it's always https
70 72 force_https = false
71 73 # use Strict-Transport-Security headers
72 74 use_htsts = false
73 75 commit_parse_limit = 50
74 76 # number of items displayed in lightweight dashboard before paginating
75 77 dashboard_items = 100
76 78 use_gravatar = true
77 79
80 # path to git executable
81 git_path = git
82
78 83 ## RSS feed options
79 84
80 85 rss_cut_off_limit = 256000
81 86 rss_items_per_page = 10
82 87 rss_include_diff = false
83 88
84 89
85 90 ## alternative_gravatar_url allows you to use your own avatar server application
86 91 ## the following parts of the URL will be replaced
87 92 ## {email} user email
88 93 ## {md5email} md5 hash of the user email (like at gravatar.com)
89 94 ## {size} size of the image that is expected from the server application
90 95 ## {scheme} http/https from RhodeCode server
91 96 ## {netloc} network location from RhodeCode server
92 97 #alternative_gravatar_url = http://myavatarserver.com/getbyemail/{email}/{size}
93 98 #alternative_gravatar_url = http://myavatarserver.com/getbymd5/{md5email}?s={size}
94 99
95 100 container_auth_enabled = false
96 101 proxypass_auth_enabled = false
97 102 ## default encoding used to convert from and to unicode
98 103 ## can be also a comma seperated list of encoding in case of mixed encodings
99 104 default_encoding = utf8
100 105
101 106 ## overwrite schema of clone url
102 107 ## available vars:
103 108 ## scheme - http/https
104 109 ## user - current user
105 110 ## pass - password
106 111 ## netloc - network location
107 112 ## path - usually repo_name
108 113
109 114 #clone_uri = {scheme}://{user}{pass}{netloc}{path}
110 115
111 116 ## issue tracking mapping for commits messages
112 117 ## comment out issue_pat, issue_server, issue_prefix to enable
113 118
114 119 ## pattern to get the issues from commit messages
115 120 ## default one used here is #<numbers> with a regex passive group for `#`
116 121 ## {id} will be all groups matched from this pattern
117 122
118 123 issue_pat = (?:\s*#)(\d+)
119 124
120 125 ## server url to the issue, each {id} will be replaced with match
121 126 ## fetched from the regex and {repo} is replaced with full repository name
122 127 ## including groups {repo_name} is replaced with just name of repo
123 128
124 129 issue_server_link = https://myissueserver.com/{repo}/issue/{id}
125 130
126 131 ## prefix to add to link to indicate it's an url
127 132 ## #314 will be replaced by <issue_prefix><id>
128 133
129 134 issue_prefix = #
130 135
131 136 ## issue_pat, issue_server_link, issue_prefix can have suffixes to specify
132 137 ## multiple patterns, to other issues server, wiki or others
133 138 ## below an example how to create a wiki pattern
134 139 # #wiki-some-id -> https://mywiki.com/some-id
135 140
136 141 #issue_pat_wiki = (?:wiki-)(.+)
137 142 #issue_server_link_wiki = https://mywiki.com/{id}
138 143 #issue_prefix_wiki = WIKI-
139 144
140 145
141 146 ## instance-id prefix
142 147 ## a prefix key for this instance used for cache invalidation when running
143 148 ## multiple instances of rhodecode, make sure it's globally unique for
144 149 ## all running rhodecode instances. Leave empty if you don't use it
145 150 instance_id =
146 151
147 152 ## alternative return HTTP header for failed authentication. Default HTTP
148 153 ## response is 401 HTTPUnauthorized. Currently HG clients have troubles with
149 154 ## handling that. Set this variable to 403 to return HTTPForbidden
150 155 auth_ret_code =
151 156
152 157 ####################################
153 158 ### CELERY CONFIG ####
154 159 ####################################
155 160 use_celery = false
156 161 broker.host = localhost
157 162 broker.vhost = rabbitmqhost
158 163 broker.port = 5672
159 164 broker.user = rabbitmq
160 165 broker.password = qweqwe
161 166
162 167 celery.imports = rhodecode.lib.celerylib.tasks
163 168
164 169 celery.result.backend = amqp
165 170 celery.result.dburi = amqp://
166 171 celery.result.serialier = json
167 172
168 173 #celery.send.task.error.emails = true
169 174 #celery.amqp.task.result.expires = 18000
170 175
171 176 celeryd.concurrency = 2
172 177 #celeryd.log.file = celeryd.log
173 178 celeryd.log.level = debug
174 179 celeryd.max.tasks.per.child = 1
175 180
176 181 #tasks will never be sent to the queue, but executed locally instead.
177 182 celery.always.eager = false
178 183
179 184 ####################################
180 185 ### BEAKER CACHE ####
181 186 ####################################
182 187 beaker.cache.data_dir=%(here)s/data/cache/data
183 188 beaker.cache.lock_dir=%(here)s/data/cache/lock
184 189
185 190 beaker.cache.regions=super_short_term,short_term,long_term,sql_cache_short,sql_cache_med,sql_cache_long
186 191
187 192 beaker.cache.super_short_term.type=memory
188 193 beaker.cache.super_short_term.expire=10
189 194 beaker.cache.super_short_term.key_length = 256
190 195
191 196 beaker.cache.short_term.type=memory
192 197 beaker.cache.short_term.expire=60
193 198 beaker.cache.short_term.key_length = 256
194 199
195 200 beaker.cache.long_term.type=memory
196 201 beaker.cache.long_term.expire=36000
197 202 beaker.cache.long_term.key_length = 256
198 203
199 204 beaker.cache.sql_cache_short.type=memory
200 205 beaker.cache.sql_cache_short.expire=10
201 206 beaker.cache.sql_cache_short.key_length = 256
202 207
203 208 beaker.cache.sql_cache_med.type=memory
204 209 beaker.cache.sql_cache_med.expire=360
205 210 beaker.cache.sql_cache_med.key_length = 256
206 211
207 212 beaker.cache.sql_cache_long.type=file
208 213 beaker.cache.sql_cache_long.expire=3600
209 214 beaker.cache.sql_cache_long.key_length = 256
210 215
211 216 ####################################
212 217 ### BEAKER SESSION ####
213 218 ####################################
214 219 ## Type of storage used for the session, current types are
215 220 ## dbm, file, memcached, database, and memory.
216 221 ## The storage uses the Container API
217 222 ## that is also used by the cache system.
218 223
219 224 ## db session ##
220 225 #beaker.session.type = ext:database
221 226 #beaker.session.sa.url = postgresql://postgres:qwe@localhost/rhodecode
222 227 #beaker.session.table_name = db_session
223 228
224 229 ## encrypted cookie client side session, good for many instances ##
225 230 #beaker.session.type = cookie
226 231
227 232 ## file based cookies (default) ##
228 233 #beaker.session.type = file
229 234
230 235
231 236 beaker.session.key = rhodecode
232 237 ## secure cookie requires AES python libraries ##
233 238 #beaker.session.encrypt_key = g654dcno0-9873jhgfreyu
234 239 #beaker.session.validate_key = 9712sds2212c--zxc123
235 240 ## sets session as invalid if it haven't been accessed for given amount of time
236 241 beaker.session.timeout = 2592000
237 242 beaker.session.httponly = true
238 243 #beaker.session.cookie_path = /<your-prefix>
239 244
240 245 ## uncomment for https secure cookie ##
241 246 beaker.session.secure = false
242 247
243 248 ## auto save the session to not to use .save() ##
244 249 beaker.session.auto = False
245 250
246 251 ## default cookie expiration time in seconds `true` expire at browser close ##
247 252 #beaker.session.cookie_expires = 3600
248 253
249 254
250 255 ############################
251 256 ## ERROR HANDLING SYSTEMS ##
252 257 ############################
253 258
254 259 ####################
255 260 ### [errormator] ###
256 261 ####################
257 262
258 263 # Errormator is tailored to work with RhodeCode, see
259 264 # http://errormator.com for details how to obtain an account
260 265 # you must install python package `errormator_client` to make it work
261 266
262 267 # errormator enabled
263 268 errormator = true
264 269
265 270 errormator.server_url = https://api.errormator.com
266 271 errormator.api_key = YOUR_API_KEY
267 272
268 273 # TWEAK AMOUNT OF INFO SENT HERE
269 274
270 275 # enables 404 error logging (default False)
271 276 errormator.report_404 = false
272 277
273 278 # time in seconds after request is considered being slow (default 1)
274 279 errormator.slow_request_time = 1
275 280
276 281 # record slow requests in application
277 282 # (needs to be enabled for slow datastore recording and time tracking)
278 283 errormator.slow_requests = true
279 284
280 285 # enable hooking to application loggers
281 286 # errormator.logging = true
282 287
283 288 # minimum log level for log capture
284 289 # errormator.logging.level = WARNING
285 290
286 291 # send logs only from erroneous/slow requests
287 292 # (saves API quota for intensive logging)
288 293 errormator.logging_on_error = false
289 294
290 295 # list of additonal keywords that should be grabbed from environ object
291 296 # can be string with comma separated list of words in lowercase
292 297 # (by default client will always send following info:
293 298 # 'REMOTE_USER', 'REMOTE_ADDR', 'SERVER_NAME', 'CONTENT_TYPE' + all keys that
294 299 # start with HTTP* this list be extended with additional keywords here
295 300 errormator.environ_keys_whitelist =
296 301
297 302
298 303 # list of keywords that should be blanked from request object
299 304 # can be string with comma separated list of words in lowercase
300 305 # (by default client will always blank keys that contain following words
301 306 # 'password', 'passwd', 'pwd', 'auth_tkt', 'secret', 'csrf'
302 307 # this list be extended with additional keywords set here
303 308 errormator.request_keys_blacklist =
304 309
305 310
306 311 # list of namespaces that should be ignores when gathering log entries
307 312 # can be string with comma separated list of namespaces
308 313 # (by default the client ignores own entries: errormator_client.client)
309 314 errormator.log_namespace_blacklist =
310 315
311 316
312 317 ################
313 318 ### [sentry] ###
314 319 ################
315 320
316 321 # sentry is a alternative open source error aggregator
317 322 # you must install python packages `sentry` and `raven` to enable
318 323
319 324 sentry.dsn = YOUR_DNS
320 325 sentry.servers =
321 326 sentry.name =
322 327 sentry.key =
323 328 sentry.public_key =
324 329 sentry.secret_key =
325 330 sentry.project =
326 331 sentry.site =
327 332 sentry.include_paths =
328 333 sentry.exclude_paths =
329 334
330 335
331 336 ################################################################################
332 337 ## WARNING: *THE LINE BELOW MUST BE UNCOMMENTED ON A PRODUCTION ENVIRONMENT* ##
333 338 ## Debug mode will enable the interactive debugging tool, allowing ANYONE to ##
334 339 ## execute malicious code after an exception is raised. ##
335 340 ################################################################################
336 341 set debug = false
337 342
338 343 ##################################
339 344 ### LOGVIEW CONFIG ###
340 345 ##################################
341 346 logview.sqlalchemy = #faa
342 347 logview.pylons.templating = #bfb
343 348 logview.pylons.util = #eee
344 349
345 350 #########################################################
346 351 ### DB CONFIGS - EACH DB WILL HAVE IT'S OWN CONFIG ###
347 352 #########################################################
348 353
349 354 # SQLITE [default]
350 355 sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode.db
351 356
352 357 # POSTGRESQL
353 358 # sqlalchemy.db1.url = postgresql://user:pass@localhost/rhodecode
354 359
355 360 # MySQL
356 361 # sqlalchemy.db1.url = mysql://user:pass@localhost/rhodecode
357 362
358 363 # see sqlalchemy docs for others
359 364
360 365 sqlalchemy.db1.echo = false
361 366 sqlalchemy.db1.pool_recycle = 3600
362 367 sqlalchemy.db1.convert_unicode = true
363 368
364 369 ################################
365 370 ### LOGGING CONFIGURATION ####
366 371 ################################
367 372 [loggers]
368 373 keys = root, routes, rhodecode, sqlalchemy, beaker, templates, whoosh_indexer
369 374
370 375 [handlers]
371 376 keys = console, console_sql
372 377
373 378 [formatters]
374 379 keys = generic, color_formatter, color_formatter_sql
375 380
376 381 #############
377 382 ## LOGGERS ##
378 383 #############
379 384 [logger_root]
380 385 level = NOTSET
381 386 handlers = console
382 387
383 388 [logger_routes]
384 389 level = DEBUG
385 390 handlers =
386 391 qualname = routes.middleware
387 392 # "level = DEBUG" logs the route matched and routing variables.
388 393 propagate = 1
389 394
390 395 [logger_beaker]
391 396 level = DEBUG
392 397 handlers =
393 398 qualname = beaker.container
394 399 propagate = 1
395 400
396 401 [logger_templates]
397 402 level = INFO
398 403 handlers =
399 404 qualname = pylons.templating
400 405 propagate = 1
401 406
402 407 [logger_rhodecode]
403 408 level = DEBUG
404 409 handlers =
405 410 qualname = rhodecode
406 411 propagate = 1
407 412
408 413 [logger_sqlalchemy]
409 414 level = INFO
410 415 handlers = console_sql
411 416 qualname = sqlalchemy.engine
412 417 propagate = 0
413 418
414 419 [logger_whoosh_indexer]
415 420 level = DEBUG
416 421 handlers =
417 422 qualname = whoosh_indexer
418 423 propagate = 1
419 424
420 425 ##############
421 426 ## HANDLERS ##
422 427 ##############
423 428
424 429 [handler_console]
425 430 class = StreamHandler
426 431 args = (sys.stderr,)
427 432 level = INFO
428 433 formatter = generic
429 434
430 435 [handler_console_sql]
431 436 class = StreamHandler
432 437 args = (sys.stderr,)
433 438 level = WARN
434 439 formatter = generic
435 440
436 441 ################
437 442 ## FORMATTERS ##
438 443 ################
439 444
440 445 [formatter_generic]
441 446 format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
442 447 datefmt = %Y-%m-%d %H:%M:%S
443 448
444 449 [formatter_color_formatter]
445 450 class=rhodecode.lib.colored_formatter.ColorFormatter
446 451 format= %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
447 452 datefmt = %Y-%m-%d %H:%M:%S
448 453
449 454 [formatter_color_formatter_sql]
450 455 class=rhodecode.lib.colored_formatter.ColorFormatterSql
451 456 format= %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
452 457 datefmt = %Y-%m-%d %H:%M:%S
@@ -1,200 +1,203 b''
1 1 import os
2 2 import socket
3 3 import logging
4 4 import subprocess
5 5 import traceback
6 6
7 7 from webob import Request, Response, exc
8 8
9 import rhodecode
9 10 from rhodecode.lib import subprocessio
10 11
11 12 log = logging.getLogger(__name__)
12 13
13 14
14 15 class FileWrapper(object):
15 16
16 17 def __init__(self, fd, content_length):
17 18 self.fd = fd
18 19 self.content_length = content_length
19 20 self.remain = content_length
20 21
21 22 def read(self, size):
22 23 if size <= self.remain:
23 24 try:
24 25 data = self.fd.read(size)
25 26 except socket.error:
26 27 raise IOError(self)
27 28 self.remain -= size
28 29 elif self.remain:
29 30 data = self.fd.read(self.remain)
30 31 self.remain = 0
31 32 else:
32 33 data = None
33 34 return data
34 35
35 36 def __repr__(self):
36 37 return '<FileWrapper %s len: %s, read: %s>' % (
37 38 self.fd, self.content_length, self.content_length - self.remain
38 39 )
39 40
40 41
41 42 class GitRepository(object):
42 43 git_folder_signature = set(['config', 'head', 'info', 'objects', 'refs'])
43 44 commands = ['git-upload-pack', 'git-receive-pack']
44 45
45 46 def __init__(self, repo_name, content_path, extras):
46 47 files = set([f.lower() for f in os.listdir(content_path)])
47 48 if not (self.git_folder_signature.intersection(files)
48 49 == self.git_folder_signature):
49 50 raise OSError('%s missing git signature' % content_path)
50 51 self.content_path = content_path
51 52 self.valid_accepts = ['application/x-%s-result' %
52 53 c for c in self.commands]
53 54 self.repo_name = repo_name
54 55 self.extras = extras
55 56
56 57 def _get_fixedpath(self, path):
57 58 """
58 59 Small fix for repo_path
59 60
60 61 :param path:
61 62 :type path:
62 63 """
63 64 return path.split(self.repo_name, 1)[-1].strip('/')
64 65
65 66 def inforefs(self, request, environ):
66 67 """
67 68 WSGI Response producer for HTTP GET Git Smart
68 69 HTTP /info/refs request.
69 70 """
70 71
71 72 git_command = request.GET.get('service')
72 73 if git_command not in self.commands:
73 74 log.debug('command %s not allowed' % git_command)
74 75 return exc.HTTPMethodNotAllowed()
75 76
76 77 # note to self:
77 78 # please, resist the urge to add '\n' to git capture and increment
78 79 # line count by 1.
79 80 # The code in Git client not only does NOT need '\n', but actually
80 81 # blows up if you sprinkle "flush" (0000) as "0001\n".
81 82 # It reads binary, per number of bytes specified.
82 83 # if you do add '\n' as part of data, count it.
83 84 server_advert = '# service=%s' % git_command
84 85 packet_len = str(hex(len(server_advert) + 4)[2:].rjust(4, '0')).lower()
86 _git_path = rhodecode.CONFIG.get('git_path', 'git')
85 87 try:
86 88 out = subprocessio.SubprocessIOChunker(
87 r'git %s --stateless-rpc --advertise-refs "%s"' % (
88 git_command[4:], self.content_path),
89 r'%s %s --stateless-rpc --advertise-refs "%s"' % (
90 _git_path, git_command[4:], self.content_path),
89 91 starting_values=[
90 92 packet_len + server_advert + '0000'
91 93 ]
92 94 )
93 95 except EnvironmentError, e:
94 96 log.error(traceback.format_exc())
95 97 raise exc.HTTPExpectationFailed()
96 98 resp = Response()
97 99 resp.content_type = 'application/x-%s-advertisement' % str(git_command)
98 100 resp.charset = None
99 101 resp.app_iter = out
100 102 return resp
101 103
102 104 def backend(self, request, environ):
103 105 """
104 106 WSGI Response producer for HTTP POST Git Smart HTTP requests.
105 107 Reads commands and data from HTTP POST's body.
106 108 returns an iterator obj with contents of git command's
107 109 response to stdout
108 110 """
109 111 git_command = self._get_fixedpath(request.path_info)
110 112 if git_command not in self.commands:
111 113 log.debug('command %s not allowed' % git_command)
112 114 return exc.HTTPMethodNotAllowed()
113 115
114 116 if 'CONTENT_LENGTH' in environ:
115 117 inputstream = FileWrapper(environ['wsgi.input'],
116 118 request.content_length)
117 119 else:
118 120 inputstream = environ['wsgi.input']
119 121
120 122 try:
121 123 gitenv = os.environ
122 124 from rhodecode.lib.compat import json
123 125 gitenv['RHODECODE_EXTRAS'] = json.dumps(self.extras)
124 126 # forget all configs
125 127 gitenv['GIT_CONFIG_NOGLOBAL'] = '1'
126 128 opts = dict(
127 129 env=gitenv,
128 130 cwd=os.getcwd()
129 131 )
130 132 cmd = r'git %s --stateless-rpc "%s"' % (git_command[4:],
131 133 self.content_path),
132 134 log.debug('handling cmd %s' % cmd)
133 135 out = subprocessio.SubprocessIOChunker(
134 136 cmd,
135 137 inputstream=inputstream,
136 138 **opts
137 139 )
138 140 except EnvironmentError, e:
139 141 log.error(traceback.format_exc())
140 142 raise exc.HTTPExpectationFailed()
141 143
142 144 if git_command in [u'git-receive-pack']:
143 145 # updating refs manually after each push.
144 146 # Needed for pre-1.7.0.4 git clients using regular HTTP mode.
145 cmd = (u'git --git-dir "%s" '
146 'update-server-info' % self.content_path)
147 _git_path = rhodecode.CONFIG.get('git_path', 'git')
148 cmd = (u'%s --git-dir "%s" '
149 'update-server-info' % (_git_path, self.content_path))
147 150 log.debug('handling cmd %s' % cmd)
148 151 subprocess.call(cmd, shell=True)
149 152
150 153 resp = Response()
151 154 resp.content_type = 'application/x-%s-result' % git_command.encode('utf8')
152 155 resp.charset = None
153 156 resp.app_iter = out
154 157 return resp
155 158
156 159 def __call__(self, environ, start_response):
157 160 request = Request(environ)
158 161 _path = self._get_fixedpath(request.path_info)
159 162 if _path.startswith('info/refs'):
160 163 app = self.inforefs
161 164 elif [a for a in self.valid_accepts if a in request.accept]:
162 165 app = self.backend
163 166 try:
164 167 resp = app(request, environ)
165 168 except exc.HTTPException, e:
166 169 resp = e
167 170 log.error(traceback.format_exc())
168 171 except Exception, e:
169 172 log.error(traceback.format_exc())
170 173 resp = exc.HTTPInternalServerError()
171 174 return resp(environ, start_response)
172 175
173 176
174 177 class GitDirectory(object):
175 178
176 179 def __init__(self, repo_root, repo_name, extras):
177 180 repo_location = os.path.join(repo_root, repo_name)
178 181 if not os.path.isdir(repo_location):
179 182 raise OSError(repo_location)
180 183
181 184 self.content_path = repo_location
182 185 self.repo_name = repo_name
183 186 self.repo_location = repo_location
184 187 self.extras = extras
185 188
186 189 def __call__(self, environ, start_response):
187 190 content_path = self.content_path
188 191 try:
189 192 app = GitRepository(self.repo_name, content_path, self.extras)
190 193 except (AssertionError, OSError):
191 194 content_path = os.path.join(content_path, '.git')
192 195 if os.path.isdir(content_path):
193 196 app = GitRepository(self.repo_name, content_path, self.extras)
194 197 else:
195 198 return exc.HTTPNotFound()(environ, start_response)
196 199 return app(environ, start_response)
197 200
198 201
199 202 def make_wsgi_app(repo_name, repo_root, extras):
200 203 return GitDirectory(repo_root, repo_name, extras)
@@ -1,801 +1,800 b''
1 1 # -*- coding: utf-8 -*-
2 2 """
3 3 rhodecode.lib.utils
4 4 ~~~~~~~~~~~~~~~~~~~
5 5
6 6 Utilities library for RhodeCode
7 7
8 8 :created_on: Apr 18, 2010
9 9 :author: marcink
10 10 :copyright: (C) 2010-2012 Marcin Kuzminski <marcin@python-works.com>
11 11 :license: GPLv3, see COPYING for more details.
12 12 """
13 13 # This program is free software: you can redistribute it and/or modify
14 14 # it under the terms of the GNU General Public License as published by
15 15 # the Free Software Foundation, either version 3 of the License, or
16 16 # (at your option) any later version.
17 17 #
18 18 # This program is distributed in the hope that it will be useful,
19 19 # but WITHOUT ANY WARRANTY; without even the implied warranty of
20 20 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
21 21 # GNU General Public License for more details.
22 22 #
23 23 # You should have received a copy of the GNU General Public License
24 24 # along with this program. If not, see <http://www.gnu.org/licenses/>.
25 25
26 26 import os
27 27 import re
28 28 import logging
29 29 import datetime
30 30 import traceback
31 31 import paste
32 32 import beaker
33 33 import tarfile
34 34 import shutil
35 35 import decorator
36 36 import warnings
37 37 from os.path import abspath
38 38 from os.path import dirname as dn, join as jn
39 39
40 40 from paste.script.command import Command, BadCommand
41 41
42 42 from mercurial import ui, config
43 43
44 44 from webhelpers.text import collapse, remove_formatting, strip_tags
45 45
46 46 from rhodecode.lib.vcs import get_backend
47 47 from rhodecode.lib.vcs.backends.base import BaseChangeset
48 48 from rhodecode.lib.vcs.utils.lazy import LazyProperty
49 49 from rhodecode.lib.vcs.utils.helpers import get_scm
50 50 from rhodecode.lib.vcs.exceptions import VCSError
51 51
52 52 from rhodecode.lib.caching_query import FromCache
53 53
54 54 from rhodecode.model import meta
55 55 from rhodecode.model.db import Repository, User, RhodeCodeUi, \
56 56 UserLog, RepoGroup, RhodeCodeSetting, CacheInvalidation
57 57 from rhodecode.model.meta import Session
58 58 from rhodecode.model.repos_group import ReposGroupModel
59 59 from rhodecode.lib.utils2 import safe_str, safe_unicode
60 60 from rhodecode.lib.vcs.utils.fakemod import create_module
61 61
62 62 log = logging.getLogger(__name__)
63 63
64 64 REMOVED_REPO_PAT = re.compile(r'rm__\d{8}_\d{6}_\d{6}__.*')
65 65
66 66
67 67 def recursive_replace(str_, replace=' '):
68 68 """
69 69 Recursive replace of given sign to just one instance
70 70
71 71 :param str_: given string
72 72 :param replace: char to find and replace multiple instances
73 73
74 74 Examples::
75 75 >>> recursive_replace("Mighty---Mighty-Bo--sstones",'-')
76 76 'Mighty-Mighty-Bo-sstones'
77 77 """
78 78
79 79 if str_.find(replace * 2) == -1:
80 80 return str_
81 81 else:
82 82 str_ = str_.replace(replace * 2, replace)
83 83 return recursive_replace(str_, replace)
84 84
85 85
86 86 def repo_name_slug(value):
87 87 """
88 88 Return slug of name of repository
89 89 This function is called on each creation/modification
90 90 of repository to prevent bad names in repo
91 91 """
92 92
93 93 slug = remove_formatting(value)
94 94 slug = strip_tags(slug)
95 95
96 96 for c in """`?=[]\;'"<>,/~!@#$%^&*()+{}|: """:
97 97 slug = slug.replace(c, '-')
98 98 slug = recursive_replace(slug, '-')
99 99 slug = collapse(slug, '-')
100 100 return slug
101 101
102 102
103 103 def get_repo_slug(request):
104 104 _repo = request.environ['pylons.routes_dict'].get('repo_name')
105 105 if _repo:
106 106 _repo = _repo.rstrip('/')
107 107 return _repo
108 108
109 109
110 110 def get_repos_group_slug(request):
111 111 _group = request.environ['pylons.routes_dict'].get('group_name')
112 112 if _group:
113 113 _group = _group.rstrip('/')
114 114 return _group
115 115
116 116
117 117 def action_logger(user, action, repo, ipaddr='', sa=None, commit=False):
118 118 """
119 119 Action logger for various actions made by users
120 120
121 121 :param user: user that made this action, can be a unique username string or
122 122 object containing user_id attribute
123 123 :param action: action to log, should be on of predefined unique actions for
124 124 easy translations
125 125 :param repo: string name of repository or object containing repo_id,
126 126 that action was made on
127 127 :param ipaddr: optional ip address from what the action was made
128 128 :param sa: optional sqlalchemy session
129 129
130 130 """
131 131
132 132 if not sa:
133 133 sa = meta.Session()
134 134
135 135 try:
136 136 if hasattr(user, 'user_id'):
137 137 user_obj = User.get(user.user_id)
138 138 elif isinstance(user, basestring):
139 139 user_obj = User.get_by_username(user)
140 140 else:
141 141 raise Exception('You have to provide a user object or a username')
142 142
143 143 if hasattr(repo, 'repo_id'):
144 144 repo_obj = Repository.get(repo.repo_id)
145 145 repo_name = repo_obj.repo_name
146 146 elif isinstance(repo, basestring):
147 147 repo_name = repo.lstrip('/')
148 148 repo_obj = Repository.get_by_repo_name(repo_name)
149 149 else:
150 150 repo_obj = None
151 151 repo_name = ''
152 152
153 153 user_log = UserLog()
154 154 user_log.user_id = user_obj.user_id
155 155 user_log.username = user_obj.username
156 156 user_log.action = safe_unicode(action)
157 157
158 158 user_log.repository = repo_obj
159 159 user_log.repository_name = repo_name
160 160
161 161 user_log.action_date = datetime.datetime.now()
162 162 user_log.user_ip = ipaddr
163 163 sa.add(user_log)
164 164
165 165 log.info('Logging action %s on %s by %s' %
166 166 (action, safe_unicode(repo), user_obj))
167 167 if commit:
168 168 sa.commit()
169 169 except:
170 170 log.error(traceback.format_exc())
171 171 raise
172 172
173 173
174 174 def get_repos(path, recursive=False, skip_removed_repos=True):
175 175 """
176 176 Scans given path for repos and return (name,(type,path)) tuple
177 177
178 178 :param path: path to scan for repositories
179 179 :param recursive: recursive search and return names with subdirs in front
180 180 """
181 181
182 182 # remove ending slash for better results
183 183 path = path.rstrip(os.sep)
184 184 log.debug('now scanning in %s location recursive:%s...' % (path, recursive))
185 185
186 186 def _get_repos(p):
187 187 if not os.access(p, os.W_OK):
188 188 return
189 189 for dirpath in os.listdir(p):
190 190 if os.path.isfile(os.path.join(p, dirpath)):
191 191 continue
192 192 cur_path = os.path.join(p, dirpath)
193 193
194 194 # skip removed repos
195 195 if skip_removed_repos and REMOVED_REPO_PAT.match(dirpath):
196 196 continue
197 197
198 198 #skip .<somethin> dirs
199 199 if dirpath.startswith('.'):
200 200 continue
201 201
202 202 try:
203 203 scm_info = get_scm(cur_path)
204 204 yield scm_info[1].split(path, 1)[-1].lstrip(os.sep), scm_info
205 205 except VCSError:
206 206 if not recursive:
207 207 continue
208 208 #check if this dir containts other repos for recursive scan
209 209 rec_path = os.path.join(p, dirpath)
210 210 if os.path.isdir(rec_path):
211 211 for inner_scm in _get_repos(rec_path):
212 212 yield inner_scm
213 213
214 214 return _get_repos(path)
215 215
216 216 #alias for backward compat
217 217 get_filesystem_repos = get_repos
218 218
219 219
220 220 def is_valid_repo(repo_name, base_path, scm=None):
221 221 """
222 222 Returns True if given path is a valid repository False otherwise.
223 223 If scm param is given also compare if given scm is the same as expected
224 224 from scm parameter
225 225
226 226 :param repo_name:
227 227 :param base_path:
228 228 :param scm:
229 229
230 230 :return True: if given path is a valid repository
231 231 """
232 232 full_path = os.path.join(safe_str(base_path), safe_str(repo_name))
233 233
234 234 try:
235 235 scm_ = get_scm(full_path)
236 236 if scm:
237 237 return scm_[0] == scm
238 238 return True
239 239 except VCSError:
240 240 return False
241 241
242 242
243 243 def is_valid_repos_group(repos_group_name, base_path):
244 244 """
245 245 Returns True if given path is a repos group False otherwise
246 246
247 247 :param repo_name:
248 248 :param base_path:
249 249 """
250 250 full_path = os.path.join(safe_str(base_path), safe_str(repos_group_name))
251 251
252 252 # check if it's not a repo
253 253 if is_valid_repo(repos_group_name, base_path):
254 254 return False
255 255
256 256 try:
257 257 # we need to check bare git repos at higher level
258 258 # since we might match branches/hooks/info/objects or possible
259 259 # other things inside bare git repo
260 260 get_scm(os.path.dirname(full_path))
261 261 return False
262 262 except VCSError:
263 263 pass
264 264
265 265 # check if it's a valid path
266 266 if os.path.isdir(full_path):
267 267 return True
268 268
269 269 return False
270 270
271 271
272 272 def ask_ok(prompt, retries=4, complaint='Yes or no please!'):
273 273 while True:
274 274 ok = raw_input(prompt)
275 275 if ok in ('y', 'ye', 'yes'):
276 276 return True
277 277 if ok in ('n', 'no', 'nop', 'nope'):
278 278 return False
279 279 retries = retries - 1
280 280 if retries < 0:
281 281 raise IOError
282 282 print complaint
283 283
284 284 #propagated from mercurial documentation
285 285 ui_sections = ['alias', 'auth',
286 286 'decode/encode', 'defaults',
287 287 'diff', 'email',
288 288 'extensions', 'format',
289 289 'merge-patterns', 'merge-tools',
290 290 'hooks', 'http_proxy',
291 291 'smtp', 'patch',
292 292 'paths', 'profiling',
293 293 'server', 'trusted',
294 294 'ui', 'web', ]
295 295
296 296
297 297 def make_ui(read_from='file', path=None, checkpaths=True, clear_session=True):
298 298 """
299 299 A function that will read python rc files or database
300 300 and make an mercurial ui object from read options
301 301
302 302 :param path: path to mercurial config file
303 303 :param checkpaths: check the path
304 304 :param read_from: read from 'file' or 'db'
305 305 """
306 306
307 307 baseui = ui.ui()
308 308
309 309 # clean the baseui object
310 310 baseui._ocfg = config.config()
311 311 baseui._ucfg = config.config()
312 312 baseui._tcfg = config.config()
313 313
314 314 if read_from == 'file':
315 315 if not os.path.isfile(path):
316 316 log.debug('hgrc file is not present at %s, skipping...' % path)
317 317 return False
318 318 log.debug('reading hgrc from %s' % path)
319 319 cfg = config.config()
320 320 cfg.read(path)
321 321 for section in ui_sections:
322 322 for k, v in cfg.items(section):
323 323 log.debug('settings ui from file: [%s] %s=%s' % (section, k, v))
324 324 baseui.setconfig(safe_str(section), safe_str(k), safe_str(v))
325 325
326 326 elif read_from == 'db':
327 327 sa = meta.Session()
328 328 ret = sa.query(RhodeCodeUi)\
329 329 .options(FromCache("sql_cache_short", "get_hg_ui_settings"))\
330 330 .all()
331 331
332 332 hg_ui = ret
333 333 for ui_ in hg_ui:
334 334 if ui_.ui_active:
335 335 log.debug('settings ui from db: [%s] %s=%s', ui_.ui_section,
336 336 ui_.ui_key, ui_.ui_value)
337 337 baseui.setconfig(safe_str(ui_.ui_section), safe_str(ui_.ui_key),
338 338 safe_str(ui_.ui_value))
339 339 if ui_.ui_key == 'push_ssl':
340 340 # force set push_ssl requirement to False, rhodecode
341 341 # handles that
342 342 baseui.setconfig(safe_str(ui_.ui_section), safe_str(ui_.ui_key),
343 343 False)
344 344 if clear_session:
345 345 meta.Session.remove()
346 346 return baseui
347 347
348 348
349 349 def set_rhodecode_config(config):
350 350 """
351 351 Updates pylons config with new settings from database
352 352
353 353 :param config:
354 354 """
355 355 hgsettings = RhodeCodeSetting.get_app_settings()
356 356
357 357 for k, v in hgsettings.items():
358 358 config[k] = v
359 359
360 360
361 361 def invalidate_cache(cache_key, *args):
362 362 """
363 363 Puts cache invalidation task into db for
364 364 further global cache invalidation
365 365 """
366 366
367 367 from rhodecode.model.scm import ScmModel
368 368
369 369 if cache_key.startswith('get_repo_cached_'):
370 370 name = cache_key.split('get_repo_cached_')[-1]
371 371 ScmModel().mark_for_invalidation(name)
372 372
373 373
374 374 def map_groups(path):
375 375 """
376 376 Given a full path to a repository, create all nested groups that this
377 377 repo is inside. This function creates parent-child relationships between
378 378 groups and creates default perms for all new groups.
379 379
380 380 :param paths: full path to repository
381 381 """
382 382 sa = meta.Session()
383 383 groups = path.split(Repository.url_sep())
384 384 parent = None
385 385 group = None
386 386
387 387 # last element is repo in nested groups structure
388 388 groups = groups[:-1]
389 389 rgm = ReposGroupModel(sa)
390 390 for lvl, group_name in enumerate(groups):
391 391 group_name = '/'.join(groups[:lvl] + [group_name])
392 392 group = RepoGroup.get_by_group_name(group_name)
393 393 desc = '%s group' % group_name
394 394
395 395 # skip folders that are now removed repos
396 396 if REMOVED_REPO_PAT.match(group_name):
397 397 break
398 398
399 399 if group is None:
400 400 log.debug('creating group level: %s group_name: %s' % (lvl,
401 401 group_name))
402 402 group = RepoGroup(group_name, parent)
403 403 group.group_description = desc
404 404 sa.add(group)
405 405 rgm._create_default_perms(group)
406 406 sa.flush()
407 407 parent = group
408 408 return group
409 409
410 410
411 411 def repo2db_mapper(initial_repo_list, remove_obsolete=False,
412 412 install_git_hook=False):
413 413 """
414 414 maps all repos given in initial_repo_list, non existing repositories
415 415 are created, if remove_obsolete is True it also check for db entries
416 416 that are not in initial_repo_list and removes them.
417 417
418 418 :param initial_repo_list: list of repositories found by scanning methods
419 419 :param remove_obsolete: check for obsolete entries in database
420 420 :param install_git_hook: if this is True, also check and install githook
421 421 for a repo if missing
422 422 """
423 423 from rhodecode.model.repo import RepoModel
424 424 from rhodecode.model.scm import ScmModel
425 425 sa = meta.Session()
426 426 rm = RepoModel()
427 427 user = sa.query(User).filter(User.admin == True).first()
428 428 if user is None:
429 429 raise Exception('Missing administrative account!')
430 430 added = []
431 431
432 432 # # clear cache keys
433 433 # log.debug("Clearing cache keys now...")
434 434 # CacheInvalidation.clear_cache()
435 435 # sa.commit()
436 436
437 437 ##creation defaults
438 438 defs = RhodeCodeSetting.get_default_repo_settings(strip_prefix=True)
439 439 enable_statistics = defs.get('repo_enable_statistics')
440 440 enable_locking = defs.get('repo_enable_locking')
441 441 enable_downloads = defs.get('repo_enable_downloads')
442 442 private = defs.get('repo_private')
443 443
444 444 for name, repo in initial_repo_list.items():
445 445 group = map_groups(name)
446 446 db_repo = rm.get_by_repo_name(name)
447 447 # found repo that is on filesystem not in RhodeCode database
448 448 if not db_repo:
449 449 log.info('repository %s not found, creating now' % name)
450 450 added.append(name)
451 451 desc = (repo.description
452 452 if repo.description != 'unknown'
453 453 else '%s repository' % name)
454 454
455 455 new_repo = rm.create_repo(
456 456 repo_name=name,
457 457 repo_type=repo.alias,
458 458 description=desc,
459 459 repos_group=getattr(group, 'group_id', None),
460 460 owner=user,
461 461 just_db=True,
462 462 enable_locking=enable_locking,
463 463 enable_downloads=enable_downloads,
464 464 enable_statistics=enable_statistics,
465 465 private=private
466 466 )
467 467 # we added that repo just now, and make sure it has githook
468 468 # installed
469 469 if new_repo.repo_type == 'git':
470 470 ScmModel().install_git_hook(new_repo.scm_instance)
471 471 new_repo.update_changeset_cache()
472 472 elif install_git_hook:
473 473 if db_repo.repo_type == 'git':
474 474 ScmModel().install_git_hook(db_repo.scm_instance)
475 475 # during starting install all cache keys for all repositories in the
476 476 # system, this will register all repos and multiple instances
477 477 key, _prefix, _org_key = CacheInvalidation._get_key(name)
478 478 CacheInvalidation.invalidate(name)
479 479 log.debug("Creating a cache key for %s, instance_id %s"
480 480 % (name, _prefix or 'unknown'))
481 481
482 482 sa.commit()
483 483 removed = []
484 484 if remove_obsolete:
485 485 # remove from database those repositories that are not in the filesystem
486 486 for repo in sa.query(Repository).all():
487 487 if repo.repo_name not in initial_repo_list.keys():
488 488 log.debug("Removing non-existing repository found in db `%s`" %
489 489 repo.repo_name)
490 490 try:
491 491 sa.delete(repo)
492 492 sa.commit()
493 493 removed.append(repo.repo_name)
494 494 except:
495 495 #don't hold further removals on error
496 496 log.error(traceback.format_exc())
497 497 sa.rollback()
498 498
499 499 return added, removed
500 500
501 501
502 502 # set cache regions for beaker so celery can utilise it
503 503 def add_cache(settings):
504 504 cache_settings = {'regions': None}
505 505 for key in settings.keys():
506 506 for prefix in ['beaker.cache.', 'cache.']:
507 507 if key.startswith(prefix):
508 508 name = key.split(prefix)[1].strip()
509 509 cache_settings[name] = settings[key].strip()
510 510 if cache_settings['regions']:
511 511 for region in cache_settings['regions'].split(','):
512 512 region = region.strip()
513 513 region_settings = {}
514 514 for key, value in cache_settings.items():
515 515 if key.startswith(region):
516 516 region_settings[key.split('.')[1]] = value
517 517 region_settings['expire'] = int(region_settings.get('expire',
518 518 60))
519 519 region_settings.setdefault('lock_dir',
520 520 cache_settings.get('lock_dir'))
521 521 region_settings.setdefault('data_dir',
522 522 cache_settings.get('data_dir'))
523 523
524 524 if 'type' not in region_settings:
525 525 region_settings['type'] = cache_settings.get('type',
526 526 'memory')
527 527 beaker.cache.cache_regions[region] = region_settings
528 528
529 529
530 530 def load_rcextensions(root_path):
531 531 import rhodecode
532 532 from rhodecode.config import conf
533 533
534 534 path = os.path.join(root_path, 'rcextensions', '__init__.py')
535 535 if os.path.isfile(path):
536 536 rcext = create_module('rc', path)
537 537 EXT = rhodecode.EXTENSIONS = rcext
538 538 log.debug('Found rcextensions now loading %s...' % rcext)
539 539
540 540 # Additional mappings that are not present in the pygments lexers
541 541 conf.LANGUAGES_EXTENSIONS_MAP.update(getattr(EXT, 'EXTRA_MAPPINGS', {}))
542 542
543 543 #OVERRIDE OUR EXTENSIONS FROM RC-EXTENSIONS (if present)
544 544
545 545 if getattr(EXT, 'INDEX_EXTENSIONS', []) != []:
546 546 log.debug('settings custom INDEX_EXTENSIONS')
547 547 conf.INDEX_EXTENSIONS = getattr(EXT, 'INDEX_EXTENSIONS', [])
548 548
549 549 #ADDITIONAL MAPPINGS
550 550 log.debug('adding extra into INDEX_EXTENSIONS')
551 551 conf.INDEX_EXTENSIONS.extend(getattr(EXT, 'EXTRA_INDEX_EXTENSIONS', []))
552 552
553 553 # auto check if the module is not missing any data, set to default if is
554 554 # this will help autoupdate new feature of rcext module
555 555 from rhodecode.config import rcextensions
556 556 for k in dir(rcextensions):
557 557 if not k.startswith('_') and not hasattr(EXT, k):
558 558 setattr(EXT, k, getattr(rcextensions, k))
559 559
560 560
561 561 def get_custom_lexer(extension):
562 562 """
563 563 returns a custom lexer if it's defined in rcextensions module, or None
564 564 if there's no custom lexer defined
565 565 """
566 566 import rhodecode
567 567 from pygments import lexers
568 568 #check if we didn't define this extension as other lexer
569 569 if rhodecode.EXTENSIONS and extension in rhodecode.EXTENSIONS.EXTRA_LEXERS:
570 570 _lexer_name = rhodecode.EXTENSIONS.EXTRA_LEXERS[extension]
571 571 return lexers.get_lexer_by_name(_lexer_name)
572 572
573 573
574 574 #==============================================================================
575 575 # TEST FUNCTIONS AND CREATORS
576 576 #==============================================================================
577 577 def create_test_index(repo_location, config, full_index):
578 578 """
579 579 Makes default test index
580 580
581 581 :param config: test config
582 582 :param full_index:
583 583 """
584 584
585 585 from rhodecode.lib.indexers.daemon import WhooshIndexingDaemon
586 586 from rhodecode.lib.pidlock import DaemonLock, LockHeld
587 587
588 588 repo_location = repo_location
589 589
590 590 index_location = os.path.join(config['app_conf']['index_dir'])
591 591 if not os.path.exists(index_location):
592 592 os.makedirs(index_location)
593 593
594 594 try:
595 595 l = DaemonLock(file_=jn(dn(index_location), 'make_index.lock'))
596 596 WhooshIndexingDaemon(index_location=index_location,
597 597 repo_location=repo_location)\
598 598 .run(full_index=full_index)
599 599 l.release()
600 600 except LockHeld:
601 601 pass
602 602
603 603
604 604 def create_test_env(repos_test_path, config):
605 605 """
606 606 Makes a fresh database and
607 607 install test repository into tmp dir
608 608 """
609 609 from rhodecode.lib.db_manage import DbManage
610 610 from rhodecode.tests import HG_REPO, GIT_REPO, TESTS_TMP_PATH
611 611
612 612 # PART ONE create db
613 613 dbconf = config['sqlalchemy.db1.url']
614 614 log.debug('making test db %s' % dbconf)
615 615
616 616 # create test dir if it doesn't exist
617 617 if not os.path.isdir(repos_test_path):
618 618 log.debug('Creating testdir %s' % repos_test_path)
619 619 os.makedirs(repos_test_path)
620 620
621 621 dbmanage = DbManage(log_sql=True, dbconf=dbconf, root=config['here'],
622 622 tests=True)
623 623 dbmanage.create_tables(override=True)
624 624 dbmanage.create_settings(dbmanage.config_prompt(repos_test_path))
625 625 dbmanage.create_default_user()
626 626 dbmanage.admin_prompt()
627 627 dbmanage.create_permissions()
628 628 dbmanage.populate_default_permissions()
629 629 Session().commit()
630 630 # PART TWO make test repo
631 631 log.debug('making test vcs repositories')
632 632
633 633 idx_path = config['app_conf']['index_dir']
634 634 data_path = config['app_conf']['cache_dir']
635 635
636 636 #clean index and data
637 637 if idx_path and os.path.exists(idx_path):
638 638 log.debug('remove %s' % idx_path)
639 639 shutil.rmtree(idx_path)
640 640
641 641 if data_path and os.path.exists(data_path):
642 642 log.debug('remove %s' % data_path)
643 643 shutil.rmtree(data_path)
644 644
645 645 #CREATE DEFAULT TEST REPOS
646 646 cur_dir = dn(dn(abspath(__file__)))
647 647 tar = tarfile.open(jn(cur_dir, 'tests', "vcs_test_hg.tar.gz"))
648 648 tar.extractall(jn(TESTS_TMP_PATH, HG_REPO))
649 649 tar.close()
650 650
651 651 cur_dir = dn(dn(abspath(__file__)))
652 652 tar = tarfile.open(jn(cur_dir, 'tests', "vcs_test_git.tar.gz"))
653 653 tar.extractall(jn(TESTS_TMP_PATH, GIT_REPO))
654 654 tar.close()
655 655
656 656 #LOAD VCS test stuff
657 657 from rhodecode.tests.vcs import setup_package
658 658 setup_package()
659 659
660 660
661 661 #==============================================================================
662 662 # PASTER COMMANDS
663 663 #==============================================================================
664 664 class BasePasterCommand(Command):
665 665 """
666 666 Abstract Base Class for paster commands.
667 667
668 668 The celery commands are somewhat aggressive about loading
669 669 celery.conf, and since our module sets the `CELERY_LOADER`
670 670 environment variable to our loader, we have to bootstrap a bit and
671 671 make sure we've had a chance to load the pylons config off of the
672 672 command line, otherwise everything fails.
673 673 """
674 674 min_args = 1
675 675 min_args_error = "Please provide a paster config file as an argument."
676 676 takes_config_file = 1
677 677 requires_config_file = True
678 678
679 679 def notify_msg(self, msg, log=False):
680 680 """Make a notification to user, additionally if logger is passed
681 681 it logs this action using given logger
682 682
683 683 :param msg: message that will be printed to user
684 684 :param log: logging instance, to use to additionally log this message
685 685
686 686 """
687 687 if log and isinstance(log, logging):
688 688 log(msg)
689 689
690 690 def run(self, args):
691 691 """
692 692 Overrides Command.run
693 693
694 694 Checks for a config file argument and loads it.
695 695 """
696 696 if len(args) < self.min_args:
697 697 raise BadCommand(
698 698 self.min_args_error % {'min_args': self.min_args,
699 699 'actual_args': len(args)})
700 700
701 701 # Decrement because we're going to lob off the first argument.
702 702 # @@ This is hacky
703 703 self.min_args -= 1
704 704 self.bootstrap_config(args[0])
705 705 self.update_parser()
706 706 return super(BasePasterCommand, self).run(args[1:])
707 707
708 708 def update_parser(self):
709 709 """
710 710 Abstract method. Allows for the class's parser to be updated
711 711 before the superclass's `run` method is called. Necessary to
712 712 allow options/arguments to be passed through to the underlying
713 713 celery command.
714 714 """
715 715 raise NotImplementedError("Abstract Method.")
716 716
717 717 def bootstrap_config(self, conf):
718 718 """
719 719 Loads the pylons configuration.
720 720 """
721 721 from pylons import config as pylonsconfig
722 722
723 723 self.path_to_ini_file = os.path.realpath(conf)
724 724 conf = paste.deploy.appconfig('config:' + self.path_to_ini_file)
725 725 pylonsconfig.init_app(conf.global_conf, conf.local_conf)
726 726
727 727 def _init_session(self):
728 728 """
729 729 Inits SqlAlchemy Session
730 730 """
731 731 logging.config.fileConfig(self.path_to_ini_file)
732 732 from pylons import config
733 733 from rhodecode.model import init_model
734 734 from rhodecode.lib.utils2 import engine_from_config
735 735
736 736 #get to remove repos !!
737 737 add_cache(config)
738 738 engine = engine_from_config(config, 'sqlalchemy.db1.')
739 739 init_model(engine)
740 740
741 741
742 742 def check_git_version():
743 743 """
744 744 Checks what version of git is installed in system, and issues a warning
745 745 if it's too old for RhodeCode to properly work.
746 746 """
747 import subprocess
747 from rhodecode import BACKENDS
748 from rhodecode.lib.vcs.backends.git.repository import GitRepository
748 749 from distutils.version import StrictVersion
749 from rhodecode import BACKENDS
750 750
751 p = subprocess.Popen('git --version', shell=True,
752 stdout=subprocess.PIPE, stderr=subprocess.PIPE)
753 stdout, stderr = p.communicate()
751 stdout, stderr = GitRepository._run_git_command('--version')
752
754 753 ver = (stdout.split(' ')[-1] or '').strip() or '0.0.0'
755 754 if len(ver.split('.')) > 3:
756 755 #StrictVersion needs to be only 3 element type
757 756 ver = '.'.join(ver.split('.')[:3])
758 757 try:
759 758 _ver = StrictVersion(ver)
760 759 except:
761 760 _ver = StrictVersion('0.0.0')
762 761 stderr = traceback.format_exc()
763 762
764 763 req_ver = '1.7.4'
765 764 to_old_git = False
766 765 if _ver < StrictVersion(req_ver):
767 766 to_old_git = True
768 767
769 768 if 'git' in BACKENDS:
770 769 log.debug('GIT version detected: %s' % stdout)
771 770 if stderr:
772 771 log.warning('Unable to detect git version org error was:%r' % stderr)
773 772 elif to_old_git:
774 773 log.warning('RhodeCode detected git version %s, which is too old '
775 774 'for the system to function properly. Make sure '
776 775 'its version is at least %s' % (ver, req_ver))
777 776 return _ver
778 777
779 778
780 779 @decorator.decorator
781 780 def jsonify(func, *args, **kwargs):
782 781 """Action decorator that formats output for JSON
783 782
784 783 Given a function that will return content, this decorator will turn
785 784 the result into JSON, with a content-type of 'application/json' and
786 785 output it.
787 786
788 787 """
789 788 from pylons.decorators.util import get_pylons
790 789 from rhodecode.lib.ext_json import json
791 790 pylons = get_pylons(args)
792 791 pylons.response.headers['Content-Type'] = 'application/json; charset=utf-8'
793 792 data = func(*args, **kwargs)
794 793 if isinstance(data, (list, tuple)):
795 794 msg = "JSON responses with Array envelopes are susceptible to " \
796 795 "cross-site data leak attacks, see " \
797 796 "http://wiki.pylonshq.com/display/pylonsfaq/Warnings"
798 797 warnings.warn(msg, Warning, 2)
799 798 log.warning(msg)
800 799 log.debug("Returning JSON wrapped action output")
801 800 return json.dumps(data, encoding='utf-8')
@@ -1,542 +1,544 b''
1 1 import re
2 2 from itertools import chain
3 3 from dulwich import objects
4 4 from subprocess import Popen, PIPE
5 import rhodecode
5 6 from rhodecode.lib.vcs.conf import settings
6 7 from rhodecode.lib.vcs.exceptions import RepositoryError
7 8 from rhodecode.lib.vcs.exceptions import ChangesetError
8 9 from rhodecode.lib.vcs.exceptions import NodeDoesNotExistError
9 10 from rhodecode.lib.vcs.exceptions import VCSError
10 11 from rhodecode.lib.vcs.exceptions import ChangesetDoesNotExistError
11 12 from rhodecode.lib.vcs.exceptions import ImproperArchiveTypeError
12 13 from rhodecode.lib.vcs.backends.base import BaseChangeset, EmptyChangeset
13 14 from rhodecode.lib.vcs.nodes import FileNode, DirNode, NodeKind, RootNode, \
14 15 RemovedFileNode, SubModuleNode, ChangedFileNodesGenerator,\
15 16 AddedFileNodesGenerator, RemovedFileNodesGenerator
16 17 from rhodecode.lib.vcs.utils import safe_unicode
17 18 from rhodecode.lib.vcs.utils import date_fromtimestamp
18 19 from rhodecode.lib.vcs.utils.lazy import LazyProperty
19 20
20 21
21 22 class GitChangeset(BaseChangeset):
22 23 """
23 24 Represents state of the repository at single revision.
24 25 """
25 26
26 27 def __init__(self, repository, revision):
27 28 self._stat_modes = {}
28 29 self.repository = repository
29 30
30 31 try:
31 32 commit = self.repository._repo.get_object(revision)
32 33 if isinstance(commit, objects.Tag):
33 34 revision = commit.object[1]
34 35 commit = self.repository._repo.get_object(commit.object[1])
35 36 except KeyError:
36 37 raise RepositoryError("Cannot get object with id %s" % revision)
37 38 self.raw_id = revision
38 39 self.id = self.raw_id
39 40 self.short_id = self.raw_id[:12]
40 41 self._commit = commit
41 42
42 43 self._tree_id = commit.tree
43 44 self._commiter_property = 'committer'
44 45 self._author_property = 'author'
45 46 self._date_property = 'commit_time'
46 47 self._date_tz_property = 'commit_timezone'
47 48 self.revision = repository.revisions.index(revision)
48 49
49 50 self.message = safe_unicode(commit.message)
50 51
51 52 self.nodes = {}
52 53 self._paths = {}
53 54
54 55 @LazyProperty
55 56 def commiter(self):
56 57 return safe_unicode(getattr(self._commit, self._commiter_property))
57 58
58 59 @LazyProperty
59 60 def author(self):
60 61 return safe_unicode(getattr(self._commit, self._author_property))
61 62
62 63 @LazyProperty
63 64 def date(self):
64 65 return date_fromtimestamp(getattr(self._commit, self._date_property),
65 66 getattr(self._commit, self._date_tz_property))
66 67
67 68 @LazyProperty
68 69 def _timestamp(self):
69 70 return getattr(self._commit, self._date_property)
70 71
71 72 @LazyProperty
72 73 def status(self):
73 74 """
74 75 Returns modified, added, removed, deleted files for current changeset
75 76 """
76 77 return self.changed, self.added, self.removed
77 78
78 79 @LazyProperty
79 80 def tags(self):
80 81 _tags = []
81 82 for tname, tsha in self.repository.tags.iteritems():
82 83 if tsha == self.raw_id:
83 84 _tags.append(tname)
84 85 return _tags
85 86
86 87 @LazyProperty
87 88 def branch(self):
88 89
89 90 heads = self.repository._heads(reverse=False)
90 91
91 92 ref = heads.get(self.raw_id)
92 93 if ref:
93 94 return safe_unicode(ref)
94 95
95 96 def _fix_path(self, path):
96 97 """
97 98 Paths are stored without trailing slash so we need to get rid off it if
98 99 needed.
99 100 """
100 101 if path.endswith('/'):
101 102 path = path.rstrip('/')
102 103 return path
103 104
104 105 def _get_id_for_path(self, path):
105 106
106 107 # FIXME: Please, spare a couple of minutes and make those codes cleaner;
107 108 if not path in self._paths:
108 109 path = path.strip('/')
109 110 # set root tree
110 111 tree = self.repository._repo[self._tree_id]
111 112 if path == '':
112 113 self._paths[''] = tree.id
113 114 return tree.id
114 115 splitted = path.split('/')
115 116 dirs, name = splitted[:-1], splitted[-1]
116 117 curdir = ''
117 118
118 119 # initially extract things from root dir
119 120 for item, stat, id in tree.iteritems():
120 121 if curdir:
121 122 name = '/'.join((curdir, item))
122 123 else:
123 124 name = item
124 125 self._paths[name] = id
125 126 self._stat_modes[name] = stat
126 127
127 128 for dir in dirs:
128 129 if curdir:
129 130 curdir = '/'.join((curdir, dir))
130 131 else:
131 132 curdir = dir
132 133 dir_id = None
133 134 for item, stat, id in tree.iteritems():
134 135 if dir == item:
135 136 dir_id = id
136 137 if dir_id:
137 138 # Update tree
138 139 tree = self.repository._repo[dir_id]
139 140 if not isinstance(tree, objects.Tree):
140 141 raise ChangesetError('%s is not a directory' % curdir)
141 142 else:
142 143 raise ChangesetError('%s have not been found' % curdir)
143 144
144 145 # cache all items from the given traversed tree
145 146 for item, stat, id in tree.iteritems():
146 147 if curdir:
147 148 name = '/'.join((curdir, item))
148 149 else:
149 150 name = item
150 151 self._paths[name] = id
151 152 self._stat_modes[name] = stat
152 153 if not path in self._paths:
153 154 raise NodeDoesNotExistError("There is no file nor directory "
154 155 "at the given path %r at revision %r"
155 156 % (path, self.short_id))
156 157 return self._paths[path]
157 158
158 159 def _get_kind(self, path):
159 160 obj = self.repository._repo[self._get_id_for_path(path)]
160 161 if isinstance(obj, objects.Blob):
161 162 return NodeKind.FILE
162 163 elif isinstance(obj, objects.Tree):
163 164 return NodeKind.DIR
164 165
165 166 def _get_filectx(self, path):
166 167 path = self._fix_path(path)
167 168 if self._get_kind(path) != NodeKind.FILE:
168 169 raise ChangesetError("File does not exist for revision %r at "
169 170 " %r" % (self.raw_id, path))
170 171 return path
171 172
172 173 def _get_file_nodes(self):
173 174 return chain(*(t[2] for t in self.walk()))
174 175
175 176 @LazyProperty
176 177 def parents(self):
177 178 """
178 179 Returns list of parents changesets.
179 180 """
180 181 return [self.repository.get_changeset(parent)
181 182 for parent in self._commit.parents]
182 183
183 184 @LazyProperty
184 185 def children(self):
185 186 """
186 187 Returns list of children changesets.
187 188 """
188 189 so, se = self.repository.run_git_command(
189 190 "rev-list --all --children | grep '^%s'" % self.raw_id
190 191 )
191 192
192 193 children = []
193 194 for l in so.splitlines():
194 195 childs = l.split(' ')[1:]
195 196 children.extend(childs)
196 197 return [self.repository.get_changeset(cs) for cs in children]
197 198
198 199 def next(self, branch=None):
199 200
200 201 if branch and self.branch != branch:
201 202 raise VCSError('Branch option used on changeset not belonging '
202 203 'to that branch')
203 204
204 205 def _next(changeset, branch):
205 206 try:
206 207 next_ = changeset.revision + 1
207 208 next_rev = changeset.repository.revisions[next_]
208 209 except IndexError:
209 210 raise ChangesetDoesNotExistError
210 211 cs = changeset.repository.get_changeset(next_rev)
211 212
212 213 if branch and branch != cs.branch:
213 214 return _next(cs, branch)
214 215
215 216 return cs
216 217
217 218 return _next(self, branch)
218 219
219 220 def prev(self, branch=None):
220 221 if branch and self.branch != branch:
221 222 raise VCSError('Branch option used on changeset not belonging '
222 223 'to that branch')
223 224
224 225 def _prev(changeset, branch):
225 226 try:
226 227 prev_ = changeset.revision - 1
227 228 if prev_ < 0:
228 229 raise IndexError
229 230 prev_rev = changeset.repository.revisions[prev_]
230 231 except IndexError:
231 232 raise ChangesetDoesNotExistError
232 233
233 234 cs = changeset.repository.get_changeset(prev_rev)
234 235
235 236 if branch and branch != cs.branch:
236 237 return _prev(cs, branch)
237 238
238 239 return cs
239 240
240 241 return _prev(self, branch)
241 242
242 243 def diff(self, ignore_whitespace=True, context=3):
243 244 rev1 = self.parents[0] if self.parents else self.repository.EMPTY_CHANGESET
244 245 rev2 = self
245 246 return ''.join(self.repository.get_diff(rev1, rev2,
246 247 ignore_whitespace=ignore_whitespace,
247 248 context=context))
248 249
249 250 def get_file_mode(self, path):
250 251 """
251 252 Returns stat mode of the file at the given ``path``.
252 253 """
253 254 # ensure path is traversed
254 255 self._get_id_for_path(path)
255 256 return self._stat_modes[path]
256 257
257 258 def get_file_content(self, path):
258 259 """
259 260 Returns content of the file at given ``path``.
260 261 """
261 262 id = self._get_id_for_path(path)
262 263 blob = self.repository._repo[id]
263 264 return blob.as_pretty_string()
264 265
265 266 def get_file_size(self, path):
266 267 """
267 268 Returns size of the file at given ``path``.
268 269 """
269 270 id = self._get_id_for_path(path)
270 271 blob = self.repository._repo[id]
271 272 return blob.raw_length()
272 273
273 274 def get_file_changeset(self, path):
274 275 """
275 276 Returns last commit of the file at the given ``path``.
276 277 """
277 278 node = self.get_node(path)
278 279 return node.history[0]
279 280
280 281 def get_file_history(self, path):
281 282 """
282 283 Returns history of file as reversed list of ``Changeset`` objects for
283 284 which file at given ``path`` has been modified.
284 285
285 286 TODO: This function now uses os underlying 'git' and 'grep' commands
286 287 which is generally not good. Should be replaced with algorithm
287 288 iterating commits.
288 289 """
289 290 self._get_filectx(path)
290 291
291 292 cmd = 'log --pretty="format: %%H" -s -p %s -- "%s"' % (
292 293 self.id, path
293 294 )
294 295 so, se = self.repository.run_git_command(cmd)
295 296 ids = re.findall(r'[0-9a-fA-F]{40}', so)
296 297 return [self.repository.get_changeset(id) for id in ids]
297 298
298 299 def get_file_history_2(self, path):
299 300 """
300 301 Returns history of file as reversed list of ``Changeset`` objects for
301 302 which file at given ``path`` has been modified.
302 303
303 304 """
304 305 self._get_filectx(path)
305 306 from dulwich.walk import Walker
306 307 include = [self.id]
307 308 walker = Walker(self.repository._repo.object_store, include,
308 309 paths=[path], max_entries=1)
309 310 return [self.repository.get_changeset(sha)
310 311 for sha in (x.commit.id for x in walker)]
311 312
312 313 def get_file_annotate(self, path):
313 314 """
314 315 Returns a generator of four element tuples with
315 316 lineno, sha, changeset lazy loader and line
316 317
317 318 TODO: This function now uses os underlying 'git' command which is
318 319 generally not good. Should be replaced with algorithm iterating
319 320 commits.
320 321 """
321 322 cmd = 'blame -l --root -r %s -- "%s"' % (self.id, path)
322 323 # -l ==> outputs long shas (and we need all 40 characters)
323 324 # --root ==> doesn't put '^' character for bounderies
324 325 # -r sha ==> blames for the given revision
325 326 so, se = self.repository.run_git_command(cmd)
326 327
327 328 for i, blame_line in enumerate(so.split('\n')[:-1]):
328 329 ln_no = i + 1
329 330 sha, line = re.split(r' ', blame_line, 1)
330 331 yield (ln_no, sha, lambda: self.repository.get_changeset(sha), line)
331 332
332 333 def fill_archive(self, stream=None, kind='tgz', prefix=None,
333 334 subrepos=False):
334 335 """
335 336 Fills up given stream.
336 337
337 338 :param stream: file like object.
338 339 :param kind: one of following: ``zip``, ``tgz`` or ``tbz2``.
339 340 Default: ``tgz``.
340 341 :param prefix: name of root directory in archive.
341 342 Default is repository name and changeset's raw_id joined with dash
342 343 (``repo-tip.<KIND>``).
343 344 :param subrepos: include subrepos in this archive.
344 345
345 346 :raise ImproperArchiveTypeError: If given kind is wrong.
346 347 :raise VcsError: If given stream is None
347 348
348 349 """
349 350 allowed_kinds = settings.ARCHIVE_SPECS.keys()
350 351 if kind not in allowed_kinds:
351 352 raise ImproperArchiveTypeError('Archive kind not supported use one'
352 353 'of %s', allowed_kinds)
353 354
354 355 if prefix is None:
355 356 prefix = '%s-%s' % (self.repository.name, self.short_id)
356 357 elif prefix.startswith('/'):
357 358 raise VCSError("Prefix cannot start with leading slash")
358 359 elif prefix.strip() == '':
359 360 raise VCSError("Prefix cannot be empty")
360 361
361 362 if kind == 'zip':
362 363 frmt = 'zip'
363 364 else:
364 365 frmt = 'tar'
365 cmd = 'git archive --format=%s --prefix=%s/ %s' % (frmt, prefix,
366 self.raw_id)
366 _git_path = rhodecode.CONFIG.get('git_path', 'git')
367 cmd = '%s archive --format=%s --prefix=%s/ %s' % (_git_path,
368 frmt, prefix, self.raw_id)
367 369 if kind == 'tgz':
368 370 cmd += ' | gzip -9'
369 371 elif kind == 'tbz2':
370 372 cmd += ' | bzip2 -9'
371 373
372 374 if stream is None:
373 375 raise VCSError('You need to pass in a valid stream for filling'
374 376 ' with archival data')
375 377 popen = Popen(cmd, stdout=PIPE, stderr=PIPE, shell=True,
376 378 cwd=self.repository.path)
377 379
378 380 buffer_size = 1024 * 8
379 381 chunk = popen.stdout.read(buffer_size)
380 382 while chunk:
381 383 stream.write(chunk)
382 384 chunk = popen.stdout.read(buffer_size)
383 385 # Make sure all descriptors would be read
384 386 popen.communicate()
385 387
386 388 def get_nodes(self, path):
387 389 if self._get_kind(path) != NodeKind.DIR:
388 390 raise ChangesetError("Directory does not exist for revision %r at "
389 391 " %r" % (self.revision, path))
390 392 path = self._fix_path(path)
391 393 id = self._get_id_for_path(path)
392 394 tree = self.repository._repo[id]
393 395 dirnodes = []
394 396 filenodes = []
395 397 als = self.repository.alias
396 398 for name, stat, id in tree.iteritems():
397 399 if objects.S_ISGITLINK(stat):
398 400 dirnodes.append(SubModuleNode(name, url=None, changeset=id,
399 401 alias=als))
400 402 continue
401 403
402 404 obj = self.repository._repo.get_object(id)
403 405 if path != '':
404 406 obj_path = '/'.join((path, name))
405 407 else:
406 408 obj_path = name
407 409 if obj_path not in self._stat_modes:
408 410 self._stat_modes[obj_path] = stat
409 411 if isinstance(obj, objects.Tree):
410 412 dirnodes.append(DirNode(obj_path, changeset=self))
411 413 elif isinstance(obj, objects.Blob):
412 414 filenodes.append(FileNode(obj_path, changeset=self, mode=stat))
413 415 else:
414 416 raise ChangesetError("Requested object should be Tree "
415 417 "or Blob, is %r" % type(obj))
416 418 nodes = dirnodes + filenodes
417 419 for node in nodes:
418 420 if not node.path in self.nodes:
419 421 self.nodes[node.path] = node
420 422 nodes.sort()
421 423 return nodes
422 424
423 425 def get_node(self, path):
424 426 if isinstance(path, unicode):
425 427 path = path.encode('utf-8')
426 428 path = self._fix_path(path)
427 429 if not path in self.nodes:
428 430 try:
429 431 id_ = self._get_id_for_path(path)
430 432 except ChangesetError:
431 433 raise NodeDoesNotExistError("Cannot find one of parents' "
432 434 "directories for a given path: %s" % path)
433 435
434 436 _GL = lambda m: m and objects.S_ISGITLINK(m)
435 437 if _GL(self._stat_modes.get(path)):
436 438 node = SubModuleNode(path, url=None, changeset=id_,
437 439 alias=self.repository.alias)
438 440 else:
439 441 obj = self.repository._repo.get_object(id_)
440 442
441 443 if isinstance(obj, objects.Tree):
442 444 if path == '':
443 445 node = RootNode(changeset=self)
444 446 else:
445 447 node = DirNode(path, changeset=self)
446 448 node._tree = obj
447 449 elif isinstance(obj, objects.Blob):
448 450 node = FileNode(path, changeset=self)
449 451 node._blob = obj
450 452 else:
451 453 raise NodeDoesNotExistError("There is no file nor directory "
452 454 "at the given path %r at revision %r"
453 455 % (path, self.short_id))
454 456 # cache node
455 457 self.nodes[path] = node
456 458 return self.nodes[path]
457 459
458 460 @LazyProperty
459 461 def affected_files(self):
460 462 """
461 463 Get's a fast accessible file changes for given changeset
462 464 """
463 465 a, m, d = self._changes_cache
464 466 return list(a.union(m).union(d))
465 467
466 468 @LazyProperty
467 469 def _diff_name_status(self):
468 470 output = []
469 471 for parent in self.parents:
470 472 cmd = 'diff --name-status %s %s --encoding=utf8' % (parent.raw_id,
471 473 self.raw_id)
472 474 so, se = self.repository.run_git_command(cmd)
473 475 output.append(so.strip())
474 476 return '\n'.join(output)
475 477
476 478 @LazyProperty
477 479 def _changes_cache(self):
478 480 added = set()
479 481 modified = set()
480 482 deleted = set()
481 483 _r = self.repository._repo
482 484
483 485 parents = self.parents
484 486 if not self.parents:
485 487 parents = [EmptyChangeset()]
486 488 for parent in parents:
487 489 if isinstance(parent, EmptyChangeset):
488 490 oid = None
489 491 else:
490 492 oid = _r[parent.raw_id].tree
491 493 changes = _r.object_store.tree_changes(oid, _r[self.raw_id].tree)
492 494 for (oldpath, newpath), (_, _), (_, _) in changes:
493 495 if newpath and oldpath:
494 496 modified.add(newpath)
495 497 elif newpath and not oldpath:
496 498 added.add(newpath)
497 499 elif not newpath and oldpath:
498 500 deleted.add(oldpath)
499 501 return added, modified, deleted
500 502
501 503 def _get_paths_for_status(self, status):
502 504 """
503 505 Returns sorted list of paths for given ``status``.
504 506
505 507 :param status: one of: *added*, *modified* or *deleted*
506 508 """
507 509 a, m, d = self._changes_cache
508 510 return sorted({
509 511 'added': list(a),
510 512 'modified': list(m),
511 513 'deleted': list(d)}[status]
512 514 )
513 515
514 516 @LazyProperty
515 517 def added(self):
516 518 """
517 519 Returns list of added ``FileNode`` objects.
518 520 """
519 521 if not self.parents:
520 522 return list(self._get_file_nodes())
521 523 return AddedFileNodesGenerator([n for n in
522 524 self._get_paths_for_status('added')], self)
523 525
524 526 @LazyProperty
525 527 def changed(self):
526 528 """
527 529 Returns list of modified ``FileNode`` objects.
528 530 """
529 531 if not self.parents:
530 532 return []
531 533 return ChangedFileNodesGenerator([n for n in
532 534 self._get_paths_for_status('modified')], self)
533 535
534 536 @LazyProperty
535 537 def removed(self):
536 538 """
537 539 Returns list of removed ``FileNode`` objects.
538 540 """
539 541 if not self.parents:
540 542 return []
541 543 return RemovedFileNodesGenerator([n for n in
542 544 self._get_paths_for_status('deleted')], self)
@@ -1,669 +1,673 b''
1 1 # -*- coding: utf-8 -*-
2 2 """
3 3 vcs.backends.git
4 4 ~~~~~~~~~~~~~~~~
5 5
6 6 Git backend implementation.
7 7
8 8 :created_on: Apr 8, 2010
9 9 :copyright: (c) 2010-2011 by Marcin Kuzminski, Lukasz Balcerzak.
10 10 """
11 11
12 12 import os
13 13 import re
14 14 import time
15 15 import posixpath
16 16 import logging
17 17 import traceback
18 18 import urllib
19 19 import urllib2
20 20 from dulwich.repo import Repo, NotGitRepository
21 21 from dulwich.objects import Tag
22 22 from string import Template
23 from subprocess import Popen, PIPE
23
24 import rhodecode
24 25 from rhodecode.lib.vcs.backends.base import BaseRepository
25 26 from rhodecode.lib.vcs.exceptions import BranchDoesNotExistError
26 27 from rhodecode.lib.vcs.exceptions import ChangesetDoesNotExistError
27 28 from rhodecode.lib.vcs.exceptions import EmptyRepositoryError
28 29 from rhodecode.lib.vcs.exceptions import RepositoryError
29 30 from rhodecode.lib.vcs.exceptions import TagAlreadyExistError
30 31 from rhodecode.lib.vcs.exceptions import TagDoesNotExistError
31 32 from rhodecode.lib.vcs.utils import safe_unicode, makedate, date_fromtimestamp
32 33 from rhodecode.lib.vcs.utils.lazy import LazyProperty, ThreadLocalLazyProperty
33 34 from rhodecode.lib.vcs.utils.ordered_dict import OrderedDict
34 35 from rhodecode.lib.vcs.utils.paths import abspath
35 36 from rhodecode.lib.vcs.utils.paths import get_user_home
36 37 from .workdir import GitWorkdir
37 38 from .changeset import GitChangeset
38 39 from .inmemory import GitInMemoryChangeset
39 40 from .config import ConfigFile
40 41 from rhodecode.lib import subprocessio
41 42
42 43
43 44 log = logging.getLogger(__name__)
44 45
45 46
46 47 class GitRepository(BaseRepository):
47 48 """
48 49 Git repository backend.
49 50 """
50 51 DEFAULT_BRANCH_NAME = 'master'
51 52 scm = 'git'
52 53
53 54 def __init__(self, repo_path, create=False, src_url=None,
54 55 update_after_clone=False, bare=False):
55 56
56 57 self.path = abspath(repo_path)
57 58 repo = self._get_repo(create, src_url, update_after_clone, bare)
58 59 self.bare = repo.bare
59 60
60 61 self._config_files = [
61 62 bare and abspath(self.path, 'config')
62 63 or abspath(self.path, '.git', 'config'),
63 64 abspath(get_user_home(), '.gitconfig'),
64 65 ]
65 66
66 67 @ThreadLocalLazyProperty
67 68 def _repo(self):
68 69 repo = Repo(self.path)
69 70 #temporary set that to now at later we will move it to constructor
70 71 baseui = None
71 72 if baseui is None:
72 73 from mercurial.ui import ui
73 74 baseui = ui()
74 75 # patch the instance of GitRepo with an "FAKE" ui object to add
75 76 # compatibility layer with Mercurial
76 77 setattr(repo, 'ui', baseui)
77 78 return repo
78 79
79 80 @property
80 81 def head(self):
81 82 try:
82 83 return self._repo.head()
83 84 except KeyError:
84 85 return None
85 86
86 87 @LazyProperty
87 88 def revisions(self):
88 89 """
89 90 Returns list of revisions' ids, in ascending order. Being lazy
90 91 attribute allows external tools to inject shas from cache.
91 92 """
92 93 return self._get_all_revisions()
93 94
94 def run_git_command(self, cmd):
95 @classmethod
96 def _run_git_command(cls, cmd, **opts):
95 97 """
96 98 Runs given ``cmd`` as git command and returns tuple
97 (returncode, stdout, stderr).
98
99 .. note::
100 This method exists only until log/blame functionality is implemented
101 at Dulwich (see https://bugs.launchpad.net/bugs/645142). Parsing
102 os command's output is road to hell...
99 (stdout, stderr).
103 100
104 101 :param cmd: git command to be executed
102 :param opts: env options to pass into Subprocess command
105 103 """
106 104
107 105 _copts = ['-c', 'core.quotepath=false', ]
108 106 _str_cmd = False
109 107 if isinstance(cmd, basestring):
110 108 cmd = [cmd]
111 109 _str_cmd = True
112 110
113 111 gitenv = os.environ
114 112 # need to clean fix GIT_DIR !
115 113 if 'GIT_DIR' in gitenv:
116 114 del gitenv['GIT_DIR']
117 115 gitenv['GIT_CONFIG_NOGLOBAL'] = '1'
118 116
119 cmd = ['git'] + _copts + cmd
117 _git_path = rhodecode.CONFIG.get('git_path', 'git')
118 cmd = [_git_path] + _copts + cmd
120 119 if _str_cmd:
121 120 cmd = ' '.join(cmd)
122 121 try:
123 opts = dict(
122 _opts = dict(
124 123 env=gitenv,
125 124 shell=False,
126 125 )
127 if os.path.isdir(self.path):
128 opts['cwd'] = self.path
129 p = subprocessio.SubprocessIOChunker(cmd, **opts)
126 _opts.update(opts)
127 p = subprocessio.SubprocessIOChunker(cmd, **_opts)
130 128 except (EnvironmentError, OSError), err:
131 129 log.error(traceback.format_exc())
132 130 raise RepositoryError("Couldn't run git command (%s).\n"
133 131 "Original error was:%s" % (cmd, err))
134 132
135 133 return ''.join(p.output), ''.join(p.error)
136 134
135 def run_git_command(self, cmd):
136 opts = {}
137 if os.path.isdir(self.path):
138 opts['cwd'] = self.path
139 return self._run_git_command(cmd, **opts)
140
137 141 @classmethod
138 142 def _check_url(cls, url):
139 143 """
140 144 Functon will check given url and try to verify if it's a valid
141 145 link. Sometimes it may happened that mercurial will issue basic
142 146 auth request that can cause whole API to hang when used from python
143 147 or other external calls.
144 148
145 149 On failures it'll raise urllib2.HTTPError
146 150 """
147 151 from mercurial.util import url as Url
148 152
149 153 # those authnadlers are patched for python 2.6.5 bug an
150 154 # infinit looping when given invalid resources
151 155 from mercurial.url import httpbasicauthhandler, httpdigestauthhandler
152 156
153 157 # check first if it's not an local url
154 158 if os.path.isdir(url) or url.startswith('file:'):
155 159 return True
156 160
157 161 if('+' in url[:url.find('://')]):
158 162 url = url[url.find('+') + 1:]
159 163
160 164 handlers = []
161 165 test_uri, authinfo = Url(url).authinfo()
162 166 if not test_uri.endswith('info/refs'):
163 167 test_uri = test_uri.rstrip('/') + '/info/refs'
164 168 if authinfo:
165 169 #create a password manager
166 170 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
167 171 passmgr.add_password(*authinfo)
168 172
169 173 handlers.extend((httpbasicauthhandler(passmgr),
170 174 httpdigestauthhandler(passmgr)))
171 175
172 176 o = urllib2.build_opener(*handlers)
173 177 o.addheaders = [('User-Agent', 'git/1.7.8.0')] # fake some git
174 178
175 179 q = {"service": 'git-upload-pack'}
176 180 qs = '?%s' % urllib.urlencode(q)
177 181 cu = "%s%s" % (test_uri, qs)
178 182 req = urllib2.Request(cu, None, {})
179 183
180 184 try:
181 185 resp = o.open(req)
182 186 return resp.code == 200
183 187 except Exception, e:
184 188 # means it cannot be cloned
185 189 raise urllib2.URLError("[%s] %s" % (url, e))
186 190
187 191 def _get_repo(self, create, src_url=None, update_after_clone=False,
188 192 bare=False):
189 193 if create and os.path.exists(self.path):
190 194 raise RepositoryError("Location already exist")
191 195 if src_url and not create:
192 196 raise RepositoryError("Create should be set to True if src_url is "
193 197 "given (clone operation creates repository)")
194 198 try:
195 199 if create and src_url:
196 200 GitRepository._check_url(src_url)
197 201 self.clone(src_url, update_after_clone, bare)
198 202 return Repo(self.path)
199 203 elif create:
200 204 os.mkdir(self.path)
201 205 if bare:
202 206 return Repo.init_bare(self.path)
203 207 else:
204 208 return Repo.init(self.path)
205 209 else:
206 210 return Repo(self.path)
207 211 except (NotGitRepository, OSError), err:
208 212 raise RepositoryError(err)
209 213
210 214 def _get_all_revisions(self):
211 215 # we must check if this repo is not empty, since later command
212 216 # fails if it is. And it's cheaper to ask than throw the subprocess
213 217 # errors
214 218 try:
215 219 self._repo.head()
216 220 except KeyError:
217 221 return []
218 222 cmd = 'rev-list --all --reverse --date-order'
219 223 try:
220 224 so, se = self.run_git_command(cmd)
221 225 except RepositoryError:
222 226 # Can be raised for empty repositories
223 227 return []
224 228 return so.splitlines()
225 229
226 230 def _get_all_revisions2(self):
227 231 #alternate implementation using dulwich
228 232 includes = [x[1][0] for x in self._parsed_refs.iteritems()
229 233 if x[1][1] != 'T']
230 234 return [c.commit.id for c in self._repo.get_walker(include=includes)]
231 235
232 236 def _get_revision(self, revision):
233 237 """
234 238 For git backend we always return integer here. This way we ensure
235 239 that changset's revision attribute would become integer.
236 240 """
237 241 pattern = re.compile(r'^[[0-9a-fA-F]{12}|[0-9a-fA-F]{40}]$')
238 242 is_bstr = lambda o: isinstance(o, (str, unicode))
239 243 is_null = lambda o: len(o) == revision.count('0')
240 244
241 245 if len(self.revisions) == 0:
242 246 raise EmptyRepositoryError("There are no changesets yet")
243 247
244 248 if revision in (None, '', 'tip', 'HEAD', 'head', -1):
245 249 revision = self.revisions[-1]
246 250
247 251 if ((is_bstr(revision) and revision.isdigit() and len(revision) < 12)
248 252 or isinstance(revision, int) or is_null(revision)):
249 253 try:
250 254 revision = self.revisions[int(revision)]
251 255 except:
252 256 raise ChangesetDoesNotExistError("Revision %r does not exist "
253 257 "for this repository %s" % (revision, self))
254 258
255 259 elif is_bstr(revision):
256 260 # get by branch/tag name
257 261 _ref_revision = self._parsed_refs.get(revision)
258 262 _tags_shas = self.tags.values()
259 263 if _ref_revision: # and _ref_revision[1] in ['H', 'RH', 'T']:
260 264 return _ref_revision[0]
261 265
262 266 # maybe it's a tag ? we don't have them in self.revisions
263 267 elif revision in _tags_shas:
264 268 return _tags_shas[_tags_shas.index(revision)]
265 269
266 270 elif not pattern.match(revision) or revision not in self.revisions:
267 271 raise ChangesetDoesNotExistError("Revision %r does not exist "
268 272 "for this repository %s" % (revision, self))
269 273
270 274 # Ensure we return full id
271 275 if not pattern.match(str(revision)):
272 276 raise ChangesetDoesNotExistError("Given revision %r not recognized"
273 277 % revision)
274 278 return revision
275 279
276 280 def _get_archives(self, archive_name='tip'):
277 281
278 282 for i in [('zip', '.zip'), ('gz', '.tar.gz'), ('bz2', '.tar.bz2')]:
279 283 yield {"type": i[0], "extension": i[1], "node": archive_name}
280 284
281 285 def _get_url(self, url):
282 286 """
283 287 Returns normalized url. If schema is not given, would fall to
284 288 filesystem (``file:///``) schema.
285 289 """
286 290 url = str(url)
287 291 if url != 'default' and not '://' in url:
288 292 url = ':///'.join(('file', url))
289 293 return url
290 294
291 295 @LazyProperty
292 296 def name(self):
293 297 return os.path.basename(self.path)
294 298
295 299 @LazyProperty
296 300 def last_change(self):
297 301 """
298 302 Returns last change made on this repository as datetime object
299 303 """
300 304 return date_fromtimestamp(self._get_mtime(), makedate()[1])
301 305
302 306 def _get_mtime(self):
303 307 try:
304 308 return time.mktime(self.get_changeset().date.timetuple())
305 309 except RepositoryError:
306 310 idx_loc = '' if self.bare else '.git'
307 311 # fallback to filesystem
308 312 in_path = os.path.join(self.path, idx_loc, "index")
309 313 he_path = os.path.join(self.path, idx_loc, "HEAD")
310 314 if os.path.exists(in_path):
311 315 return os.stat(in_path).st_mtime
312 316 else:
313 317 return os.stat(he_path).st_mtime
314 318
315 319 @LazyProperty
316 320 def description(self):
317 321 idx_loc = '' if self.bare else '.git'
318 322 undefined_description = u'unknown'
319 323 description_path = os.path.join(self.path, idx_loc, 'description')
320 324 if os.path.isfile(description_path):
321 325 return safe_unicode(open(description_path).read())
322 326 else:
323 327 return undefined_description
324 328
325 329 @LazyProperty
326 330 def contact(self):
327 331 undefined_contact = u'Unknown'
328 332 return undefined_contact
329 333
330 334 @property
331 335 def branches(self):
332 336 if not self.revisions:
333 337 return {}
334 338 sortkey = lambda ctx: ctx[0]
335 339 _branches = [(x[0], x[1][0])
336 340 for x in self._parsed_refs.iteritems() if x[1][1] == 'H']
337 341 return OrderedDict(sorted(_branches, key=sortkey, reverse=False))
338 342
339 343 @LazyProperty
340 344 def tags(self):
341 345 return self._get_tags()
342 346
343 347 def _get_tags(self):
344 348 if not self.revisions:
345 349 return {}
346 350
347 351 sortkey = lambda ctx: ctx[0]
348 352 _tags = [(x[0], x[1][0])
349 353 for x in self._parsed_refs.iteritems() if x[1][1] == 'T']
350 354 return OrderedDict(sorted(_tags, key=sortkey, reverse=True))
351 355
352 356 def tag(self, name, user, revision=None, message=None, date=None,
353 357 **kwargs):
354 358 """
355 359 Creates and returns a tag for the given ``revision``.
356 360
357 361 :param name: name for new tag
358 362 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
359 363 :param revision: changeset id for which new tag would be created
360 364 :param message: message of the tag's commit
361 365 :param date: date of tag's commit
362 366
363 367 :raises TagAlreadyExistError: if tag with same name already exists
364 368 """
365 369 if name in self.tags:
366 370 raise TagAlreadyExistError("Tag %s already exists" % name)
367 371 changeset = self.get_changeset(revision)
368 372 message = message or "Added tag %s for commit %s" % (name,
369 373 changeset.raw_id)
370 374 self._repo.refs["refs/tags/%s" % name] = changeset._commit.id
371 375
372 376 self._parsed_refs = self._get_parsed_refs()
373 377 self.tags = self._get_tags()
374 378 return changeset
375 379
376 380 def remove_tag(self, name, user, message=None, date=None):
377 381 """
378 382 Removes tag with the given ``name``.
379 383
380 384 :param name: name of the tag to be removed
381 385 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
382 386 :param message: message of the tag's removal commit
383 387 :param date: date of tag's removal commit
384 388
385 389 :raises TagDoesNotExistError: if tag with given name does not exists
386 390 """
387 391 if name not in self.tags:
388 392 raise TagDoesNotExistError("Tag %s does not exist" % name)
389 393 tagpath = posixpath.join(self._repo.refs.path, 'refs', 'tags', name)
390 394 try:
391 395 os.remove(tagpath)
392 396 self._parsed_refs = self._get_parsed_refs()
393 397 self.tags = self._get_tags()
394 398 except OSError, e:
395 399 raise RepositoryError(e.strerror)
396 400
397 401 @LazyProperty
398 402 def _parsed_refs(self):
399 403 return self._get_parsed_refs()
400 404
401 405 def _get_parsed_refs(self):
402 406 refs = self._repo.get_refs()
403 407 keys = [('refs/heads/', 'H'),
404 408 ('refs/remotes/origin/', 'RH'),
405 409 ('refs/tags/', 'T')]
406 410 _refs = {}
407 411 for ref, sha in refs.iteritems():
408 412 for k, type_ in keys:
409 413 if ref.startswith(k):
410 414 _key = ref[len(k):]
411 415 if type_ == 'T':
412 416 obj = self._repo.get_object(sha)
413 417 if isinstance(obj, Tag):
414 418 sha = self._repo.get_object(sha).object[1]
415 419 _refs[_key] = [sha, type_]
416 420 break
417 421 return _refs
418 422
419 423 def _heads(self, reverse=False):
420 424 refs = self._repo.get_refs()
421 425 heads = {}
422 426
423 427 for key, val in refs.items():
424 428 for ref_key in ['refs/heads/', 'refs/remotes/origin/']:
425 429 if key.startswith(ref_key):
426 430 n = key[len(ref_key):]
427 431 if n not in ['HEAD']:
428 432 heads[n] = val
429 433
430 434 return heads if reverse else dict((y, x) for x, y in heads.iteritems())
431 435
432 436 def get_changeset(self, revision=None):
433 437 """
434 438 Returns ``GitChangeset`` object representing commit from git repository
435 439 at the given revision or head (most recent commit) if None given.
436 440 """
437 441 if isinstance(revision, GitChangeset):
438 442 return revision
439 443 revision = self._get_revision(revision)
440 444 changeset = GitChangeset(repository=self, revision=revision)
441 445 return changeset
442 446
443 447 def get_changesets(self, start=None, end=None, start_date=None,
444 448 end_date=None, branch_name=None, reverse=False):
445 449 """
446 450 Returns iterator of ``GitChangeset`` objects from start to end (both
447 451 are inclusive), in ascending date order (unless ``reverse`` is set).
448 452
449 453 :param start: changeset ID, as str; first returned changeset
450 454 :param end: changeset ID, as str; last returned changeset
451 455 :param start_date: if specified, changesets with commit date less than
452 456 ``start_date`` would be filtered out from returned set
453 457 :param end_date: if specified, changesets with commit date greater than
454 458 ``end_date`` would be filtered out from returned set
455 459 :param branch_name: if specified, changesets not reachable from given
456 460 branch would be filtered out from returned set
457 461 :param reverse: if ``True``, returned generator would be reversed
458 462 (meaning that returned changesets would have descending date order)
459 463
460 464 :raise BranchDoesNotExistError: If given ``branch_name`` does not
461 465 exist.
462 466 :raise ChangesetDoesNotExistError: If changeset for given ``start`` or
463 467 ``end`` could not be found.
464 468
465 469 """
466 470 if branch_name and branch_name not in self.branches:
467 471 raise BranchDoesNotExistError("Branch '%s' not found" \
468 472 % branch_name)
469 473 # %H at format means (full) commit hash, initial hashes are retrieved
470 474 # in ascending date order
471 475 cmd_template = 'log --date-order --reverse --pretty=format:"%H"'
472 476 cmd_params = {}
473 477 if start_date:
474 478 cmd_template += ' --since "$since"'
475 479 cmd_params['since'] = start_date.strftime('%m/%d/%y %H:%M:%S')
476 480 if end_date:
477 481 cmd_template += ' --until "$until"'
478 482 cmd_params['until'] = end_date.strftime('%m/%d/%y %H:%M:%S')
479 483 if branch_name:
480 484 cmd_template += ' $branch_name'
481 485 cmd_params['branch_name'] = branch_name
482 486 else:
483 487 cmd_template += ' --all'
484 488
485 489 cmd = Template(cmd_template).safe_substitute(**cmd_params)
486 490 revs = self.run_git_command(cmd)[0].splitlines()
487 491 start_pos = 0
488 492 end_pos = len(revs)
489 493 if start:
490 494 _start = self._get_revision(start)
491 495 try:
492 496 start_pos = revs.index(_start)
493 497 except ValueError:
494 498 pass
495 499
496 500 if end is not None:
497 501 _end = self._get_revision(end)
498 502 try:
499 503 end_pos = revs.index(_end)
500 504 except ValueError:
501 505 pass
502 506
503 507 if None not in [start, end] and start_pos > end_pos:
504 508 raise RepositoryError('start cannot be after end')
505 509
506 510 if end_pos is not None:
507 511 end_pos += 1
508 512
509 513 revs = revs[start_pos:end_pos]
510 514 if reverse:
511 515 revs = reversed(revs)
512 516 for rev in revs:
513 517 yield self.get_changeset(rev)
514 518
515 519 def get_diff(self, rev1, rev2, path=None, ignore_whitespace=False,
516 520 context=3):
517 521 """
518 522 Returns (git like) *diff*, as plain text. Shows changes introduced by
519 523 ``rev2`` since ``rev1``.
520 524
521 525 :param rev1: Entry point from which diff is shown. Can be
522 526 ``self.EMPTY_CHANGESET`` - in this case, patch showing all
523 527 the changes since empty state of the repository until ``rev2``
524 528 :param rev2: Until which revision changes should be shown.
525 529 :param ignore_whitespace: If set to ``True``, would not show whitespace
526 530 changes. Defaults to ``False``.
527 531 :param context: How many lines before/after changed lines should be
528 532 shown. Defaults to ``3``.
529 533 """
530 534 flags = ['-U%s' % context, '--full-index', '--binary', '-p', '-M', '--abbrev=40']
531 535 if ignore_whitespace:
532 536 flags.append('-w')
533 537
534 538 if hasattr(rev1, 'raw_id'):
535 539 rev1 = getattr(rev1, 'raw_id')
536 540
537 541 if hasattr(rev2, 'raw_id'):
538 542 rev2 = getattr(rev2, 'raw_id')
539 543
540 544 if rev1 == self.EMPTY_CHANGESET:
541 545 rev2 = self.get_changeset(rev2).raw_id
542 546 cmd = ' '.join(['show'] + flags + [rev2])
543 547 else:
544 548 rev1 = self.get_changeset(rev1).raw_id
545 549 rev2 = self.get_changeset(rev2).raw_id
546 550 cmd = ' '.join(['diff'] + flags + [rev1, rev2])
547 551
548 552 if path:
549 553 cmd += ' -- "%s"' % path
550 554
551 555 stdout, stderr = self.run_git_command(cmd)
552 556 # If we used 'show' command, strip first few lines (until actual diff
553 557 # starts)
554 558 if rev1 == self.EMPTY_CHANGESET:
555 559 lines = stdout.splitlines()
556 560 x = 0
557 561 for line in lines:
558 562 if line.startswith('diff'):
559 563 break
560 564 x += 1
561 565 # Append new line just like 'diff' command do
562 566 stdout = '\n'.join(lines[x:]) + '\n'
563 567 return stdout
564 568
565 569 @LazyProperty
566 570 def in_memory_changeset(self):
567 571 """
568 572 Returns ``GitInMemoryChangeset`` object for this repository.
569 573 """
570 574 return GitInMemoryChangeset(self)
571 575
572 576 def clone(self, url, update_after_clone=True, bare=False):
573 577 """
574 578 Tries to clone changes from external location.
575 579
576 580 :param update_after_clone: If set to ``False``, git won't checkout
577 581 working directory
578 582 :param bare: If set to ``True``, repository would be cloned into
579 583 *bare* git repository (no working directory at all).
580 584 """
581 585 url = self._get_url(url)
582 586 cmd = ['clone']
583 587 if bare:
584 588 cmd.append('--bare')
585 589 elif not update_after_clone:
586 590 cmd.append('--no-checkout')
587 591 cmd += ['--', '"%s"' % url, '"%s"' % self.path]
588 592 cmd = ' '.join(cmd)
589 593 # If error occurs run_git_command raises RepositoryError already
590 594 self.run_git_command(cmd)
591 595
592 596 def pull(self, url):
593 597 """
594 598 Tries to pull changes from external location.
595 599 """
596 600 url = self._get_url(url)
597 601 cmd = ['pull']
598 602 cmd.append("--ff-only")
599 603 cmd.append(url)
600 604 cmd = ' '.join(cmd)
601 605 # If error occurs run_git_command raises RepositoryError already
602 606 self.run_git_command(cmd)
603 607
604 608 def fetch(self, url):
605 609 """
606 610 Tries to pull changes from external location.
607 611 """
608 612 url = self._get_url(url)
609 613 so, se = self.run_git_command('ls-remote -h %s' % url)
610 614 refs = []
611 615 for line in (x for x in so.splitlines()):
612 616 sha, ref = line.split('\t')
613 617 refs.append(ref)
614 618 refs = ' '.join(('+%s:%s' % (r, r) for r in refs))
615 619 cmd = '''fetch %s -- %s''' % (url, refs)
616 620 self.run_git_command(cmd)
617 621
618 622 @LazyProperty
619 623 def workdir(self):
620 624 """
621 625 Returns ``Workdir`` instance for this repository.
622 626 """
623 627 return GitWorkdir(self)
624 628
625 629 def get_config_value(self, section, name, config_file=None):
626 630 """
627 631 Returns configuration value for a given [``section``] and ``name``.
628 632
629 633 :param section: Section we want to retrieve value from
630 634 :param name: Name of configuration we want to retrieve
631 635 :param config_file: A path to file which should be used to retrieve
632 636 configuration from (might also be a list of file paths)
633 637 """
634 638 if config_file is None:
635 639 config_file = []
636 640 elif isinstance(config_file, basestring):
637 641 config_file = [config_file]
638 642
639 643 def gen_configs():
640 644 for path in config_file + self._config_files:
641 645 try:
642 646 yield ConfigFile.from_path(path)
643 647 except (IOError, OSError, ValueError):
644 648 continue
645 649
646 650 for config in gen_configs():
647 651 try:
648 652 return config.get(section, name)
649 653 except KeyError:
650 654 continue
651 655 return None
652 656
653 657 def get_user_name(self, config_file=None):
654 658 """
655 659 Returns user's name from global configuration file.
656 660
657 661 :param config_file: A path to file which should be used to retrieve
658 662 configuration from (might also be a list of file paths)
659 663 """
660 664 return self.get_config_value('user', 'name', config_file)
661 665
662 666 def get_user_email(self, config_file=None):
663 667 """
664 668 Returns user's email from global configuration file.
665 669
666 670 :param config_file: A path to file which should be used to retrieve
667 671 configuration from (might also be a list of file paths)
668 672 """
669 673 return self.get_config_value('user', 'email', config_file)
General Comments 0
You need to be logged in to leave comments. Login now