Show More
@@ -1,477 +1,481 b'' | |||||
1 | ################################################################################ |
|
1 | ################################################################################ | |
2 | ################################################################################ |
|
2 | ################################################################################ | |
3 | # RhodeCode - Pylons environment configuration # |
|
3 | # RhodeCode - Pylons environment configuration # | |
4 | # # |
|
4 | # # | |
5 | # The %(here)s variable will be replaced with the parent directory of this file# |
|
5 | # The %(here)s variable will be replaced with the parent directory of this file# | |
6 | ################################################################################ |
|
6 | ################################################################################ | |
7 |
|
7 | |||
8 | [DEFAULT] |
|
8 | [DEFAULT] | |
9 | debug = true |
|
9 | debug = true | |
10 | pdebug = false |
|
10 | pdebug = false | |
11 | ################################################################################ |
|
11 | ################################################################################ | |
12 | ## Uncomment and replace with the address which should receive ## |
|
12 | ## Uncomment and replace with the address which should receive ## | |
13 | ## any error reports after application crash ## |
|
13 | ## any error reports after application crash ## | |
14 | ## Additionally those settings will be used by RhodeCode mailing system ## |
|
14 | ## Additionally those settings will be used by RhodeCode mailing system ## | |
15 | ################################################################################ |
|
15 | ################################################################################ | |
16 | #email_to = admin@localhost |
|
16 | #email_to = admin@localhost | |
17 | #error_email_from = paste_error@localhost |
|
17 | #error_email_from = paste_error@localhost | |
18 | #app_email_from = rhodecode-noreply@localhost |
|
18 | #app_email_from = rhodecode-noreply@localhost | |
19 | #error_message = |
|
19 | #error_message = | |
20 | #email_prefix = [RhodeCode] |
|
20 | #email_prefix = [RhodeCode] | |
21 |
|
21 | |||
22 | #smtp_server = mail.server.com |
|
22 | #smtp_server = mail.server.com | |
23 | #smtp_username = |
|
23 | #smtp_username = | |
24 | #smtp_password = |
|
24 | #smtp_password = | |
25 | #smtp_port = |
|
25 | #smtp_port = | |
26 | #smtp_use_tls = false |
|
26 | #smtp_use_tls = false | |
27 | #smtp_use_ssl = true |
|
27 | #smtp_use_ssl = true | |
28 | ## Specify available auth parameters here (e.g. LOGIN PLAIN CRAM-MD5, etc.) |
|
28 | ## Specify available auth parameters here (e.g. LOGIN PLAIN CRAM-MD5, etc.) | |
29 | #smtp_auth = |
|
29 | #smtp_auth = | |
30 |
|
30 | |||
31 | [server:main] |
|
31 | [server:main] | |
32 | ## PASTE |
|
32 | ## PASTE | |
33 | ## nr of threads to spawn |
|
33 | ## nr of threads to spawn | |
34 | #threadpool_workers = 5 |
|
34 | #threadpool_workers = 5 | |
35 |
|
35 | |||
36 | ## max request before thread respawn |
|
36 | ## max request before thread respawn | |
37 | #threadpool_max_requests = 10 |
|
37 | #threadpool_max_requests = 10 | |
38 |
|
38 | |||
39 | ## option to use threads instead of processes |
|
39 | ## option to use threads of process | |
40 | #use_threadpool = true |
|
40 | #use_threadpool = true | |
41 |
|
41 | |||
42 | #use = egg:Paste#http |
|
42 | #use = egg:Paste#http | |
43 |
|
43 | |||
44 | ## WAITRESS |
|
44 | ## WAITRESS | |
45 | threads = 5 |
|
45 | threads = 5 | |
46 | ## 100GB |
|
46 | ## 100GB | |
47 | max_request_body_size = 107374182400 |
|
47 | max_request_body_size = 107374182400 | |
48 | use = egg:waitress#main |
|
48 | use = egg:waitress#main | |
49 |
|
49 | |||
50 | host = 0.0.0.0 |
|
50 | host = 0.0.0.0 | |
51 | port = 5000 |
|
51 | port = 5000 | |
52 |
|
52 | |||
53 | ## prefix middleware for rc |
|
53 | ## prefix middleware for rc | |
54 | #[filter:proxy-prefix] |
|
54 | #[filter:proxy-prefix] | |
55 | #use = egg:PasteDeploy#prefix |
|
55 | #use = egg:PasteDeploy#prefix | |
56 | #prefix = /<your-prefix> |
|
56 | #prefix = /<your-prefix> | |
57 |
|
57 | |||
58 | [app:main] |
|
58 | [app:main] | |
59 | use = egg:rhodecode |
|
59 | use = egg:rhodecode | |
60 | ## enable proxy prefix middleware |
|
60 | ## enable proxy prefix middleware | |
61 | #filter-with = proxy-prefix |
|
61 | #filter-with = proxy-prefix | |
62 |
|
62 | |||
63 | full_stack = true |
|
63 | full_stack = true | |
64 | static_files = true |
|
64 | static_files = true | |
65 | ## Optional Languages |
|
65 | ## Optional Languages | |
66 | ## en, fr, ja, pt_BR, zh_CN, zh_TW, pl |
|
66 | ## en, fr, ja, pt_BR, zh_CN, zh_TW, pl | |
67 | lang = en |
|
67 | lang = en | |
68 | cache_dir = %(here)s/data |
|
68 | cache_dir = %(here)s/data | |
69 | index_dir = %(here)s/data/index |
|
69 | index_dir = %(here)s/data/index | |
70 |
|
70 | |||
71 | ## uncomment and set this path to use archive download cache |
|
71 | ## uncomment and set this path to use archive download cache | |
72 | #archive_cache_dir = /tmp/tarballcache |
|
72 | #archive_cache_dir = /tmp/tarballcache | |
73 |
|
73 | |||
74 | ## change this to unique ID for security |
|
74 | ## change this to unique ID for security | |
75 | app_instance_uuid = rc-production |
|
75 | app_instance_uuid = rc-production | |
76 |
|
76 | |||
77 | ## cut off limit for large diffs (size in bytes) |
|
77 | ## cut off limit for large diffs (size in bytes) | |
78 | cut_off_limit = 256000 |
|
78 | cut_off_limit = 256000 | |
79 |
|
79 | |||
80 | ## use cache version of scm repo everywhere |
|
80 | ## use cache version of scm repo everywhere | |
81 | vcs_full_cache = true |
|
81 | vcs_full_cache = true | |
82 |
|
82 | |||
83 | ## force https in RhodeCode, fixes https redirects, assumes it's always https |
|
83 | ## force https in RhodeCode, fixes https redirects, assumes it's always https | |
84 | force_https = false |
|
84 | force_https = false | |
85 |
|
85 | |||
86 | ## use Strict-Transport-Security headers |
|
86 | ## use Strict-Transport-Security headers | |
87 | use_htsts = false |
|
87 | use_htsts = false | |
88 |
|
88 | |||
89 | ## number of commits stats will parse on each iteration |
|
89 | ## number of commits stats will parse on each iteration | |
90 | commit_parse_limit = 25 |
|
90 | commit_parse_limit = 25 | |
91 |
|
91 | |||
92 | ## number of items displayed in lightweight dashboard before paginating is shown |
|
92 | ## number of items displayed in lightweight dashboard before paginating is shown | |
93 | dashboard_items = 100 |
|
93 | dashboard_items = 100 | |
94 |
|
94 | |||
95 | ## use gravatar service to display avatars |
|
95 | ## use gravatar service to display avatars | |
96 | use_gravatar = true |
|
96 | use_gravatar = true | |
97 |
|
97 | |||
98 | ## path to git executable |
|
98 | ## path to git executable | |
99 | git_path = git |
|
99 | git_path = git | |
100 |
|
100 | |||
|
101 | ## git rev filter option, --all is the default filter, if you need to | |||
|
102 | ## hide all refs in changelog switch this to --branches --tags | |||
|
103 | git_rev_filter=--all | |||
|
104 | ||||
101 | ## RSS feed options |
|
105 | ## RSS feed options | |
102 | rss_cut_off_limit = 256000 |
|
106 | rss_cut_off_limit = 256000 | |
103 | rss_items_per_page = 10 |
|
107 | rss_items_per_page = 10 | |
104 | rss_include_diff = false |
|
108 | rss_include_diff = false | |
105 |
|
109 | |||
106 | ## show hash options for changelog |
|
110 | ## show hash options for changelog | |
107 | sha_len = 12 |
|
111 | sha_len = 12 | |
108 | sha_rev = true |
|
112 | sha_rev = true | |
109 |
|
113 | |||
110 |
|
114 | |||
111 | ## alternative_gravatar_url allows you to use your own avatar server application |
|
115 | ## alternative_gravatar_url allows you to use your own avatar server application | |
112 | ## the following parts of the URL will be replaced |
|
116 | ## the following parts of the URL will be replaced | |
113 | ## {email} user email |
|
117 | ## {email} user email | |
114 | ## {md5email} md5 hash of the user email (like at gravatar.com) |
|
118 | ## {md5email} md5 hash of the user email (like at gravatar.com) | |
115 | ## {size} size of the image that is expected from the server application |
|
119 | ## {size} size of the image that is expected from the server application | |
116 | ## {scheme} http/https from RhodeCode server |
|
120 | ## {scheme} http/https from RhodeCode server | |
117 | ## {netloc} network location from RhodeCode server |
|
121 | ## {netloc} network location from RhodeCode server | |
118 | #alternative_gravatar_url = http://myavatarserver.com/getbyemail/{email}/{size} |
|
122 | #alternative_gravatar_url = http://myavatarserver.com/getbyemail/{email}/{size} | |
119 | #alternative_gravatar_url = http://myavatarserver.com/getbymd5/{md5email}?s={size} |
|
123 | #alternative_gravatar_url = http://myavatarserver.com/getbymd5/{md5email}?s={size} | |
120 |
|
124 | |||
121 |
|
125 | |||
122 | ## container auth options |
|
126 | ## container auth options | |
123 | container_auth_enabled = false |
|
127 | container_auth_enabled = false | |
124 | proxypass_auth_enabled = false |
|
128 | proxypass_auth_enabled = false | |
125 |
|
129 | |||
126 | ## default encoding used to convert from and to unicode |
|
130 | ## default encoding used to convert from and to unicode | |
127 | ## can also be a comma separated list of encodings in case of mixed encodings |
|
131 | ## can also be a comma separated list of encodings in case of mixed encodings | |
128 | default_encoding = utf8 |
|
132 | default_encoding = utf8 | |
129 |
|
133 | |||
130 | ## overwrite schema of clone url |
|
134 | ## overwrite schema of clone url | |
131 | ## available vars: |
|
135 | ## available vars: | |
132 | ## scheme - http/https |
|
136 | ## scheme - http/https | |
133 | ## user - current user |
|
137 | ## user - current user | |
134 | ## pass - password |
|
138 | ## pass - password | |
135 | ## netloc - network location |
|
139 | ## netloc - network location | |
136 | ## path - usually repo_name |
|
140 | ## path - usually repo_name | |
137 |
|
141 | |||
138 | #clone_uri = {scheme}://{user}{pass}{netloc}{path} |
|
142 | #clone_uri = {scheme}://{user}{pass}{netloc}{path} | |
139 |
|
143 | |||
140 | ## issue tracking mapping for commits messages |
|
144 | ## issue tracking mapping for commits messages | |
141 | ## comment out issue_pat, issue_server, issue_prefix to enable |
|
145 | ## comment out issue_pat, issue_server, issue_prefix to enable | |
142 |
|
146 | |||
143 | ## pattern to get the issues from commit messages |
|
147 | ## pattern to get the issues from commit messages | |
144 | ## default one used here is #<numbers> with a regex passive group for `#` |
|
148 | ## default one used here is #<numbers> with a regex passive group for `#` | |
145 | ## {id} will be all groups matched from this pattern |
|
149 | ## {id} will be all groups matched from this pattern | |
146 |
|
150 | |||
147 | issue_pat = (?:\s*#)(\d+) |
|
151 | issue_pat = (?:\s*#)(\d+) | |
148 |
|
152 | |||
149 | ## server url to the issue, each {id} will be replaced with match |
|
153 | ## server url to the issue, each {id} will be replaced with match | |
150 | ## fetched from the regex and {repo} is replaced with full repository name |
|
154 | ## fetched from the regex and {repo} is replaced with full repository name | |
151 | ## including groups {repo_name} is replaced with just name of repo |
|
155 | ## including groups {repo_name} is replaced with just name of repo | |
152 |
|
156 | |||
153 | issue_server_link = https://myissueserver.com/{repo}/issue/{id} |
|
157 | issue_server_link = https://myissueserver.com/{repo}/issue/{id} | |
154 |
|
158 | |||
155 | ## prefix to add to link to indicate it's an url |
|
159 | ## prefix to add to link to indicate it's an url | |
156 | ## #314 will be replaced by <issue_prefix><id> |
|
160 | ## #314 will be replaced by <issue_prefix><id> | |
157 |
|
161 | |||
158 | issue_prefix = # |
|
162 | issue_prefix = # | |
159 |
|
163 | |||
160 | ## issue_pat, issue_server_link, issue_prefix can have suffixes to specify |
|
164 | ## issue_pat, issue_server_link, issue_prefix can have suffixes to specify | |
161 | ## multiple patterns, to other issues server, wiki or others |
|
165 | ## multiple patterns, to other issues server, wiki or others | |
162 | ## below an example how to create a wiki pattern |
|
166 | ## below an example how to create a wiki pattern | |
163 | # #wiki-some-id -> https://mywiki.com/some-id |
|
167 | # #wiki-some-id -> https://mywiki.com/some-id | |
164 |
|
168 | |||
165 | #issue_pat_wiki = (?:wiki-)(.+) |
|
169 | #issue_pat_wiki = (?:wiki-)(.+) | |
166 | #issue_server_link_wiki = https://mywiki.com/{id} |
|
170 | #issue_server_link_wiki = https://mywiki.com/{id} | |
167 | #issue_prefix_wiki = WIKI- |
|
171 | #issue_prefix_wiki = WIKI- | |
168 |
|
172 | |||
169 |
|
173 | |||
170 | ## instance-id prefix |
|
174 | ## instance-id prefix | |
171 | ## a prefix key for this instance used for cache invalidation when running |
|
175 | ## a prefix key for this instance used for cache invalidation when running | |
172 | ## multiple instances of rhodecode, make sure it's globally unique for |
|
176 | ## multiple instances of rhodecode, make sure it's globally unique for | |
173 | ## all running rhodecode instances. Leave empty if you don't use it |
|
177 | ## all running rhodecode instances. Leave empty if you don't use it | |
174 | instance_id = |
|
178 | instance_id = | |
175 |
|
179 | |||
176 | ## alternative return HTTP header for failed authentication. Default HTTP |
|
180 | ## alternative return HTTP header for failed authentication. Default HTTP | |
177 | ## response is 401 HTTPUnauthorized. Currently HG clients have troubles with |
|
181 | ## response is 401 HTTPUnauthorized. Currently HG clients have troubles with | |
178 | ## handling that. Set this variable to 403 to return HTTPForbidden |
|
182 | ## handling that. Set this variable to 403 to return HTTPForbidden | |
179 | auth_ret_code = |
|
183 | auth_ret_code = | |
180 |
|
184 | |||
181 | ## locking return code. When repository is locked return this HTTP code. 2XX |
|
185 | ## locking return code. When repository is locked return this HTTP code. 2XX | |
182 | ## codes don't break the transactions while 4XX codes do |
|
186 | ## codes don't break the transactions while 4XX codes do | |
183 | lock_ret_code = 423 |
|
187 | lock_ret_code = 423 | |
184 |
|
188 | |||
185 |
|
189 | |||
186 | #################################### |
|
190 | #################################### | |
187 | ### CELERY CONFIG #### |
|
191 | ### CELERY CONFIG #### | |
188 | #################################### |
|
192 | #################################### | |
189 | use_celery = false |
|
193 | use_celery = false | |
190 | broker.host = localhost |
|
194 | broker.host = localhost | |
191 | broker.vhost = rabbitmqhost |
|
195 | broker.vhost = rabbitmqhost | |
192 | broker.port = 5672 |
|
196 | broker.port = 5672 | |
193 | broker.user = rabbitmq |
|
197 | broker.user = rabbitmq | |
194 | broker.password = qweqwe |
|
198 | broker.password = qweqwe | |
195 |
|
199 | |||
196 | celery.imports = rhodecode.lib.celerylib.tasks |
|
200 | celery.imports = rhodecode.lib.celerylib.tasks | |
197 |
|
201 | |||
198 | celery.result.backend = amqp |
|
202 | celery.result.backend = amqp | |
199 | celery.result.dburi = amqp:// |
|
203 | celery.result.dburi = amqp:// | |
200 | celery.result.serialier = json |
|
204 | celery.result.serialier = json | |
201 |
|
205 | |||
202 | #celery.send.task.error.emails = true |
|
206 | #celery.send.task.error.emails = true | |
203 | #celery.amqp.task.result.expires = 18000 |
|
207 | #celery.amqp.task.result.expires = 18000 | |
204 |
|
208 | |||
205 | celeryd.concurrency = 2 |
|
209 | celeryd.concurrency = 2 | |
206 | #celeryd.log.file = celeryd.log |
|
210 | #celeryd.log.file = celeryd.log | |
207 | celeryd.log.level = debug |
|
211 | celeryd.log.level = debug | |
208 | celeryd.max.tasks.per.child = 1 |
|
212 | celeryd.max.tasks.per.child = 1 | |
209 |
|
213 | |||
210 | ## tasks will never be sent to the queue, but executed locally instead. |
|
214 | ## tasks will never be sent to the queue, but executed locally instead. | |
211 | celery.always.eager = false |
|
215 | celery.always.eager = false | |
212 |
|
216 | |||
213 | #################################### |
|
217 | #################################### | |
214 | ### BEAKER CACHE #### |
|
218 | ### BEAKER CACHE #### | |
215 | #################################### |
|
219 | #################################### | |
216 | beaker.cache.data_dir=%(here)s/data/cache/data |
|
220 | beaker.cache.data_dir=%(here)s/data/cache/data | |
217 | beaker.cache.lock_dir=%(here)s/data/cache/lock |
|
221 | beaker.cache.lock_dir=%(here)s/data/cache/lock | |
218 |
|
222 | |||
219 | beaker.cache.regions=super_short_term,short_term,long_term,sql_cache_short,sql_cache_med,sql_cache_long |
|
223 | beaker.cache.regions=super_short_term,short_term,long_term,sql_cache_short,sql_cache_med,sql_cache_long | |
220 |
|
224 | |||
221 | beaker.cache.super_short_term.type=memory |
|
225 | beaker.cache.super_short_term.type=memory | |
222 | beaker.cache.super_short_term.expire=10 |
|
226 | beaker.cache.super_short_term.expire=10 | |
223 | beaker.cache.super_short_term.key_length = 256 |
|
227 | beaker.cache.super_short_term.key_length = 256 | |
224 |
|
228 | |||
225 | beaker.cache.short_term.type=memory |
|
229 | beaker.cache.short_term.type=memory | |
226 | beaker.cache.short_term.expire=60 |
|
230 | beaker.cache.short_term.expire=60 | |
227 | beaker.cache.short_term.key_length = 256 |
|
231 | beaker.cache.short_term.key_length = 256 | |
228 |
|
232 | |||
229 | beaker.cache.long_term.type=memory |
|
233 | beaker.cache.long_term.type=memory | |
230 | beaker.cache.long_term.expire=36000 |
|
234 | beaker.cache.long_term.expire=36000 | |
231 | beaker.cache.long_term.key_length = 256 |
|
235 | beaker.cache.long_term.key_length = 256 | |
232 |
|
236 | |||
233 | beaker.cache.sql_cache_short.type=memory |
|
237 | beaker.cache.sql_cache_short.type=memory | |
234 | beaker.cache.sql_cache_short.expire=10 |
|
238 | beaker.cache.sql_cache_short.expire=10 | |
235 | beaker.cache.sql_cache_short.key_length = 256 |
|
239 | beaker.cache.sql_cache_short.key_length = 256 | |
236 |
|
240 | |||
237 | beaker.cache.sql_cache_med.type=memory |
|
241 | beaker.cache.sql_cache_med.type=memory | |
238 | beaker.cache.sql_cache_med.expire=360 |
|
242 | beaker.cache.sql_cache_med.expire=360 | |
239 | beaker.cache.sql_cache_med.key_length = 256 |
|
243 | beaker.cache.sql_cache_med.key_length = 256 | |
240 |
|
244 | |||
241 | beaker.cache.sql_cache_long.type=file |
|
245 | beaker.cache.sql_cache_long.type=file | |
242 | beaker.cache.sql_cache_long.expire=3600 |
|
246 | beaker.cache.sql_cache_long.expire=3600 | |
243 | beaker.cache.sql_cache_long.key_length = 256 |
|
247 | beaker.cache.sql_cache_long.key_length = 256 | |
244 |
|
248 | |||
245 | #################################### |
|
249 | #################################### | |
246 | ### BEAKER SESSION #### |
|
250 | ### BEAKER SESSION #### | |
247 | #################################### |
|
251 | #################################### | |
248 | ## Type of storage used for the session, current types are |
|
252 | ## Type of storage used for the session, current types are | |
249 | ## dbm, file, memcached, database, and memory. |
|
253 | ## dbm, file, memcached, database, and memory. | |
250 | ## The storage uses the Container API |
|
254 | ## The storage uses the Container API | |
251 | ## that is also used by the cache system. |
|
255 | ## that is also used by the cache system. | |
252 |
|
256 | |||
253 | ## db session ## |
|
257 | ## db session ## | |
254 | #beaker.session.type = ext:database |
|
258 | #beaker.session.type = ext:database | |
255 | #beaker.session.sa.url = postgresql://postgres:qwe@localhost/rhodecode |
|
259 | #beaker.session.sa.url = postgresql://postgres:qwe@localhost/rhodecode | |
256 | #beaker.session.table_name = db_session |
|
260 | #beaker.session.table_name = db_session | |
257 |
|
261 | |||
258 | ## encrypted cookie client side session, good for many instances ## |
|
262 | ## encrypted cookie client side session, good for many instances ## | |
259 | #beaker.session.type = cookie |
|
263 | #beaker.session.type = cookie | |
260 |
|
264 | |||
261 | ## file based cookies (default) ## |
|
265 | ## file based cookies (default) ## | |
262 | #beaker.session.type = file |
|
266 | #beaker.session.type = file | |
263 |
|
267 | |||
264 |
|
268 | |||
265 | beaker.session.key = rhodecode |
|
269 | beaker.session.key = rhodecode | |
266 | ## secure cookie requires AES python libraries |
|
270 | ## secure cookie requires AES python libraries | |
267 | #beaker.session.encrypt_key = <key_for_encryption> |
|
271 | #beaker.session.encrypt_key = <key_for_encryption> | |
268 | #beaker.session.validate_key = <validation_key> |
|
272 | #beaker.session.validate_key = <validation_key> | |
269 |
|
273 | |||
270 | ## sets session as invalid if it hasn't been accessed for a given amount of time |
|
274 | ## sets session as invalid if it hasn't been accessed for a given amount of time | |
271 | beaker.session.timeout = 2592000 |
|
275 | beaker.session.timeout = 2592000 | |
272 | beaker.session.httponly = true |
|
276 | beaker.session.httponly = true | |
273 | #beaker.session.cookie_path = /<your-prefix> |
|
277 | #beaker.session.cookie_path = /<your-prefix> | |
274 |
|
278 | |||
275 | ## uncomment for https secure cookie |
|
279 | ## uncomment for https secure cookie | |
276 | beaker.session.secure = false |
|
280 | beaker.session.secure = false | |
277 |
|
281 | |||
278 | ## auto save the session so there is no need to call .save() |
|
282 | ## auto save the session so there is no need to call .save() | |
279 | beaker.session.auto = False |
|
283 | beaker.session.auto = False | |
280 |
|
284 | |||
281 | ## default cookie expiration time in seconds `true` expire at browser close ## |
|
285 | ## default cookie expiration time in seconds `true` expire at browser close ## | |
282 | #beaker.session.cookie_expires = 3600 |
|
286 | #beaker.session.cookie_expires = 3600 | |
283 |
|
287 | |||
284 |
|
288 | |||
285 | ############################ |
|
289 | ############################ | |
286 | ## ERROR HANDLING SYSTEMS ## |
|
290 | ## ERROR HANDLING SYSTEMS ## | |
287 | ############################ |
|
291 | ############################ | |
288 |
|
292 | |||
289 | #################### |
|
293 | #################### | |
290 | ### [errormator] ### |
|
294 | ### [errormator] ### | |
291 | #################### |
|
295 | #################### | |
292 |
|
296 | |||
293 | ## Errormator is tailored to work with RhodeCode, see |
|
297 | ## Errormator is tailored to work with RhodeCode, see | |
294 | ## http://errormator.com for details how to obtain an account |
|
298 | ## http://errormator.com for details how to obtain an account | |
295 | ## you must install python package `errormator_client` to make it work |
|
299 | ## you must install python package `errormator_client` to make it work | |
296 |
|
300 | |||
297 | ## errormator enabled |
|
301 | ## errormator enabled | |
298 | errormator = false |
|
302 | errormator = false | |
299 |
|
303 | |||
300 | errormator.server_url = https://api.errormator.com |
|
304 | errormator.server_url = https://api.errormator.com | |
301 | errormator.api_key = YOUR_API_KEY |
|
305 | errormator.api_key = YOUR_API_KEY | |
302 |
|
306 | |||
303 | ## TWEAK AMOUNT OF INFO SENT HERE |
|
307 | ## TWEAK AMOUNT OF INFO SENT HERE | |
304 |
|
308 | |||
305 | ## enables 404 error logging (default False) |
|
309 | ## enables 404 error logging (default False) | |
306 | errormator.report_404 = false |
|
310 | errormator.report_404 = false | |
307 |
|
311 | |||
308 | ## time in seconds after request is considered being slow (default 1) |
|
312 | ## time in seconds after request is considered being slow (default 1) | |
309 | errormator.slow_request_time = 1 |
|
313 | errormator.slow_request_time = 1 | |
310 |
|
314 | |||
311 | ## record slow requests in application |
|
315 | ## record slow requests in application | |
312 | ## (needs to be enabled for slow datastore recording and time tracking) |
|
316 | ## (needs to be enabled for slow datastore recording and time tracking) | |
313 | errormator.slow_requests = true |
|
317 | errormator.slow_requests = true | |
314 |
|
318 | |||
315 | ## enable hooking to application loggers |
|
319 | ## enable hooking to application loggers | |
316 | # errormator.logging = true |
|
320 | # errormator.logging = true | |
317 |
|
321 | |||
318 | ## minimum log level for log capture |
|
322 | ## minimum log level for log capture | |
319 | # errormator.logging.level = WARNING |
|
323 | # errormator.logging.level = WARNING | |
320 |
|
324 | |||
321 | ## send logs only from erroneous/slow requests |
|
325 | ## send logs only from erroneous/slow requests | |
322 | ## (saves API quota for intensive logging) |
|
326 | ## (saves API quota for intensive logging) | |
323 | errormator.logging_on_error = false |
|
327 | errormator.logging_on_error = false | |
324 |
|
328 | |||
325 | ## list of additional keywords that should be grabbed from environ object |
|
329 | ## list of additional keywords that should be grabbed from environ object | |
326 | ## can be string with comma separated list of words in lowercase |
|
330 | ## can be string with comma separated list of words in lowercase | |
327 | ## (by default client will always send following info: |
|
331 | ## (by default client will always send following info: | |
328 | ## 'REMOTE_USER', 'REMOTE_ADDR', 'SERVER_NAME', 'CONTENT_TYPE' + all keys that |
|
332 | ## 'REMOTE_USER', 'REMOTE_ADDR', 'SERVER_NAME', 'CONTENT_TYPE' + all keys that | |
329 | ## start with HTTP* this list be extended with additional keywords here |
|
333 | ## start with HTTP* this list be extended with additional keywords here | |
330 | errormator.environ_keys_whitelist = |
|
334 | errormator.environ_keys_whitelist = | |
331 |
|
335 | |||
332 |
|
336 | |||
333 | ## list of keywords that should be blanked from request object |
|
337 | ## list of keywords that should be blanked from request object | |
334 | ## can be string with comma separated list of words in lowercase |
|
338 | ## can be string with comma separated list of words in lowercase | |
335 | ## (by default client will always blank keys that contain following words |
|
339 | ## (by default client will always blank keys that contain following words | |
336 | ## 'password', 'passwd', 'pwd', 'auth_tkt', 'secret', 'csrf' |
|
340 | ## 'password', 'passwd', 'pwd', 'auth_tkt', 'secret', 'csrf' | |
337 | ## this list be extended with additional keywords set here |
|
341 | ## this list be extended with additional keywords set here | |
338 | errormator.request_keys_blacklist = |
|
342 | errormator.request_keys_blacklist = | |
339 |
|
343 | |||
340 |
|
344 | |||
341 | ## list of namespaces that should be ignored when gathering log entries |
|
345 | ## list of namespaces that should be ignored when gathering log entries | |
342 | ## can be string with comma separated list of namespaces |
|
346 | ## can be string with comma separated list of namespaces | |
343 | ## (by default the client ignores own entries: errormator_client.client) |
|
347 | ## (by default the client ignores own entries: errormator_client.client) | |
344 | errormator.log_namespace_blacklist = |
|
348 | errormator.log_namespace_blacklist = | |
345 |
|
349 | |||
346 |
|
350 | |||
347 | ################ |
|
351 | ################ | |
348 | ### [sentry] ### |
|
352 | ### [sentry] ### | |
349 | ################ |
|
353 | ################ | |
350 |
|
354 | |||
351 | ## sentry is an alternative open source error aggregator |
|
355 | ## sentry is an alternative open source error aggregator | |
352 | ## you must install python packages `sentry` and `raven` to enable |
|
356 | ## you must install python packages `sentry` and `raven` to enable | |
353 |
|
357 | |||
354 | sentry.dsn = YOUR_DSN |
|
358 | sentry.dsn = YOUR_DSN | |
355 | sentry.servers = |
|
359 | sentry.servers = | |
356 | sentry.name = |
|
360 | sentry.name = | |
357 | sentry.key = |
|
361 | sentry.key = | |
358 | sentry.public_key = |
|
362 | sentry.public_key = | |
359 | sentry.secret_key = |
|
363 | sentry.secret_key = | |
360 | sentry.project = |
|
364 | sentry.project = | |
361 | sentry.site = |
|
365 | sentry.site = | |
362 | sentry.include_paths = |
|
366 | sentry.include_paths = | |
363 | sentry.exclude_paths = |
|
367 | sentry.exclude_paths = | |
364 |
|
368 | |||
365 |
|
369 | |||
366 | ################################################################################ |
|
370 | ################################################################################ | |
367 | ## WARNING: *THE LINE BELOW MUST BE UNCOMMENTED ON A PRODUCTION ENVIRONMENT* ## |
|
371 | ## WARNING: *THE LINE BELOW MUST BE UNCOMMENTED ON A PRODUCTION ENVIRONMENT* ## | |
368 | ## Debug mode will enable the interactive debugging tool, allowing ANYONE to ## |
|
372 | ## Debug mode will enable the interactive debugging tool, allowing ANYONE to ## | |
369 | ## execute malicious code after an exception is raised. ## |
|
373 | ## execute malicious code after an exception is raised. ## | |
370 | ################################################################################ |
|
374 | ################################################################################ | |
371 | #set debug = false |
|
375 | #set debug = false | |
372 |
|
376 | |||
373 | ################################## |
|
377 | ################################## | |
374 | ### LOGVIEW CONFIG ### |
|
378 | ### LOGVIEW CONFIG ### | |
375 | ################################## |
|
379 | ################################## | |
376 | logview.sqlalchemy = #faa |
|
380 | logview.sqlalchemy = #faa | |
377 | logview.pylons.templating = #bfb |
|
381 | logview.pylons.templating = #bfb | |
378 | logview.pylons.util = #eee |
|
382 | logview.pylons.util = #eee | |
379 |
|
383 | |||
380 | ######################################################### |
|
384 | ######################################################### | |
381 | ### DB CONFIGS - EACH DB WILL HAVE IT'S OWN CONFIG ### |
|
385 | ### DB CONFIGS - EACH DB WILL HAVE IT'S OWN CONFIG ### | |
382 | ######################################################### |
|
386 | ######################################################### | |
383 | #sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode.db |
|
387 | #sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode.db | |
384 | sqlalchemy.db1.url = postgresql://postgres:qwe@localhost/rhodecode |
|
388 | sqlalchemy.db1.url = postgresql://postgres:qwe@localhost/rhodecode | |
385 | sqlalchemy.db1.echo = false |
|
389 | sqlalchemy.db1.echo = false | |
386 | sqlalchemy.db1.pool_recycle = 3600 |
|
390 | sqlalchemy.db1.pool_recycle = 3600 | |
387 | sqlalchemy.db1.convert_unicode = true |
|
391 | sqlalchemy.db1.convert_unicode = true | |
388 |
|
392 | |||
389 | ################################ |
|
393 | ################################ | |
390 | ### LOGGING CONFIGURATION #### |
|
394 | ### LOGGING CONFIGURATION #### | |
391 | ################################ |
|
395 | ################################ | |
392 | [loggers] |
|
396 | [loggers] | |
393 | keys = root, routes, rhodecode, sqlalchemy, beaker, templates, whoosh_indexer |
|
397 | keys = root, routes, rhodecode, sqlalchemy, beaker, templates, whoosh_indexer | |
394 |
|
398 | |||
395 | [handlers] |
|
399 | [handlers] | |
396 | keys = console, console_sql |
|
400 | keys = console, console_sql | |
397 |
|
401 | |||
398 | [formatters] |
|
402 | [formatters] | |
399 | keys = generic, color_formatter, color_formatter_sql |
|
403 | keys = generic, color_formatter, color_formatter_sql | |
400 |
|
404 | |||
401 | ############# |
|
405 | ############# | |
402 | ## LOGGERS ## |
|
406 | ## LOGGERS ## | |
403 | ############# |
|
407 | ############# | |
404 | [logger_root] |
|
408 | [logger_root] | |
405 | level = NOTSET |
|
409 | level = NOTSET | |
406 | handlers = console |
|
410 | handlers = console | |
407 |
|
411 | |||
408 | [logger_routes] |
|
412 | [logger_routes] | |
409 | level = DEBUG |
|
413 | level = DEBUG | |
410 | handlers = |
|
414 | handlers = | |
411 | qualname = routes.middleware |
|
415 | qualname = routes.middleware | |
412 | ## "level = DEBUG" logs the route matched and routing variables. |
|
416 | ## "level = DEBUG" logs the route matched and routing variables. | |
413 | propagate = 1 |
|
417 | propagate = 1 | |
414 |
|
418 | |||
415 | [logger_beaker] |
|
419 | [logger_beaker] | |
416 | level = DEBUG |
|
420 | level = DEBUG | |
417 | handlers = |
|
421 | handlers = | |
418 | qualname = beaker.container |
|
422 | qualname = beaker.container | |
419 | propagate = 1 |
|
423 | propagate = 1 | |
420 |
|
424 | |||
421 | [logger_templates] |
|
425 | [logger_templates] | |
422 | level = INFO |
|
426 | level = INFO | |
423 | handlers = |
|
427 | handlers = | |
424 | qualname = pylons.templating |
|
428 | qualname = pylons.templating | |
425 | propagate = 1 |
|
429 | propagate = 1 | |
426 |
|
430 | |||
427 | [logger_rhodecode] |
|
431 | [logger_rhodecode] | |
428 | level = DEBUG |
|
432 | level = DEBUG | |
429 | handlers = |
|
433 | handlers = | |
430 | qualname = rhodecode |
|
434 | qualname = rhodecode | |
431 | propagate = 1 |
|
435 | propagate = 1 | |
432 |
|
436 | |||
433 | [logger_sqlalchemy] |
|
437 | [logger_sqlalchemy] | |
434 | level = INFO |
|
438 | level = INFO | |
435 | handlers = console_sql |
|
439 | handlers = console_sql | |
436 | qualname = sqlalchemy.engine |
|
440 | qualname = sqlalchemy.engine | |
437 | propagate = 0 |
|
441 | propagate = 0 | |
438 |
|
442 | |||
439 | [logger_whoosh_indexer] |
|
443 | [logger_whoosh_indexer] | |
440 | level = DEBUG |
|
444 | level = DEBUG | |
441 | handlers = |
|
445 | handlers = | |
442 | qualname = whoosh_indexer |
|
446 | qualname = whoosh_indexer | |
443 | propagate = 1 |
|
447 | propagate = 1 | |
444 |
|
448 | |||
445 | ############## |
|
449 | ############## | |
446 | ## HANDLERS ## |
|
450 | ## HANDLERS ## | |
447 | ############## |
|
451 | ############## | |
448 |
|
452 | |||
449 | [handler_console] |
|
453 | [handler_console] | |
450 | class = StreamHandler |
|
454 | class = StreamHandler | |
451 | args = (sys.stderr,) |
|
455 | args = (sys.stderr,) | |
452 | level = DEBUG |
|
456 | level = DEBUG | |
453 | formatter = color_formatter |
|
457 | formatter = color_formatter | |
454 |
|
458 | |||
455 | [handler_console_sql] |
|
459 | [handler_console_sql] | |
456 | class = StreamHandler |
|
460 | class = StreamHandler | |
457 | args = (sys.stderr,) |
|
461 | args = (sys.stderr,) | |
458 | level = DEBUG |
|
462 | level = DEBUG | |
459 | formatter = color_formatter_sql |
|
463 | formatter = color_formatter_sql | |
460 |
|
464 | |||
461 | ################ |
|
465 | ################ | |
462 | ## FORMATTERS ## |
|
466 | ## FORMATTERS ## | |
463 | ################ |
|
467 | ################ | |
464 |
|
468 | |||
465 | [formatter_generic] |
|
469 | [formatter_generic] | |
466 | format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s |
|
470 | format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s | |
467 | datefmt = %Y-%m-%d %H:%M:%S |
|
471 | datefmt = %Y-%m-%d %H:%M:%S | |
468 |
|
472 | |||
469 | [formatter_color_formatter] |
|
473 | [formatter_color_formatter] | |
470 | class=rhodecode.lib.colored_formatter.ColorFormatter |
|
474 | class=rhodecode.lib.colored_formatter.ColorFormatter | |
471 | format= %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s |
|
475 | format= %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s | |
472 | datefmt = %Y-%m-%d %H:%M:%S |
|
476 | datefmt = %Y-%m-%d %H:%M:%S | |
473 |
|
477 | |||
474 | [formatter_color_formatter_sql] |
|
478 | [formatter_color_formatter_sql] | |
475 | class=rhodecode.lib.colored_formatter.ColorFormatterSql |
|
479 | class=rhodecode.lib.colored_formatter.ColorFormatterSql | |
476 | format= %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s |
|
480 | format= %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s | |
477 | datefmt = %Y-%m-%d %H:%M:%S |
|
481 | datefmt = %Y-%m-%d %H:%M:%S |
@@ -1,477 +1,481 b'' | |||||
1 | ################################################################################ |
|
1 | ################################################################################ | |
2 | ################################################################################ |
|
2 | ################################################################################ | |
3 | # RhodeCode - Pylons environment configuration # |
|
3 | # RhodeCode - Pylons environment configuration # | |
4 | # # |
|
4 | # # | |
5 | # The %(here)s variable will be replaced with the parent directory of this file# |
|
5 | # The %(here)s variable will be replaced with the parent directory of this file# | |
6 | ################################################################################ |
|
6 | ################################################################################ | |
7 |
|
7 | |||
8 | [DEFAULT] |
|
8 | [DEFAULT] | |
9 | debug = true |
|
9 | debug = true | |
10 | pdebug = false |
|
10 | pdebug = false | |
11 | ################################################################################ |
|
11 | ################################################################################ | |
12 | ## Uncomment and replace with the address which should receive ## |
|
12 | ## Uncomment and replace with the address which should receive ## | |
13 | ## any error reports after application crash ## |
|
13 | ## any error reports after application crash ## | |
14 | ## Additionally those settings will be used by RhodeCode mailing system ## |
|
14 | ## Additionally those settings will be used by RhodeCode mailing system ## | |
15 | ################################################################################ |
|
15 | ################################################################################ | |
16 | #email_to = admin@localhost |
|
16 | #email_to = admin@localhost | |
17 | #error_email_from = paste_error@localhost |
|
17 | #error_email_from = paste_error@localhost | |
18 | #app_email_from = rhodecode-noreply@localhost |
|
18 | #app_email_from = rhodecode-noreply@localhost | |
19 | #error_message = |
|
19 | #error_message = | |
20 | #email_prefix = [RhodeCode] |
|
20 | #email_prefix = [RhodeCode] | |
21 |
|
21 | |||
22 | #smtp_server = mail.server.com |
|
22 | #smtp_server = mail.server.com | |
23 | #smtp_username = |
|
23 | #smtp_username = | |
24 | #smtp_password = |
|
24 | #smtp_password = | |
25 | #smtp_port = |
|
25 | #smtp_port = | |
26 | #smtp_use_tls = false |
|
26 | #smtp_use_tls = false | |
27 | #smtp_use_ssl = true |
|
27 | #smtp_use_ssl = true | |
28 | ## Specify available auth parameters here (e.g. LOGIN PLAIN CRAM-MD5, etc.) |
|
28 | ## Specify available auth parameters here (e.g. LOGIN PLAIN CRAM-MD5, etc.) | |
29 | #smtp_auth = |
|
29 | #smtp_auth = | |
30 |
|
30 | |||
31 | [server:main] |
|
31 | [server:main] | |
32 | ## PASTE |
|
32 | ## PASTE | |
33 | ## nr of threads to spawn |
|
33 | ## nr of threads to spawn | |
34 | #threadpool_workers = 5 |
|
34 | #threadpool_workers = 5 | |
35 |
|
35 | |||
36 | ## max request before thread respawn |
|
36 | ## max request before thread respawn | |
37 | #threadpool_max_requests = 10 |
|
37 | #threadpool_max_requests = 10 | |
38 |
|
38 | |||
39 | ## option to use threads of process |
|
39 | ## option to use threads of process | |
40 | #use_threadpool = true |
|
40 | #use_threadpool = true | |
41 |
|
41 | |||
42 | #use = egg:Paste#http |
|
42 | #use = egg:Paste#http | |
43 |
|
43 | |||
44 | ## WAITRESS |
|
44 | ## WAITRESS | |
45 | threads = 5 |
|
45 | threads = 5 | |
46 | ## 100GB |
|
46 | ## 100GB | |
47 | max_request_body_size = 107374182400 |
|
47 | max_request_body_size = 107374182400 | |
48 | use = egg:waitress#main |
|
48 | use = egg:waitress#main | |
49 |
|
49 | |||
50 | host = 127.0.0.1 |
|
50 | host = 127.0.0.1 | |
51 | port = 8001 |
|
51 | port = 8001 | |
52 |
|
52 | |||
53 | ## prefix middleware for rc |
|
53 | ## prefix middleware for rc | |
54 | #[filter:proxy-prefix] |
|
54 | #[filter:proxy-prefix] | |
55 | #use = egg:PasteDeploy#prefix |
|
55 | #use = egg:PasteDeploy#prefix | |
56 | #prefix = /<your-prefix> |
|
56 | #prefix = /<your-prefix> | |
57 |
|
57 | |||
58 | [app:main] |
|
58 | [app:main] | |
59 | use = egg:rhodecode |
|
59 | use = egg:rhodecode | |
60 | ## enable proxy prefix middleware |
|
60 | ## enable proxy prefix middleware | |
61 | #filter-with = proxy-prefix |
|
61 | #filter-with = proxy-prefix | |
62 |
|
62 | |||
63 | full_stack = true |
|
63 | full_stack = true | |
64 | static_files = true |
|
64 | static_files = true | |
65 | ## Optional Languages |
|
65 | ## Optional Languages | |
66 | ## en, fr, ja, pt_BR, zh_CN, zh_TW, pl |
|
66 | ## en, fr, ja, pt_BR, zh_CN, zh_TW, pl | |
67 | lang = en |
|
67 | lang = en | |
68 | cache_dir = %(here)s/data |
|
68 | cache_dir = %(here)s/data | |
69 | index_dir = %(here)s/data/index |
|
69 | index_dir = %(here)s/data/index | |
70 |
|
70 | |||
71 | ## uncomment and set this path to use archive download cache |
|
71 | ## uncomment and set this path to use archive download cache | |
72 | #archive_cache_dir = /tmp/tarballcache |
|
72 | #archive_cache_dir = /tmp/tarballcache | |
73 |
|
73 | |||
74 | ## change this to unique ID for security |
|
74 | ## change this to unique ID for security | |
75 | app_instance_uuid = rc-production |
|
75 | app_instance_uuid = rc-production | |
76 |
|
76 | |||
77 | ## cut off limit for large diffs (size in bytes) |
|
77 | ## cut off limit for large diffs (size in bytes) | |
78 | cut_off_limit = 256000 |
|
78 | cut_off_limit = 256000 | |
79 |
|
79 | |||
80 | ## use cache version of scm repo everywhere |
|
80 | ## use cache version of scm repo everywhere | |
81 | vcs_full_cache = true |
|
81 | vcs_full_cache = true | |
82 |
|
82 | |||
83 | ## force https in RhodeCode, fixes https redirects, assumes it's always https |
|
83 | ## force https in RhodeCode, fixes https redirects, assumes it's always https | |
84 | force_https = false |
|
84 | force_https = false | |
85 |
|
85 | |||
86 | ## use Strict-Transport-Security headers |
|
86 | ## use Strict-Transport-Security headers | |
87 | use_htsts = false |
|
87 | use_htsts = false | |
88 |
|
88 | |||
89 | ## number of commits stats will parse on each iteration |
|
89 | ## number of commits stats will parse on each iteration | |
90 | commit_parse_limit = 25 |
|
90 | commit_parse_limit = 25 | |
91 |
|
91 | |||
92 | ## number of items displayed in lightweight dashboard before paginating is shown |
|
92 | ## number of items displayed in lightweight dashboard before paginating is shown | |
93 | dashboard_items = 100 |
|
93 | dashboard_items = 100 | |
94 |
|
94 | |||
95 | ## use gravatar service to display avatars |
|
95 | ## use gravatar service to display avatars | |
96 | use_gravatar = true |
|
96 | use_gravatar = true | |
97 |
|
97 | |||
98 | ## path to git executable |
|
98 | ## path to git executable | |
99 | git_path = git |
|
99 | git_path = git | |
100 |
|
100 | |||
|
101 | ## git rev filter option, --all is the default filter, if you need to | |||
|
102 | ## hide all refs in changelog switch this to --branches --tags | |||
|
103 | git_rev_filter=--all | |||
|
104 | ||||
101 | ## RSS feed options |
|
105 | ## RSS feed options | |
102 | rss_cut_off_limit = 256000 |
|
106 | rss_cut_off_limit = 256000 | |
103 | rss_items_per_page = 10 |
|
107 | rss_items_per_page = 10 | |
104 | rss_include_diff = false |
|
108 | rss_include_diff = false | |
105 |
|
109 | |||
106 | ## show hash options for changelog |
|
110 | ## show hash options for changelog | |
107 | sha_len = 12 |
|
111 | sha_len = 12 | |
108 | sha_rev = true |
|
112 | sha_rev = true | |
109 |
|
113 | |||
110 |
|
114 | |||
111 | ## alternative_gravatar_url allows you to use your own avatar server application |
|
115 | ## alternative_gravatar_url allows you to use your own avatar server application | |
112 | ## the following parts of the URL will be replaced |
|
116 | ## the following parts of the URL will be replaced | |
113 | ## {email} user email |
|
117 | ## {email} user email | |
114 | ## {md5email} md5 hash of the user email (like at gravatar.com) |
|
118 | ## {md5email} md5 hash of the user email (like at gravatar.com) | |
115 | ## {size} size of the image that is expected from the server application |
|
119 | ## {size} size of the image that is expected from the server application | |
116 | ## {scheme} http/https from RhodeCode server |
|
120 | ## {scheme} http/https from RhodeCode server | |
117 | ## {netloc} network location from RhodeCode server |
|
121 | ## {netloc} network location from RhodeCode server | |
118 | #alternative_gravatar_url = http://myavatarserver.com/getbyemail/{email}/{size} |
|
122 | #alternative_gravatar_url = http://myavatarserver.com/getbyemail/{email}/{size} | |
119 | #alternative_gravatar_url = http://myavatarserver.com/getbymd5/{md5email}?s={size} |
|
123 | #alternative_gravatar_url = http://myavatarserver.com/getbymd5/{md5email}?s={size} | |
120 |
|
124 | |||
121 |
|
125 | |||
122 | ## container auth options |
|
126 | ## container auth options | |
123 | container_auth_enabled = false |
|
127 | container_auth_enabled = false | |
124 | proxypass_auth_enabled = false |
|
128 | proxypass_auth_enabled = false | |
125 |
|
129 | |||
126 | ## default encoding used to convert from and to unicode |
|
130 | ## default encoding used to convert from and to unicode | |
127 | ## can be also a comma seperated list of encoding in case of mixed encodings |
|
131 | ## can be also a comma seperated list of encoding in case of mixed encodings | |
128 | default_encoding = utf8 |
|
132 | default_encoding = utf8 | |
129 |
|
133 | |||
130 | ## overwrite schema of clone url |
|
134 | ## overwrite schema of clone url | |
131 | ## available vars: |
|
135 | ## available vars: | |
132 | ## scheme - http/https |
|
136 | ## scheme - http/https | |
133 | ## user - current user |
|
137 | ## user - current user | |
134 | ## pass - password |
|
138 | ## pass - password | |
135 | ## netloc - network location |
|
139 | ## netloc - network location | |
136 | ## path - usually repo_name |
|
140 | ## path - usually repo_name | |
137 |
|
141 | |||
138 | #clone_uri = {scheme}://{user}{pass}{netloc}{path} |
|
142 | #clone_uri = {scheme}://{user}{pass}{netloc}{path} | |
139 |
|
143 | |||
140 | ## issue tracking mapping for commits messages |
|
144 | ## issue tracking mapping for commits messages | |
141 | ## comment out issue_pat, issue_server, issue_prefix to enable |
|
145 | ## comment out issue_pat, issue_server, issue_prefix to enable | |
142 |
|
146 | |||
143 | ## pattern to get the issues from commit messages |
|
147 | ## pattern to get the issues from commit messages | |
144 | ## default one used here is #<numbers> with a regex passive group for `#` |
|
148 | ## default one used here is #<numbers> with a regex passive group for `#` | |
145 | ## {id} will be all groups matched from this pattern |
|
149 | ## {id} will be all groups matched from this pattern | |
146 |
|
150 | |||
147 | issue_pat = (?:\s*#)(\d+) |
|
151 | issue_pat = (?:\s*#)(\d+) | |
148 |
|
152 | |||
149 | ## server url to the issue, each {id} will be replaced with match |
|
153 | ## server url to the issue, each {id} will be replaced with match | |
150 | ## fetched from the regex and {repo} is replaced with full repository name |
|
154 | ## fetched from the regex and {repo} is replaced with full repository name | |
151 | ## including groups {repo_name} is replaced with just name of repo |
|
155 | ## including groups {repo_name} is replaced with just name of repo | |
152 |
|
156 | |||
153 | issue_server_link = https://myissueserver.com/{repo}/issue/{id} |
|
157 | issue_server_link = https://myissueserver.com/{repo}/issue/{id} | |
154 |
|
158 | |||
155 | ## prefix to add to link to indicate it's an url |
|
159 | ## prefix to add to link to indicate it's an url | |
156 | ## #314 will be replaced by <issue_prefix><id> |
|
160 | ## #314 will be replaced by <issue_prefix><id> | |
157 |
|
161 | |||
158 | issue_prefix = # |
|
162 | issue_prefix = # | |
159 |
|
163 | |||
160 | ## issue_pat, issue_server_link, issue_prefix can have suffixes to specify |
|
164 | ## issue_pat, issue_server_link, issue_prefix can have suffixes to specify | |
161 | ## multiple patterns, to other issues server, wiki or others |
|
165 | ## multiple patterns, to other issues server, wiki or others | |
162 | ## below an example how to create a wiki pattern |
|
166 | ## below an example how to create a wiki pattern | |
163 | # #wiki-some-id -> https://mywiki.com/some-id |
|
167 | # #wiki-some-id -> https://mywiki.com/some-id | |
164 |
|
168 | |||
165 | #issue_pat_wiki = (?:wiki-)(.+) |
|
169 | #issue_pat_wiki = (?:wiki-)(.+) | |
166 | #issue_server_link_wiki = https://mywiki.com/{id} |
|
170 | #issue_server_link_wiki = https://mywiki.com/{id} | |
167 | #issue_prefix_wiki = WIKI- |
|
171 | #issue_prefix_wiki = WIKI- | |
168 |
|
172 | |||
169 |
|
173 | |||
170 | ## instance-id prefix |
|
174 | ## instance-id prefix | |
171 | ## a prefix key for this instance used for cache invalidation when running |
|
175 | ## a prefix key for this instance used for cache invalidation when running | |
172 | ## multiple instances of rhodecode, make sure it's globally unique for |
|
176 | ## multiple instances of rhodecode, make sure it's globally unique for | |
173 | ## all running rhodecode instances. Leave empty if you don't use it |
|
177 | ## all running rhodecode instances. Leave empty if you don't use it | |
174 | instance_id = |
|
178 | instance_id = | |
175 |
|
179 | |||
176 | ## alternative return HTTP header for failed authentication. Default HTTP |
|
180 | ## alternative return HTTP header for failed authentication. Default HTTP | |
177 | ## response is 401 HTTPUnauthorized. Currently HG clients have troubles with |
|
181 | ## response is 401 HTTPUnauthorized. Currently HG clients have troubles with | |
178 | ## handling that. Set this variable to 403 to return HTTPForbidden |
|
182 | ## handling that. Set this variable to 403 to return HTTPForbidden | |
179 | auth_ret_code = |
|
183 | auth_ret_code = | |
180 |
|
184 | |||
181 | ## locking return code. When repository is locked return this HTTP code. 2XX |
|
185 | ## locking return code. When repository is locked return this HTTP code. 2XX | |
182 | ## codes don't break the transactions while 4XX codes do |
|
186 | ## codes don't break the transactions while 4XX codes do | |
183 | lock_ret_code = 423 |
|
187 | lock_ret_code = 423 | |
184 |
|
188 | |||
185 |
|
189 | |||
186 | #################################### |
|
190 | #################################### | |
187 | ### CELERY CONFIG #### |
|
191 | ### CELERY CONFIG #### | |
188 | #################################### |
|
192 | #################################### | |
189 | use_celery = false |
|
193 | use_celery = false | |
190 | broker.host = localhost |
|
194 | broker.host = localhost | |
191 | broker.vhost = rabbitmqhost |
|
195 | broker.vhost = rabbitmqhost | |
192 | broker.port = 5672 |
|
196 | broker.port = 5672 | |
193 | broker.user = rabbitmq |
|
197 | broker.user = rabbitmq | |
194 | broker.password = qweqwe |
|
198 | broker.password = qweqwe | |
195 |
|
199 | |||
196 | celery.imports = rhodecode.lib.celerylib.tasks |
|
200 | celery.imports = rhodecode.lib.celerylib.tasks | |
197 |
|
201 | |||
198 | celery.result.backend = amqp |
|
202 | celery.result.backend = amqp | |
199 | celery.result.dburi = amqp:// |
|
203 | celery.result.dburi = amqp:// | |
200 | celery.result.serialier = json |
|
204 | celery.result.serialier = json | |
201 |
|
205 | |||
202 | #celery.send.task.error.emails = true |
|
206 | #celery.send.task.error.emails = true | |
203 | #celery.amqp.task.result.expires = 18000 |
|
207 | #celery.amqp.task.result.expires = 18000 | |
204 |
|
208 | |||
205 | celeryd.concurrency = 2 |
|
209 | celeryd.concurrency = 2 | |
206 | #celeryd.log.file = celeryd.log |
|
210 | #celeryd.log.file = celeryd.log | |
207 | celeryd.log.level = debug |
|
211 | celeryd.log.level = debug | |
208 | celeryd.max.tasks.per.child = 1 |
|
212 | celeryd.max.tasks.per.child = 1 | |
209 |
|
213 | |||
210 | ## tasks will never be sent to the queue, but executed locally instead. |
|
214 | ## tasks will never be sent to the queue, but executed locally instead. | |
211 | celery.always.eager = false |
|
215 | celery.always.eager = false | |
212 |
|
216 | |||
213 | #################################### |
|
217 | #################################### | |
214 | ### BEAKER CACHE #### |
|
218 | ### BEAKER CACHE #### | |
215 | #################################### |
|
219 | #################################### | |
216 | beaker.cache.data_dir=%(here)s/data/cache/data |
|
220 | beaker.cache.data_dir=%(here)s/data/cache/data | |
217 | beaker.cache.lock_dir=%(here)s/data/cache/lock |
|
221 | beaker.cache.lock_dir=%(here)s/data/cache/lock | |
218 |
|
222 | |||
219 | beaker.cache.regions=super_short_term,short_term,long_term,sql_cache_short,sql_cache_med,sql_cache_long |
|
223 | beaker.cache.regions=super_short_term,short_term,long_term,sql_cache_short,sql_cache_med,sql_cache_long | |
220 |
|
224 | |||
221 | beaker.cache.super_short_term.type=memory |
|
225 | beaker.cache.super_short_term.type=memory | |
222 | beaker.cache.super_short_term.expire=10 |
|
226 | beaker.cache.super_short_term.expire=10 | |
223 | beaker.cache.super_short_term.key_length = 256 |
|
227 | beaker.cache.super_short_term.key_length = 256 | |
224 |
|
228 | |||
225 | beaker.cache.short_term.type=memory |
|
229 | beaker.cache.short_term.type=memory | |
226 | beaker.cache.short_term.expire=60 |
|
230 | beaker.cache.short_term.expire=60 | |
227 | beaker.cache.short_term.key_length = 256 |
|
231 | beaker.cache.short_term.key_length = 256 | |
228 |
|
232 | |||
229 | beaker.cache.long_term.type=memory |
|
233 | beaker.cache.long_term.type=memory | |
230 | beaker.cache.long_term.expire=36000 |
|
234 | beaker.cache.long_term.expire=36000 | |
231 | beaker.cache.long_term.key_length = 256 |
|
235 | beaker.cache.long_term.key_length = 256 | |
232 |
|
236 | |||
233 | beaker.cache.sql_cache_short.type=memory |
|
237 | beaker.cache.sql_cache_short.type=memory | |
234 | beaker.cache.sql_cache_short.expire=10 |
|
238 | beaker.cache.sql_cache_short.expire=10 | |
235 | beaker.cache.sql_cache_short.key_length = 256 |
|
239 | beaker.cache.sql_cache_short.key_length = 256 | |
236 |
|
240 | |||
237 | beaker.cache.sql_cache_med.type=memory |
|
241 | beaker.cache.sql_cache_med.type=memory | |
238 | beaker.cache.sql_cache_med.expire=360 |
|
242 | beaker.cache.sql_cache_med.expire=360 | |
239 | beaker.cache.sql_cache_med.key_length = 256 |
|
243 | beaker.cache.sql_cache_med.key_length = 256 | |
240 |
|
244 | |||
241 | beaker.cache.sql_cache_long.type=file |
|
245 | beaker.cache.sql_cache_long.type=file | |
242 | beaker.cache.sql_cache_long.expire=3600 |
|
246 | beaker.cache.sql_cache_long.expire=3600 | |
243 | beaker.cache.sql_cache_long.key_length = 256 |
|
247 | beaker.cache.sql_cache_long.key_length = 256 | |
244 |
|
248 | |||
245 | #################################### |
|
249 | #################################### | |
246 | ### BEAKER SESSION #### |
|
250 | ### BEAKER SESSION #### | |
247 | #################################### |
|
251 | #################################### | |
248 | ## Type of storage used for the session, current types are |
|
252 | ## Type of storage used for the session, current types are | |
249 | ## dbm, file, memcached, database, and memory. |
|
253 | ## dbm, file, memcached, database, and memory. | |
250 | ## The storage uses the Container API |
|
254 | ## The storage uses the Container API | |
251 | ## that is also used by the cache system. |
|
255 | ## that is also used by the cache system. | |
252 |
|
256 | |||
253 | ## db session ## |
|
257 | ## db session ## | |
254 | #beaker.session.type = ext:database |
|
258 | #beaker.session.type = ext:database | |
255 | #beaker.session.sa.url = postgresql://postgres:qwe@localhost/rhodecode |
|
259 | #beaker.session.sa.url = postgresql://postgres:qwe@localhost/rhodecode | |
256 | #beaker.session.table_name = db_session |
|
260 | #beaker.session.table_name = db_session | |
257 |
|
261 | |||
258 | ## encrypted cookie client side session, good for many instances ## |
|
262 | ## encrypted cookie client side session, good for many instances ## | |
259 | #beaker.session.type = cookie |
|
263 | #beaker.session.type = cookie | |
260 |
|
264 | |||
261 | ## file based cookies (default) ## |
|
265 | ## file based cookies (default) ## | |
262 | #beaker.session.type = file |
|
266 | #beaker.session.type = file | |
263 |
|
267 | |||
264 |
|
268 | |||
265 | beaker.session.key = rhodecode |
|
269 | beaker.session.key = rhodecode | |
266 | ## secure cookie requires AES python libraries |
|
270 | ## secure cookie requires AES python libraries | |
267 | #beaker.session.encrypt_key = <key_for_encryption> |
|
271 | #beaker.session.encrypt_key = <key_for_encryption> | |
268 | #beaker.session.validate_key = <validation_key> |
|
272 | #beaker.session.validate_key = <validation_key> | |
269 |
|
273 | |||
270 | ## sets session as invalid if it haven't been accessed for given amount of time |
|
274 | ## sets session as invalid if it haven't been accessed for given amount of time | |
271 | beaker.session.timeout = 2592000 |
|
275 | beaker.session.timeout = 2592000 | |
272 | beaker.session.httponly = true |
|
276 | beaker.session.httponly = true | |
273 | #beaker.session.cookie_path = /<your-prefix> |
|
277 | #beaker.session.cookie_path = /<your-prefix> | |
274 |
|
278 | |||
275 | ## uncomment for https secure cookie |
|
279 | ## uncomment for https secure cookie | |
276 | beaker.session.secure = false |
|
280 | beaker.session.secure = false | |
277 |
|
281 | |||
278 | ## auto save the session to not to use .save() |
|
282 | ## auto save the session to not to use .save() | |
279 | beaker.session.auto = False |
|
283 | beaker.session.auto = False | |
280 |
|
284 | |||
281 | ## default cookie expiration time in seconds `true` expire at browser close ## |
|
285 | ## default cookie expiration time in seconds `true` expire at browser close ## | |
282 | #beaker.session.cookie_expires = 3600 |
|
286 | #beaker.session.cookie_expires = 3600 | |
283 |
|
287 | |||
284 |
|
288 | |||
285 | ############################ |
|
289 | ############################ | |
286 | ## ERROR HANDLING SYSTEMS ## |
|
290 | ## ERROR HANDLING SYSTEMS ## | |
287 | ############################ |
|
291 | ############################ | |
288 |
|
292 | |||
289 | #################### |
|
293 | #################### | |
290 | ### [errormator] ### |
|
294 | ### [errormator] ### | |
291 | #################### |
|
295 | #################### | |
292 |
|
296 | |||
293 | ## Errormator is tailored to work with RhodeCode, see |
|
297 | ## Errormator is tailored to work with RhodeCode, see | |
294 | ## http://errormator.com for details how to obtain an account |
|
298 | ## http://errormator.com for details how to obtain an account | |
295 | ## you must install python package `errormator_client` to make it work |
|
299 | ## you must install python package `errormator_client` to make it work | |
296 |
|
300 | |||
297 | ## errormator enabled |
|
301 | ## errormator enabled | |
298 | errormator = false |
|
302 | errormator = false | |
299 |
|
303 | |||
300 | errormator.server_url = https://api.errormator.com |
|
304 | errormator.server_url = https://api.errormator.com | |
301 | errormator.api_key = YOUR_API_KEY |
|
305 | errormator.api_key = YOUR_API_KEY | |
302 |
|
306 | |||
303 | ## TWEAK AMOUNT OF INFO SENT HERE |
|
307 | ## TWEAK AMOUNT OF INFO SENT HERE | |
304 |
|
308 | |||
305 | ## enables 404 error logging (default False) |
|
309 | ## enables 404 error logging (default False) | |
306 | errormator.report_404 = false |
|
310 | errormator.report_404 = false | |
307 |
|
311 | |||
308 | ## time in seconds after request is considered being slow (default 1) |
|
312 | ## time in seconds after request is considered being slow (default 1) | |
309 | errormator.slow_request_time = 1 |
|
313 | errormator.slow_request_time = 1 | |
310 |
|
314 | |||
311 | ## record slow requests in application |
|
315 | ## record slow requests in application | |
312 | ## (needs to be enabled for slow datastore recording and time tracking) |
|
316 | ## (needs to be enabled for slow datastore recording and time tracking) | |
313 | errormator.slow_requests = true |
|
317 | errormator.slow_requests = true | |
314 |
|
318 | |||
315 | ## enable hooking to application loggers |
|
319 | ## enable hooking to application loggers | |
316 | # errormator.logging = true |
|
320 | # errormator.logging = true | |
317 |
|
321 | |||
318 | ## minimum log level for log capture |
|
322 | ## minimum log level for log capture | |
319 | # errormator.logging.level = WARNING |
|
323 | # errormator.logging.level = WARNING | |
320 |
|
324 | |||
321 | ## send logs only from erroneous/slow requests |
|
325 | ## send logs only from erroneous/slow requests | |
322 | ## (saves API quota for intensive logging) |
|
326 | ## (saves API quota for intensive logging) | |
323 | errormator.logging_on_error = false |
|
327 | errormator.logging_on_error = false | |
324 |
|
328 | |||
325 | ## list of additonal keywords that should be grabbed from environ object |
|
329 | ## list of additonal keywords that should be grabbed from environ object | |
326 | ## can be string with comma separated list of words in lowercase |
|
330 | ## can be string with comma separated list of words in lowercase | |
327 | ## (by default client will always send following info: |
|
331 | ## (by default client will always send following info: | |
328 | ## 'REMOTE_USER', 'REMOTE_ADDR', 'SERVER_NAME', 'CONTENT_TYPE' + all keys that |
|
332 | ## 'REMOTE_USER', 'REMOTE_ADDR', 'SERVER_NAME', 'CONTENT_TYPE' + all keys that | |
329 | ## start with HTTP* this list be extended with additional keywords here |
|
333 | ## start with HTTP* this list be extended with additional keywords here | |
330 | errormator.environ_keys_whitelist = |
|
334 | errormator.environ_keys_whitelist = | |
331 |
|
335 | |||
332 |
|
336 | |||
333 | ## list of keywords that should be blanked from request object |
|
337 | ## list of keywords that should be blanked from request object | |
334 | ## can be string with comma separated list of words in lowercase |
|
338 | ## can be string with comma separated list of words in lowercase | |
335 | ## (by default client will always blank keys that contain following words |
|
339 | ## (by default client will always blank keys that contain following words | |
336 | ## 'password', 'passwd', 'pwd', 'auth_tkt', 'secret', 'csrf' |
|
340 | ## 'password', 'passwd', 'pwd', 'auth_tkt', 'secret', 'csrf' | |
337 | ## this list be extended with additional keywords set here |
|
341 | ## this list be extended with additional keywords set here | |
338 | errormator.request_keys_blacklist = |
|
342 | errormator.request_keys_blacklist = | |
339 |
|
343 | |||
340 |
|
344 | |||
341 | ## list of namespaces that should be ignores when gathering log entries |
|
345 | ## list of namespaces that should be ignores when gathering log entries | |
342 | ## can be string with comma separated list of namespaces |
|
346 | ## can be string with comma separated list of namespaces | |
343 | ## (by default the client ignores own entries: errormator_client.client) |
|
347 | ## (by default the client ignores own entries: errormator_client.client) | |
344 | errormator.log_namespace_blacklist = |
|
348 | errormator.log_namespace_blacklist = | |
345 |
|
349 | |||
346 |
|
350 | |||
347 | ################ |
|
351 | ################ | |
348 | ### [sentry] ### |
|
352 | ### [sentry] ### | |
349 | ################ |
|
353 | ################ | |
350 |
|
354 | |||
351 | ## sentry is a alternative open source error aggregator |
|
355 | ## sentry is a alternative open source error aggregator | |
352 | ## you must install python packages `sentry` and `raven` to enable |
|
356 | ## you must install python packages `sentry` and `raven` to enable | |
353 |
|
357 | |||
354 | sentry.dsn = YOUR_DNS |
|
358 | sentry.dsn = YOUR_DNS | |
355 | sentry.servers = |
|
359 | sentry.servers = | |
356 | sentry.name = |
|
360 | sentry.name = | |
357 | sentry.key = |
|
361 | sentry.key = | |
358 | sentry.public_key = |
|
362 | sentry.public_key = | |
359 | sentry.secret_key = |
|
363 | sentry.secret_key = | |
360 | sentry.project = |
|
364 | sentry.project = | |
361 | sentry.site = |
|
365 | sentry.site = | |
362 | sentry.include_paths = |
|
366 | sentry.include_paths = | |
363 | sentry.exclude_paths = |
|
367 | sentry.exclude_paths = | |
364 |
|
368 | |||
365 |
|
369 | |||
366 | ################################################################################ |
|
370 | ################################################################################ | |
367 | ## WARNING: *THE LINE BELOW MUST BE UNCOMMENTED ON A PRODUCTION ENVIRONMENT* ## |
|
371 | ## WARNING: *THE LINE BELOW MUST BE UNCOMMENTED ON A PRODUCTION ENVIRONMENT* ## | |
368 | ## Debug mode will enable the interactive debugging tool, allowing ANYONE to ## |
|
372 | ## Debug mode will enable the interactive debugging tool, allowing ANYONE to ## | |
369 | ## execute malicious code after an exception is raised. ## |
|
373 | ## execute malicious code after an exception is raised. ## | |
370 | ################################################################################ |
|
374 | ################################################################################ | |
371 | set debug = false |
|
375 | set debug = false | |
372 |
|
376 | |||
373 | ################################## |
|
377 | ################################## | |
374 | ### LOGVIEW CONFIG ### |
|
378 | ### LOGVIEW CONFIG ### | |
375 | ################################## |
|
379 | ################################## | |
376 | logview.sqlalchemy = #faa |
|
380 | logview.sqlalchemy = #faa | |
377 | logview.pylons.templating = #bfb |
|
381 | logview.pylons.templating = #bfb | |
378 | logview.pylons.util = #eee |
|
382 | logview.pylons.util = #eee | |
379 |
|
383 | |||
380 | ######################################################### |
|
384 | ######################################################### | |
381 | ### DB CONFIGS - EACH DB WILL HAVE IT'S OWN CONFIG ### |
|
385 | ### DB CONFIGS - EACH DB WILL HAVE IT'S OWN CONFIG ### | |
382 | ######################################################### |
|
386 | ######################################################### | |
383 | #sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode.db |
|
387 | #sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode.db | |
384 | sqlalchemy.db1.url = postgresql://postgres:qwe@localhost/rhodecode |
|
388 | sqlalchemy.db1.url = postgresql://postgres:qwe@localhost/rhodecode | |
385 | sqlalchemy.db1.echo = false |
|
389 | sqlalchemy.db1.echo = false | |
386 | sqlalchemy.db1.pool_recycle = 3600 |
|
390 | sqlalchemy.db1.pool_recycle = 3600 | |
387 | sqlalchemy.db1.convert_unicode = true |
|
391 | sqlalchemy.db1.convert_unicode = true | |
388 |
|
392 | |||
389 | ################################ |
|
393 | ################################ | |
390 | ### LOGGING CONFIGURATION #### |
|
394 | ### LOGGING CONFIGURATION #### | |
391 | ################################ |
|
395 | ################################ | |
392 | [loggers] |
|
396 | [loggers] | |
393 | keys = root, routes, rhodecode, sqlalchemy, beaker, templates, whoosh_indexer |
|
397 | keys = root, routes, rhodecode, sqlalchemy, beaker, templates, whoosh_indexer | |
394 |
|
398 | |||
395 | [handlers] |
|
399 | [handlers] | |
396 | keys = console, console_sql |
|
400 | keys = console, console_sql | |
397 |
|
401 | |||
398 | [formatters] |
|
402 | [formatters] | |
399 | keys = generic, color_formatter, color_formatter_sql |
|
403 | keys = generic, color_formatter, color_formatter_sql | |
400 |
|
404 | |||
401 | ############# |
|
405 | ############# | |
402 | ## LOGGERS ## |
|
406 | ## LOGGERS ## | |
403 | ############# |
|
407 | ############# | |
404 | [logger_root] |
|
408 | [logger_root] | |
405 | level = NOTSET |
|
409 | level = NOTSET | |
406 | handlers = console |
|
410 | handlers = console | |
407 |
|
411 | |||
408 | [logger_routes] |
|
412 | [logger_routes] | |
409 | level = DEBUG |
|
413 | level = DEBUG | |
410 | handlers = |
|
414 | handlers = | |
411 | qualname = routes.middleware |
|
415 | qualname = routes.middleware | |
412 | ## "level = DEBUG" logs the route matched and routing variables. |
|
416 | ## "level = DEBUG" logs the route matched and routing variables. | |
413 | propagate = 1 |
|
417 | propagate = 1 | |
414 |
|
418 | |||
415 | [logger_beaker] |
|
419 | [logger_beaker] | |
416 | level = DEBUG |
|
420 | level = DEBUG | |
417 | handlers = |
|
421 | handlers = | |
418 | qualname = beaker.container |
|
422 | qualname = beaker.container | |
419 | propagate = 1 |
|
423 | propagate = 1 | |
420 |
|
424 | |||
421 | [logger_templates] |
|
425 | [logger_templates] | |
422 | level = INFO |
|
426 | level = INFO | |
423 | handlers = |
|
427 | handlers = | |
424 | qualname = pylons.templating |
|
428 | qualname = pylons.templating | |
425 | propagate = 1 |
|
429 | propagate = 1 | |
426 |
|
430 | |||
427 | [logger_rhodecode] |
|
431 | [logger_rhodecode] | |
428 | level = DEBUG |
|
432 | level = DEBUG | |
429 | handlers = |
|
433 | handlers = | |
430 | qualname = rhodecode |
|
434 | qualname = rhodecode | |
431 | propagate = 1 |
|
435 | propagate = 1 | |
432 |
|
436 | |||
433 | [logger_sqlalchemy] |
|
437 | [logger_sqlalchemy] | |
434 | level = INFO |
|
438 | level = INFO | |
435 | handlers = console_sql |
|
439 | handlers = console_sql | |
436 | qualname = sqlalchemy.engine |
|
440 | qualname = sqlalchemy.engine | |
437 | propagate = 0 |
|
441 | propagate = 0 | |
438 |
|
442 | |||
439 | [logger_whoosh_indexer] |
|
443 | [logger_whoosh_indexer] | |
440 | level = DEBUG |
|
444 | level = DEBUG | |
441 | handlers = |
|
445 | handlers = | |
442 | qualname = whoosh_indexer |
|
446 | qualname = whoosh_indexer | |
443 | propagate = 1 |
|
447 | propagate = 1 | |
444 |
|
448 | |||
445 | ############## |
|
449 | ############## | |
446 | ## HANDLERS ## |
|
450 | ## HANDLERS ## | |
447 | ############## |
|
451 | ############## | |
448 |
|
452 | |||
449 | [handler_console] |
|
453 | [handler_console] | |
450 | class = StreamHandler |
|
454 | class = StreamHandler | |
451 | args = (sys.stderr,) |
|
455 | args = (sys.stderr,) | |
452 | level = INFO |
|
456 | level = INFO | |
453 | formatter = generic |
|
457 | formatter = generic | |
454 |
|
458 | |||
455 | [handler_console_sql] |
|
459 | [handler_console_sql] | |
456 | class = StreamHandler |
|
460 | class = StreamHandler | |
457 | args = (sys.stderr,) |
|
461 | args = (sys.stderr,) | |
458 | level = WARN |
|
462 | level = WARN | |
459 | formatter = generic |
|
463 | formatter = generic | |
460 |
|
464 | |||
461 | ################ |
|
465 | ################ | |
462 | ## FORMATTERS ## |
|
466 | ## FORMATTERS ## | |
463 | ################ |
|
467 | ################ | |
464 |
|
468 | |||
465 | [formatter_generic] |
|
469 | [formatter_generic] | |
466 | format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s |
|
470 | format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s | |
467 | datefmt = %Y-%m-%d %H:%M:%S |
|
471 | datefmt = %Y-%m-%d %H:%M:%S | |
468 |
|
472 | |||
469 | [formatter_color_formatter] |
|
473 | [formatter_color_formatter] | |
470 | class=rhodecode.lib.colored_formatter.ColorFormatter |
|
474 | class=rhodecode.lib.colored_formatter.ColorFormatter | |
471 | format= %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s |
|
475 | format= %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s | |
472 | datefmt = %Y-%m-%d %H:%M:%S |
|
476 | datefmt = %Y-%m-%d %H:%M:%S | |
473 |
|
477 | |||
474 | [formatter_color_formatter_sql] |
|
478 | [formatter_color_formatter_sql] | |
475 | class=rhodecode.lib.colored_formatter.ColorFormatterSql |
|
479 | class=rhodecode.lib.colored_formatter.ColorFormatterSql | |
476 | format= %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s |
|
480 | format= %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s | |
477 | datefmt = %Y-%m-%d %H:%M:%S |
|
481 | datefmt = %Y-%m-%d %H:%M:%S |
@@ -1,487 +1,491 b'' | |||||
1 | ################################################################################ |
|
1 | ################################################################################ | |
2 | ################################################################################ |
|
2 | ################################################################################ | |
3 | # RhodeCode - Pylons environment configuration # |
|
3 | # RhodeCode - Pylons environment configuration # | |
4 | # # |
|
4 | # # | |
5 | # The %(here)s variable will be replaced with the parent directory of this file# |
|
5 | # The %(here)s variable will be replaced with the parent directory of this file# | |
6 | ################################################################################ |
|
6 | ################################################################################ | |
7 |
|
7 | |||
8 | [DEFAULT] |
|
8 | [DEFAULT] | |
9 | debug = true |
|
9 | debug = true | |
10 | pdebug = false |
|
10 | pdebug = false | |
11 | ################################################################################ |
|
11 | ################################################################################ | |
12 | ## Uncomment and replace with the address which should receive ## |
|
12 | ## Uncomment and replace with the address which should receive ## | |
13 | ## any error reports after application crash ## |
|
13 | ## any error reports after application crash ## | |
14 | ## Additionally those settings will be used by RhodeCode mailing system ## |
|
14 | ## Additionally those settings will be used by RhodeCode mailing system ## | |
15 | ################################################################################ |
|
15 | ################################################################################ | |
16 | #email_to = admin@localhost |
|
16 | #email_to = admin@localhost | |
17 | #error_email_from = paste_error@localhost |
|
17 | #error_email_from = paste_error@localhost | |
18 | #app_email_from = rhodecode-noreply@localhost |
|
18 | #app_email_from = rhodecode-noreply@localhost | |
19 | #error_message = |
|
19 | #error_message = | |
20 | #email_prefix = [RhodeCode] |
|
20 | #email_prefix = [RhodeCode] | |
21 |
|
21 | |||
22 | #smtp_server = mail.server.com |
|
22 | #smtp_server = mail.server.com | |
23 | #smtp_username = |
|
23 | #smtp_username = | |
24 | #smtp_password = |
|
24 | #smtp_password = | |
25 | #smtp_port = |
|
25 | #smtp_port = | |
26 | #smtp_use_tls = false |
|
26 | #smtp_use_tls = false | |
27 | #smtp_use_ssl = true |
|
27 | #smtp_use_ssl = true | |
28 | ## Specify available auth parameters here (e.g. LOGIN PLAIN CRAM-MD5, etc.) |
|
28 | ## Specify available auth parameters here (e.g. LOGIN PLAIN CRAM-MD5, etc.) | |
29 | #smtp_auth = |
|
29 | #smtp_auth = | |
30 |
|
30 | |||
31 | [server:main] |
|
31 | [server:main] | |
32 | ## PASTE |
|
32 | ## PASTE | |
33 | ## nr of threads to spawn |
|
33 | ## nr of threads to spawn | |
34 | #threadpool_workers = 5 |
|
34 | #threadpool_workers = 5 | |
35 |
|
35 | |||
36 | ## max request before thread respawn |
|
36 | ## max request before thread respawn | |
37 | #threadpool_max_requests = 10 |
|
37 | #threadpool_max_requests = 10 | |
38 |
|
38 | |||
39 | ## option to use threads of process |
|
39 | ## option to use threads of process | |
40 | #use_threadpool = true |
|
40 | #use_threadpool = true | |
41 |
|
41 | |||
42 | #use = egg:Paste#http |
|
42 | #use = egg:Paste#http | |
43 |
|
43 | |||
44 | ## WAITRESS |
|
44 | ## WAITRESS | |
45 | threads = 5 |
|
45 | threads = 5 | |
46 | ## 100GB |
|
46 | ## 100GB | |
47 | max_request_body_size = 107374182400 |
|
47 | max_request_body_size = 107374182400 | |
48 | use = egg:waitress#main |
|
48 | use = egg:waitress#main | |
49 |
|
49 | |||
50 | host = 127.0.0.1 |
|
50 | host = 127.0.0.1 | |
51 | port = 5000 |
|
51 | port = 5000 | |
52 |
|
52 | |||
53 | ## prefix middleware for rc |
|
53 | ## prefix middleware for rc | |
54 | #[filter:proxy-prefix] |
|
54 | #[filter:proxy-prefix] | |
55 | #use = egg:PasteDeploy#prefix |
|
55 | #use = egg:PasteDeploy#prefix | |
56 | #prefix = /<your-prefix> |
|
56 | #prefix = /<your-prefix> | |
57 |
|
57 | |||
58 | [app:main] |
|
58 | [app:main] | |
59 | use = egg:rhodecode |
|
59 | use = egg:rhodecode | |
60 | ## enable proxy prefix middleware |
|
60 | ## enable proxy prefix middleware | |
61 | #filter-with = proxy-prefix |
|
61 | #filter-with = proxy-prefix | |
62 |
|
62 | |||
63 | full_stack = true |
|
63 | full_stack = true | |
64 | static_files = true |
|
64 | static_files = true | |
65 | ## Optional Languages |
|
65 | ## Optional Languages | |
66 | ## en, fr, ja, pt_BR, zh_CN, zh_TW, pl |
|
66 | ## en, fr, ja, pt_BR, zh_CN, zh_TW, pl | |
67 | lang = en |
|
67 | lang = en | |
68 | cache_dir = %(here)s/data |
|
68 | cache_dir = %(here)s/data | |
69 | index_dir = %(here)s/data/index |
|
69 | index_dir = %(here)s/data/index | |
70 |
|
70 | |||
71 | ## uncomment and set this path to use archive download cache |
|
71 | ## uncomment and set this path to use archive download cache | |
72 | #archive_cache_dir = /tmp/tarballcache |
|
72 | #archive_cache_dir = /tmp/tarballcache | |
73 |
|
73 | |||
74 | ## change this to unique ID for security |
|
74 | ## change this to unique ID for security | |
75 | app_instance_uuid = ${app_instance_uuid} |
|
75 | app_instance_uuid = ${app_instance_uuid} | |
76 |
|
76 | |||
77 | ## cut off limit for large diffs (size in bytes) |
|
77 | ## cut off limit for large diffs (size in bytes) | |
78 | cut_off_limit = 256000 |
|
78 | cut_off_limit = 256000 | |
79 |
|
79 | |||
80 | ## use cache version of scm repo everywhere |
|
80 | ## use cache version of scm repo everywhere | |
81 | vcs_full_cache = true |
|
81 | vcs_full_cache = true | |
82 |
|
82 | |||
83 | ## force https in RhodeCode, fixes https redirects, assumes it's always https |
|
83 | ## force https in RhodeCode, fixes https redirects, assumes it's always https | |
84 | force_https = false |
|
84 | force_https = false | |
85 |
|
85 | |||
86 | ## use Strict-Transport-Security headers |
|
86 | ## use Strict-Transport-Security headers | |
87 | use_htsts = false |
|
87 | use_htsts = false | |
88 |
|
88 | |||
89 | ## number of commits stats will parse on each iteration |
|
89 | ## number of commits stats will parse on each iteration | |
90 | commit_parse_limit = 25 |
|
90 | commit_parse_limit = 25 | |
91 |
|
91 | |||
92 | ## number of items displayed in lightweight dashboard before paginating is shown |
|
92 | ## number of items displayed in lightweight dashboard before paginating is shown | |
93 | dashboard_items = 100 |
|
93 | dashboard_items = 100 | |
94 |
|
94 | |||
95 | ## use gravatar service to display avatars |
|
95 | ## use gravatar service to display avatars | |
96 | use_gravatar = true |
|
96 | use_gravatar = true | |
97 |
|
97 | |||
98 | ## path to git executable |
|
98 | ## path to git executable | |
99 | git_path = git |
|
99 | git_path = git | |
100 |
|
100 | |||
|
101 | ## git rev filter option, --all is the default filter, if you need to | |||
|
102 | ## hide all refs in changelog switch this to --branches --tags | |||
|
103 | git_rev_filter=--all | |||
|
104 | ||||
101 | ## RSS feed options |
|
105 | ## RSS feed options | |
102 | rss_cut_off_limit = 256000 |
|
106 | rss_cut_off_limit = 256000 | |
103 | rss_items_per_page = 10 |
|
107 | rss_items_per_page = 10 | |
104 | rss_include_diff = false |
|
108 | rss_include_diff = false | |
105 |
|
109 | |||
106 | ## show hash options for changelog |
|
110 | ## show hash options for changelog | |
107 | sha_len = 12 |
|
111 | sha_len = 12 | |
108 | sha_rev = true |
|
112 | sha_rev = true | |
109 |
|
113 | |||
110 |
|
114 | |||
111 | ## alternative_gravatar_url allows you to use your own avatar server application |
|
115 | ## alternative_gravatar_url allows you to use your own avatar server application | |
112 | ## the following parts of the URL will be replaced |
|
116 | ## the following parts of the URL will be replaced | |
113 | ## {email} user email |
|
117 | ## {email} user email | |
114 | ## {md5email} md5 hash of the user email (like at gravatar.com) |
|
118 | ## {md5email} md5 hash of the user email (like at gravatar.com) | |
115 | ## {size} size of the image that is expected from the server application |
|
119 | ## {size} size of the image that is expected from the server application | |
116 | ## {scheme} http/https from RhodeCode server |
|
120 | ## {scheme} http/https from RhodeCode server | |
117 | ## {netloc} network location from RhodeCode server |
|
121 | ## {netloc} network location from RhodeCode server | |
118 | #alternative_gravatar_url = http://myavatarserver.com/getbyemail/{email}/{size} |
|
122 | #alternative_gravatar_url = http://myavatarserver.com/getbyemail/{email}/{size} | |
119 | #alternative_gravatar_url = http://myavatarserver.com/getbymd5/{md5email}?s={size} |
|
123 | #alternative_gravatar_url = http://myavatarserver.com/getbymd5/{md5email}?s={size} | |
120 |
|
124 | |||
121 |
|
125 | |||
122 | ## container auth options |
|
126 | ## container auth options | |
123 | container_auth_enabled = false |
|
127 | container_auth_enabled = false | |
124 | proxypass_auth_enabled = false |
|
128 | proxypass_auth_enabled = false | |
125 |
|
129 | |||
126 | ## default encoding used to convert from and to unicode |
|
130 | ## default encoding used to convert from and to unicode | |
127 | ## can be also a comma seperated list of encoding in case of mixed encodings |
|
131 | ## can be also a comma seperated list of encoding in case of mixed encodings | |
128 | default_encoding = utf8 |
|
132 | default_encoding = utf8 | |
129 |
|
133 | |||
130 | ## overwrite schema of clone url |
|
134 | ## overwrite schema of clone url | |
131 | ## available vars: |
|
135 | ## available vars: | |
132 | ## scheme - http/https |
|
136 | ## scheme - http/https | |
133 | ## user - current user |
|
137 | ## user - current user | |
134 | ## pass - password |
|
138 | ## pass - password | |
135 | ## netloc - network location |
|
139 | ## netloc - network location | |
136 | ## path - usually repo_name |
|
140 | ## path - usually repo_name | |
137 |
|
141 | |||
138 | #clone_uri = {scheme}://{user}{pass}{netloc}{path} |
|
142 | #clone_uri = {scheme}://{user}{pass}{netloc}{path} | |
139 |
|
143 | |||
140 | ## issue tracking mapping for commits messages |
|
144 | ## issue tracking mapping for commits messages | |
141 | ## comment out issue_pat, issue_server, issue_prefix to enable |
|
145 | ## comment out issue_pat, issue_server, issue_prefix to enable | |
142 |
|
146 | |||
143 | ## pattern to get the issues from commit messages |
|
147 | ## pattern to get the issues from commit messages | |
144 | ## default one used here is #<numbers> with a regex passive group for `#` |
|
148 | ## default one used here is #<numbers> with a regex passive group for `#` | |
145 | ## {id} will be all groups matched from this pattern |
|
149 | ## {id} will be all groups matched from this pattern | |
146 |
|
150 | |||
147 | issue_pat = (?:\s*#)(\d+) |
|
151 | issue_pat = (?:\s*#)(\d+) | |
148 |
|
152 | |||
149 | ## server url to the issue, each {id} will be replaced with match |
|
153 | ## server url to the issue, each {id} will be replaced with match | |
150 | ## fetched from the regex and {repo} is replaced with full repository name |
|
154 | ## fetched from the regex and {repo} is replaced with full repository name | |
151 | ## including groups {repo_name} is replaced with just name of repo |
|
155 | ## including groups {repo_name} is replaced with just name of repo | |
152 |
|
156 | |||
153 | issue_server_link = https://myissueserver.com/{repo}/issue/{id} |
|
157 | issue_server_link = https://myissueserver.com/{repo}/issue/{id} | |
154 |
|
158 | |||
155 | ## prefix to add to link to indicate it's an url |
|
159 | ## prefix to add to link to indicate it's an url | |
156 | ## #314 will be replaced by <issue_prefix><id> |
|
160 | ## #314 will be replaced by <issue_prefix><id> | |
157 |
|
161 | |||
158 | issue_prefix = # |
|
162 | issue_prefix = # | |
159 |
|
163 | |||
160 | ## issue_pat, issue_server_link, issue_prefix can have suffixes to specify |
|
164 | ## issue_pat, issue_server_link, issue_prefix can have suffixes to specify | |
161 | ## multiple patterns, to other issues server, wiki or others |
|
165 | ## multiple patterns, to other issues server, wiki or others | |
162 | ## below an example how to create a wiki pattern |
|
166 | ## below an example how to create a wiki pattern | |
163 | # #wiki-some-id -> https://mywiki.com/some-id |
|
167 | # #wiki-some-id -> https://mywiki.com/some-id | |
164 |
|
168 | |||
165 | #issue_pat_wiki = (?:wiki-)(.+) |
|
169 | #issue_pat_wiki = (?:wiki-)(.+) | |
166 | #issue_server_link_wiki = https://mywiki.com/{id} |
|
170 | #issue_server_link_wiki = https://mywiki.com/{id} | |
167 | #issue_prefix_wiki = WIKI- |
|
171 | #issue_prefix_wiki = WIKI- | |
168 |
|
172 | |||
169 |
|
173 | |||
170 | ## instance-id prefix |
|
174 | ## instance-id prefix | |
171 | ## a prefix key for this instance used for cache invalidation when running |
|
175 | ## a prefix key for this instance used for cache invalidation when running | |
172 | ## multiple instances of rhodecode, make sure it's globally unique for |
|
176 | ## multiple instances of rhodecode, make sure it's globally unique for | |
173 | ## all running rhodecode instances. Leave empty if you don't use it |
|
177 | ## all running rhodecode instances. Leave empty if you don't use it | |
174 | instance_id = |
|
178 | instance_id = | |
175 |
|
179 | |||
176 | ## alternative return HTTP header for failed authentication. Default HTTP |
|
180 | ## alternative return HTTP header for failed authentication. Default HTTP | |
177 | ## response is 401 HTTPUnauthorized. Currently HG clients have troubles with |
|
181 | ## response is 401 HTTPUnauthorized. Currently HG clients have troubles with | |
178 | ## handling that. Set this variable to 403 to return HTTPForbidden |
|
182 | ## handling that. Set this variable to 403 to return HTTPForbidden | |
179 | auth_ret_code = |
|
183 | auth_ret_code = | |
180 |
|
184 | |||
181 | ## locking return code. When repository is locked return this HTTP code. 2XX |
|
185 | ## locking return code. When repository is locked return this HTTP code. 2XX | |
182 | ## codes don't break the transactions while 4XX codes do |
|
186 | ## codes don't break the transactions while 4XX codes do | |
183 | lock_ret_code = 423 |
|
187 | lock_ret_code = 423 | |
184 |
|
188 | |||
185 |
|
189 | |||
186 | #################################### |
|
190 | #################################### | |
187 | ### CELERY CONFIG #### |
|
191 | ### CELERY CONFIG #### | |
188 | #################################### |
|
192 | #################################### | |
189 | use_celery = false |
|
193 | use_celery = false | |
190 | broker.host = localhost |
|
194 | broker.host = localhost | |
191 | broker.vhost = rabbitmqhost |
|
195 | broker.vhost = rabbitmqhost | |
192 | broker.port = 5672 |
|
196 | broker.port = 5672 | |
193 | broker.user = rabbitmq |
|
197 | broker.user = rabbitmq | |
194 | broker.password = qweqwe |
|
198 | broker.password = qweqwe | |
195 |
|
199 | |||
196 | celery.imports = rhodecode.lib.celerylib.tasks |
|
200 | celery.imports = rhodecode.lib.celerylib.tasks | |
197 |
|
201 | |||
198 | celery.result.backend = amqp |
|
202 | celery.result.backend = amqp | |
199 | celery.result.dburi = amqp:// |
|
203 | celery.result.dburi = amqp:// | |
200 | celery.result.serialier = json |
|
204 | celery.result.serialier = json | |
201 |
|
205 | |||
202 | #celery.send.task.error.emails = true |
|
206 | #celery.send.task.error.emails = true | |
203 | #celery.amqp.task.result.expires = 18000 |
|
207 | #celery.amqp.task.result.expires = 18000 | |
204 |
|
208 | |||
205 | celeryd.concurrency = 2 |
|
209 | celeryd.concurrency = 2 | |
206 | #celeryd.log.file = celeryd.log |
|
210 | #celeryd.log.file = celeryd.log | |
207 | celeryd.log.level = debug |
|
211 | celeryd.log.level = debug | |
208 | celeryd.max.tasks.per.child = 1 |
|
212 | celeryd.max.tasks.per.child = 1 | |
209 |
|
213 | |||
210 | ## tasks will never be sent to the queue, but executed locally instead. |
|
214 | ## tasks will never be sent to the queue, but executed locally instead. | |
211 | celery.always.eager = false |
|
215 | celery.always.eager = false | |
212 |
|
216 | |||
213 | #################################### |
|
217 | #################################### | |
214 | ### BEAKER CACHE #### |
|
218 | ### BEAKER CACHE #### | |
215 | #################################### |
|
219 | #################################### | |
216 | beaker.cache.data_dir=%(here)s/data/cache/data |
|
220 | beaker.cache.data_dir=%(here)s/data/cache/data | |
217 | beaker.cache.lock_dir=%(here)s/data/cache/lock |
|
221 | beaker.cache.lock_dir=%(here)s/data/cache/lock | |
218 |
|
222 | |||
219 | beaker.cache.regions=super_short_term,short_term,long_term,sql_cache_short,sql_cache_med,sql_cache_long |
|
223 | beaker.cache.regions=super_short_term,short_term,long_term,sql_cache_short,sql_cache_med,sql_cache_long | |
220 |
|
224 | |||
221 | beaker.cache.super_short_term.type=memory |
|
225 | beaker.cache.super_short_term.type=memory | |
222 | beaker.cache.super_short_term.expire=10 |
|
226 | beaker.cache.super_short_term.expire=10 | |
223 | beaker.cache.super_short_term.key_length = 256 |
|
227 | beaker.cache.super_short_term.key_length = 256 | |
224 |
|
228 | |||
225 | beaker.cache.short_term.type=memory |
|
229 | beaker.cache.short_term.type=memory | |
226 | beaker.cache.short_term.expire=60 |
|
230 | beaker.cache.short_term.expire=60 | |
227 | beaker.cache.short_term.key_length = 256 |
|
231 | beaker.cache.short_term.key_length = 256 | |
228 |
|
232 | |||
229 | beaker.cache.long_term.type=memory |
|
233 | beaker.cache.long_term.type=memory | |
230 | beaker.cache.long_term.expire=36000 |
|
234 | beaker.cache.long_term.expire=36000 | |
231 | beaker.cache.long_term.key_length = 256 |
|
235 | beaker.cache.long_term.key_length = 256 | |
232 |
|
236 | |||
233 | beaker.cache.sql_cache_short.type=memory |
|
237 | beaker.cache.sql_cache_short.type=memory | |
234 | beaker.cache.sql_cache_short.expire=10 |
|
238 | beaker.cache.sql_cache_short.expire=10 | |
235 | beaker.cache.sql_cache_short.key_length = 256 |
|
239 | beaker.cache.sql_cache_short.key_length = 256 | |
236 |
|
240 | |||
237 | beaker.cache.sql_cache_med.type=memory |
|
241 | beaker.cache.sql_cache_med.type=memory | |
238 | beaker.cache.sql_cache_med.expire=360 |
|
242 | beaker.cache.sql_cache_med.expire=360 | |
239 | beaker.cache.sql_cache_med.key_length = 256 |
|
243 | beaker.cache.sql_cache_med.key_length = 256 | |
240 |
|
244 | |||
241 | beaker.cache.sql_cache_long.type=file |
|
245 | beaker.cache.sql_cache_long.type=file | |
242 | beaker.cache.sql_cache_long.expire=3600 |
|
246 | beaker.cache.sql_cache_long.expire=3600 | |
243 | beaker.cache.sql_cache_long.key_length = 256 |
|
247 | beaker.cache.sql_cache_long.key_length = 256 | |
244 |
|
248 | |||
245 | #################################### |
|
249 | #################################### | |
246 | ### BEAKER SESSION #### |
|
250 | ### BEAKER SESSION #### | |
247 | #################################### |
|
251 | #################################### | |
248 | ## Type of storage used for the session, current types are |
|
252 | ## Type of storage used for the session, current types are | |
249 | ## dbm, file, memcached, database, and memory. |
|
253 | ## dbm, file, memcached, database, and memory. | |
250 | ## The storage uses the Container API |
|
254 | ## The storage uses the Container API | |
251 | ## that is also used by the cache system. |
|
255 | ## that is also used by the cache system. | |
252 |
|
256 | |||
253 | ## db session ## |
|
257 | ## db session ## | |
254 | #beaker.session.type = ext:database |
|
258 | #beaker.session.type = ext:database | |
255 | #beaker.session.sa.url = postgresql://postgres:qwe@localhost/rhodecode |
|
259 | #beaker.session.sa.url = postgresql://postgres:qwe@localhost/rhodecode | |
256 | #beaker.session.table_name = db_session |
|
260 | #beaker.session.table_name = db_session | |
257 |
|
261 | |||
258 | ## encrypted cookie client side session, good for many instances ## |
|
262 | ## encrypted cookie client side session, good for many instances ## | |
259 | #beaker.session.type = cookie |
|
263 | #beaker.session.type = cookie | |
260 |
|
264 | |||
261 | ## file based cookies (default) ## |
|
265 | ## file based cookies (default) ## | |
262 | #beaker.session.type = file |
|
266 | #beaker.session.type = file | |
263 |
|
267 | |||
264 |
|
268 | |||
265 | beaker.session.key = rhodecode |
|
269 | beaker.session.key = rhodecode | |
266 | ## secure cookie requires AES python libraries |
|
270 | ## secure cookie requires AES python libraries | |
267 | #beaker.session.encrypt_key = <key_for_encryption> |
|
271 | #beaker.session.encrypt_key = <key_for_encryption> | |
268 | #beaker.session.validate_key = <validation_key> |
|
272 | #beaker.session.validate_key = <validation_key> | |
269 |
|
273 | |||
270 | ## sets session as invalid if it haven't been accessed for given amount of time |
|
274 | ## sets session as invalid if it haven't been accessed for given amount of time | |
271 | beaker.session.timeout = 2592000 |
|
275 | beaker.session.timeout = 2592000 | |
272 | beaker.session.httponly = true |
|
276 | beaker.session.httponly = true | |
273 | #beaker.session.cookie_path = /<your-prefix> |
|
277 | #beaker.session.cookie_path = /<your-prefix> | |
274 |
|
278 | |||
275 | ## uncomment for https secure cookie |
|
279 | ## uncomment for https secure cookie | |
276 | beaker.session.secure = false |
|
280 | beaker.session.secure = false | |
277 |
|
281 | |||
278 | ## auto save the session to not to use .save() |
|
282 | ## auto save the session to not to use .save() | |
279 | beaker.session.auto = False |
|
283 | beaker.session.auto = False | |
280 |
|
284 | |||
281 | ## default cookie expiration time in seconds `true` expire at browser close ## |
|
285 | ## default cookie expiration time in seconds `true` expire at browser close ## | |
282 | #beaker.session.cookie_expires = 3600 |
|
286 | #beaker.session.cookie_expires = 3600 | |
283 |
|
287 | |||
284 |
|
288 | |||
285 | ############################ |
|
289 | ############################ | |
286 | ## ERROR HANDLING SYSTEMS ## |
|
290 | ## ERROR HANDLING SYSTEMS ## | |
287 | ############################ |
|
291 | ############################ | |
288 |
|
292 | |||
289 | #################### |
|
293 | #################### | |
290 | ### [errormator] ### |
|
294 | ### [errormator] ### | |
291 | #################### |
|
295 | #################### | |
292 |
|
296 | |||
293 | ## Errormator is tailored to work with RhodeCode, see |
|
297 | ## Errormator is tailored to work with RhodeCode, see | |
294 | ## http://errormator.com for details how to obtain an account |
|
298 | ## http://errormator.com for details how to obtain an account | |
295 | ## you must install python package `errormator_client` to make it work |
|
299 | ## you must install python package `errormator_client` to make it work | |
296 |
|
300 | |||
297 | ## errormator enabled |
|
301 | ## errormator enabled | |
298 | errormator = false |
|
302 | errormator = false | |
299 |
|
303 | |||
300 | errormator.server_url = https://api.errormator.com |
|
304 | errormator.server_url = https://api.errormator.com | |
301 | errormator.api_key = YOUR_API_KEY |
|
305 | errormator.api_key = YOUR_API_KEY | |
302 |
|
306 | |||
303 | ## TWEAK AMOUNT OF INFO SENT HERE |
|
307 | ## TWEAK AMOUNT OF INFO SENT HERE | |
304 |
|
308 | |||
305 | ## enables 404 error logging (default False) |
|
309 | ## enables 404 error logging (default False) | |
306 | errormator.report_404 = false |
|
310 | errormator.report_404 = false | |
307 |
|
311 | |||
308 | ## time in seconds after request is considered being slow (default 1) |
|
312 | ## time in seconds after request is considered being slow (default 1) | |
309 | errormator.slow_request_time = 1 |
|
313 | errormator.slow_request_time = 1 | |
310 |
|
314 | |||
311 | ## record slow requests in application |
|
315 | ## record slow requests in application | |
312 | ## (needs to be enabled for slow datastore recording and time tracking) |
|
316 | ## (needs to be enabled for slow datastore recording and time tracking) | |
313 | errormator.slow_requests = true |
|
317 | errormator.slow_requests = true | |
314 |
|
318 | |||
315 | ## enable hooking to application loggers |
|
319 | ## enable hooking to application loggers | |
316 | # errormator.logging = true |
|
320 | # errormator.logging = true | |
317 |
|
321 | |||
318 | ## minimum log level for log capture |
|
322 | ## minimum log level for log capture | |
319 | # errormator.logging.level = WARNING |
|
323 | # errormator.logging.level = WARNING | |
320 |
|
324 | |||
321 | ## send logs only from erroneous/slow requests |
|
325 | ## send logs only from erroneous/slow requests | |
322 | ## (saves API quota for intensive logging) |
|
326 | ## (saves API quota for intensive logging) | |
323 | errormator.logging_on_error = false |
|
327 | errormator.logging_on_error = false | |
324 |
|
328 | |||
325 | ## list of additonal keywords that should be grabbed from environ object |
|
329 | ## list of additonal keywords that should be grabbed from environ object | |
326 | ## can be string with comma separated list of words in lowercase |
|
330 | ## can be string with comma separated list of words in lowercase | |
327 | ## (by default client will always send following info: |
|
331 | ## (by default client will always send following info: | |
328 | ## 'REMOTE_USER', 'REMOTE_ADDR', 'SERVER_NAME', 'CONTENT_TYPE' + all keys that |
|
332 | ## 'REMOTE_USER', 'REMOTE_ADDR', 'SERVER_NAME', 'CONTENT_TYPE' + all keys that | |
329 | ## start with HTTP* this list be extended with additional keywords here |
|
333 | ## start with HTTP* this list be extended with additional keywords here | |
330 | errormator.environ_keys_whitelist = |
|
334 | errormator.environ_keys_whitelist = | |
331 |
|
335 | |||
332 |
|
336 | |||
333 | ## list of keywords that should be blanked from request object |
|
337 | ## list of keywords that should be blanked from request object | |
334 | ## can be string with comma separated list of words in lowercase |
|
338 | ## can be string with comma separated list of words in lowercase | |
335 | ## (by default client will always blank keys that contain following words |
|
339 | ## (by default client will always blank keys that contain following words | |
336 | ## 'password', 'passwd', 'pwd', 'auth_tkt', 'secret', 'csrf' |
|
340 | ## 'password', 'passwd', 'pwd', 'auth_tkt', 'secret', 'csrf' | |
337 | ## this list be extended with additional keywords set here |
|
341 | ## this list be extended with additional keywords set here | |
338 | errormator.request_keys_blacklist = |
|
342 | errormator.request_keys_blacklist = | |
339 |
|
343 | |||
340 |
|
344 | |||
341 | ## list of namespaces that should be ignores when gathering log entries |
|
345 | ## list of namespaces that should be ignores when gathering log entries | |
342 | ## can be string with comma separated list of namespaces |
|
346 | ## can be string with comma separated list of namespaces | |
343 | ## (by default the client ignores own entries: errormator_client.client) |
|
347 | ## (by default the client ignores own entries: errormator_client.client) | |
344 | errormator.log_namespace_blacklist = |
|
348 | errormator.log_namespace_blacklist = | |
345 |
|
349 | |||
346 |
|
350 | |||
347 | ################ |
|
351 | ################ | |
348 | ### [sentry] ### |
|
352 | ### [sentry] ### | |
349 | ################ |
|
353 | ################ | |
350 |
|
354 | |||
351 | ## sentry is a alternative open source error aggregator |
|
355 | ## sentry is a alternative open source error aggregator | |
352 | ## you must install python packages `sentry` and `raven` to enable |
|
356 | ## you must install python packages `sentry` and `raven` to enable | |
353 |
|
357 | |||
354 | sentry.dsn = YOUR_DNS |
|
358 | sentry.dsn = YOUR_DNS | |
355 | sentry.servers = |
|
359 | sentry.servers = | |
356 | sentry.name = |
|
360 | sentry.name = | |
357 | sentry.key = |
|
361 | sentry.key = | |
358 | sentry.public_key = |
|
362 | sentry.public_key = | |
359 | sentry.secret_key = |
|
363 | sentry.secret_key = | |
360 | sentry.project = |
|
364 | sentry.project = | |
361 | sentry.site = |
|
365 | sentry.site = | |
362 | sentry.include_paths = |
|
366 | sentry.include_paths = | |
363 | sentry.exclude_paths = |
|
367 | sentry.exclude_paths = | |
364 |
|
368 | |||
365 |
|
369 | |||
366 | ################################################################################ |
|
370 | ################################################################################ | |
367 | ## WARNING: *THE LINE BELOW MUST BE UNCOMMENTED ON A PRODUCTION ENVIRONMENT* ## |
|
371 | ## WARNING: *THE LINE BELOW MUST BE UNCOMMENTED ON A PRODUCTION ENVIRONMENT* ## | |
368 | ## Debug mode will enable the interactive debugging tool, allowing ANYONE to ## |
|
372 | ## Debug mode will enable the interactive debugging tool, allowing ANYONE to ## | |
369 | ## execute malicious code after an exception is raised. ## |
|
373 | ## execute malicious code after an exception is raised. ## | |
370 | ################################################################################ |
|
374 | ################################################################################ | |
371 | set debug = false |
|
375 | set debug = false | |
372 |
|
376 | |||
373 | ################################## |
|
377 | ################################## | |
374 | ### LOGVIEW CONFIG ### |
|
378 | ### LOGVIEW CONFIG ### | |
375 | ################################## |
|
379 | ################################## | |
376 | logview.sqlalchemy = #faa |
|
380 | logview.sqlalchemy = #faa | |
377 | logview.pylons.templating = #bfb |
|
381 | logview.pylons.templating = #bfb | |
378 | logview.pylons.util = #eee |
|
382 | logview.pylons.util = #eee | |
379 |
|
383 | |||
380 | ######################################################### |
|
384 | ######################################################### | |
381 | ### DB CONFIGS - EACH DB WILL HAVE IT'S OWN CONFIG ### |
|
385 | ### DB CONFIGS - EACH DB WILL HAVE IT'S OWN CONFIG ### | |
382 | ######################################################### |
|
386 | ######################################################### | |
383 |
|
387 | |||
384 | # SQLITE [default] |
|
388 | # SQLITE [default] | |
385 | sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode.db |
|
389 | sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode.db | |
386 |
|
390 | |||
387 | # POSTGRESQL |
|
391 | # POSTGRESQL | |
388 | # sqlalchemy.db1.url = postgresql://user:pass@localhost/rhodecode |
|
392 | # sqlalchemy.db1.url = postgresql://user:pass@localhost/rhodecode | |
389 |
|
393 | |||
390 | # MySQL |
|
394 | # MySQL | |
391 | # sqlalchemy.db1.url = mysql://user:pass@localhost/rhodecode |
|
395 | # sqlalchemy.db1.url = mysql://user:pass@localhost/rhodecode | |
392 |
|
396 | |||
393 | # see sqlalchemy docs for others |
|
397 | # see sqlalchemy docs for others | |
394 |
|
398 | |||
395 | sqlalchemy.db1.echo = false |
|
399 | sqlalchemy.db1.echo = false | |
396 | sqlalchemy.db1.pool_recycle = 3600 |
|
400 | sqlalchemy.db1.pool_recycle = 3600 | |
397 | sqlalchemy.db1.convert_unicode = true |
|
401 | sqlalchemy.db1.convert_unicode = true | |
398 |
|
402 | |||
399 | ################################ |
|
403 | ################################ | |
400 | ### LOGGING CONFIGURATION #### |
|
404 | ### LOGGING CONFIGURATION #### | |
401 | ################################ |
|
405 | ################################ | |
402 | [loggers] |
|
406 | [loggers] | |
403 | keys = root, routes, rhodecode, sqlalchemy, beaker, templates, whoosh_indexer |
|
407 | keys = root, routes, rhodecode, sqlalchemy, beaker, templates, whoosh_indexer | |
404 |
|
408 | |||
405 | [handlers] |
|
409 | [handlers] | |
406 | keys = console, console_sql |
|
410 | keys = console, console_sql | |
407 |
|
411 | |||
408 | [formatters] |
|
412 | [formatters] | |
409 | keys = generic, color_formatter, color_formatter_sql |
|
413 | keys = generic, color_formatter, color_formatter_sql | |
410 |
|
414 | |||
411 | ############# |
|
415 | ############# | |
412 | ## LOGGERS ## |
|
416 | ## LOGGERS ## | |
413 | ############# |
|
417 | ############# | |
414 | [logger_root] |
|
418 | [logger_root] | |
415 | level = NOTSET |
|
419 | level = NOTSET | |
416 | handlers = console |
|
420 | handlers = console | |
417 |
|
421 | |||
418 | [logger_routes] |
|
422 | [logger_routes] | |
419 | level = DEBUG |
|
423 | level = DEBUG | |
420 | handlers = |
|
424 | handlers = | |
421 | qualname = routes.middleware |
|
425 | qualname = routes.middleware | |
422 | ## "level = DEBUG" logs the route matched and routing variables. |
|
426 | ## "level = DEBUG" logs the route matched and routing variables. | |
423 | propagate = 1 |
|
427 | propagate = 1 | |
424 |
|
428 | |||
425 | [logger_beaker] |
|
429 | [logger_beaker] | |
426 | level = DEBUG |
|
430 | level = DEBUG | |
427 | handlers = |
|
431 | handlers = | |
428 | qualname = beaker.container |
|
432 | qualname = beaker.container | |
429 | propagate = 1 |
|
433 | propagate = 1 | |
430 |
|
434 | |||
431 | [logger_templates] |
|
435 | [logger_templates] | |
432 | level = INFO |
|
436 | level = INFO | |
433 | handlers = |
|
437 | handlers = | |
434 | qualname = pylons.templating |
|
438 | qualname = pylons.templating | |
435 | propagate = 1 |
|
439 | propagate = 1 | |
436 |
|
440 | |||
437 | [logger_rhodecode] |
|
441 | [logger_rhodecode] | |
438 | level = DEBUG |
|
442 | level = DEBUG | |
439 | handlers = |
|
443 | handlers = | |
440 | qualname = rhodecode |
|
444 | qualname = rhodecode | |
441 | propagate = 1 |
|
445 | propagate = 1 | |
442 |
|
446 | |||
443 | [logger_sqlalchemy] |
|
447 | [logger_sqlalchemy] | |
444 | level = INFO |
|
448 | level = INFO | |
445 | handlers = console_sql |
|
449 | handlers = console_sql | |
446 | qualname = sqlalchemy.engine |
|
450 | qualname = sqlalchemy.engine | |
447 | propagate = 0 |
|
451 | propagate = 0 | |
448 |
|
452 | |||
449 | [logger_whoosh_indexer] |
|
453 | [logger_whoosh_indexer] | |
450 | level = DEBUG |
|
454 | level = DEBUG | |
451 | handlers = |
|
455 | handlers = | |
452 | qualname = whoosh_indexer |
|
456 | qualname = whoosh_indexer | |
453 | propagate = 1 |
|
457 | propagate = 1 | |
454 |
|
458 | |||
455 | ############## |
|
459 | ############## | |
456 | ## HANDLERS ## |
|
460 | ## HANDLERS ## | |
457 | ############## |
|
461 | ############## | |
458 |
|
462 | |||
459 | [handler_console] |
|
463 | [handler_console] | |
460 | class = StreamHandler |
|
464 | class = StreamHandler | |
461 | args = (sys.stderr,) |
|
465 | args = (sys.stderr,) | |
462 | level = INFO |
|
466 | level = INFO | |
463 | formatter = generic |
|
467 | formatter = generic | |
464 |
|
468 | |||
465 | [handler_console_sql] |
|
469 | [handler_console_sql] | |
466 | class = StreamHandler |
|
470 | class = StreamHandler | |
467 | args = (sys.stderr,) |
|
471 | args = (sys.stderr,) | |
468 | level = WARN |
|
472 | level = WARN | |
469 | formatter = generic |
|
473 | formatter = generic | |
470 |
|
474 | |||
471 | ################ |
|
475 | ################ | |
472 | ## FORMATTERS ## |
|
476 | ## FORMATTERS ## | |
473 | ################ |
|
477 | ################ | |
474 |
|
478 | |||
475 | [formatter_generic] |
|
479 | [formatter_generic] | |
476 | format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s |
|
480 | format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s | |
477 | datefmt = %Y-%m-%d %H:%M:%S |
|
481 | datefmt = %Y-%m-%d %H:%M:%S | |
478 |
|
482 | |||
479 | [formatter_color_formatter] |
|
483 | [formatter_color_formatter] | |
480 | class=rhodecode.lib.colored_formatter.ColorFormatter |
|
484 | class=rhodecode.lib.colored_formatter.ColorFormatter | |
481 | format= %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s |
|
485 | format= %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s | |
482 | datefmt = %Y-%m-%d %H:%M:%S |
|
486 | datefmt = %Y-%m-%d %H:%M:%S | |
483 |
|
487 | |||
484 | [formatter_color_formatter_sql] |
|
488 | [formatter_color_formatter_sql] | |
485 | class=rhodecode.lib.colored_formatter.ColorFormatterSql |
|
489 | class=rhodecode.lib.colored_formatter.ColorFormatterSql | |
486 | format= %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s |
|
490 | format= %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s | |
487 | datefmt = %Y-%m-%d %H:%M:%S |
|
491 | datefmt = %Y-%m-%d %H:%M:%S |
@@ -1,549 +1,551 b'' | |||||
1 | import re |
|
1 | import re | |
2 | from itertools import chain |
|
2 | from itertools import chain | |
3 | from dulwich import objects |
|
3 | from dulwich import objects | |
4 | from subprocess import Popen, PIPE |
|
4 | from subprocess import Popen, PIPE | |
5 | import rhodecode |
|
5 | import rhodecode | |
6 | from rhodecode.lib.vcs.conf import settings |
|
6 | from rhodecode.lib.vcs.conf import settings | |
7 | from rhodecode.lib.vcs.exceptions import RepositoryError |
|
7 | from rhodecode.lib.vcs.exceptions import RepositoryError | |
8 | from rhodecode.lib.vcs.exceptions import ChangesetError |
|
8 | from rhodecode.lib.vcs.exceptions import ChangesetError | |
9 | from rhodecode.lib.vcs.exceptions import NodeDoesNotExistError |
|
9 | from rhodecode.lib.vcs.exceptions import NodeDoesNotExistError | |
10 | from rhodecode.lib.vcs.exceptions import VCSError |
|
10 | from rhodecode.lib.vcs.exceptions import VCSError | |
11 | from rhodecode.lib.vcs.exceptions import ChangesetDoesNotExistError |
|
11 | from rhodecode.lib.vcs.exceptions import ChangesetDoesNotExistError | |
12 | from rhodecode.lib.vcs.exceptions import ImproperArchiveTypeError |
|
12 | from rhodecode.lib.vcs.exceptions import ImproperArchiveTypeError | |
13 | from rhodecode.lib.vcs.backends.base import BaseChangeset, EmptyChangeset |
|
13 | from rhodecode.lib.vcs.backends.base import BaseChangeset, EmptyChangeset | |
14 | from rhodecode.lib.vcs.nodes import FileNode, DirNode, NodeKind, RootNode, \ |
|
14 | from rhodecode.lib.vcs.nodes import FileNode, DirNode, NodeKind, RootNode, \ | |
15 | RemovedFileNode, SubModuleNode, ChangedFileNodesGenerator,\ |
|
15 | RemovedFileNode, SubModuleNode, ChangedFileNodesGenerator,\ | |
16 | AddedFileNodesGenerator, RemovedFileNodesGenerator |
|
16 | AddedFileNodesGenerator, RemovedFileNodesGenerator | |
17 | from rhodecode.lib.vcs.utils import safe_unicode |
|
17 | from rhodecode.lib.vcs.utils import safe_unicode | |
18 | from rhodecode.lib.vcs.utils import date_fromtimestamp |
|
18 | from rhodecode.lib.vcs.utils import date_fromtimestamp | |
19 | from rhodecode.lib.vcs.utils.lazy import LazyProperty |
|
19 | from rhodecode.lib.vcs.utils.lazy import LazyProperty | |
20 | from rhodecode.lib.utils2 import safe_int |
|
20 | from rhodecode.lib.utils2 import safe_int | |
21 |
|
21 | |||
22 |
|
22 | |||
23 | class GitChangeset(BaseChangeset): |
|
23 | class GitChangeset(BaseChangeset): | |
24 | """ |
|
24 | """ | |
25 | Represents state of the repository at single revision. |
|
25 | Represents state of the repository at single revision. | |
26 | """ |
|
26 | """ | |
27 |
|
27 | |||
28 | def __init__(self, repository, revision): |
|
28 | def __init__(self, repository, revision): | |
29 | self._stat_modes = {} |
|
29 | self._stat_modes = {} | |
30 | self.repository = repository |
|
30 | self.repository = repository | |
31 |
|
31 | |||
32 | try: |
|
32 | try: | |
33 | commit = self.repository._repo.get_object(revision) |
|
33 | commit = self.repository._repo.get_object(revision) | |
34 | if isinstance(commit, objects.Tag): |
|
34 | if isinstance(commit, objects.Tag): | |
35 | revision = commit.object[1] |
|
35 | revision = commit.object[1] | |
36 | commit = self.repository._repo.get_object(commit.object[1]) |
|
36 | commit = self.repository._repo.get_object(commit.object[1]) | |
37 | except KeyError: |
|
37 | except KeyError: | |
38 | raise RepositoryError("Cannot get object with id %s" % revision) |
|
38 | raise RepositoryError("Cannot get object with id %s" % revision) | |
39 | self.raw_id = revision |
|
39 | self.raw_id = revision | |
40 | self.id = self.raw_id |
|
40 | self.id = self.raw_id | |
41 | self.short_id = self.raw_id[:12] |
|
41 | self.short_id = self.raw_id[:12] | |
42 | self._commit = commit |
|
42 | self._commit = commit | |
43 |
|
43 | |||
44 | self._tree_id = commit.tree |
|
44 | self._tree_id = commit.tree | |
45 | self._committer_property = 'committer' |
|
45 | self._committer_property = 'committer' | |
46 | self._author_property = 'author' |
|
46 | self._author_property = 'author' | |
47 | self._date_property = 'commit_time' |
|
47 | self._date_property = 'commit_time' | |
48 | self._date_tz_property = 'commit_timezone' |
|
48 | self._date_tz_property = 'commit_timezone' | |
49 | self.revision = repository.revisions.index(revision) |
|
49 | self.revision = repository.revisions.index(revision) | |
50 |
|
50 | |||
51 | self.message = safe_unicode(commit.message) |
|
51 | self.message = safe_unicode(commit.message) | |
52 |
|
52 | |||
53 | self.nodes = {} |
|
53 | self.nodes = {} | |
54 | self._paths = {} |
|
54 | self._paths = {} | |
55 |
|
55 | |||
56 | @LazyProperty |
|
56 | @LazyProperty | |
57 | def committer(self): |
|
57 | def committer(self): | |
58 | return safe_unicode(getattr(self._commit, self._committer_property)) |
|
58 | return safe_unicode(getattr(self._commit, self._committer_property)) | |
59 |
|
59 | |||
60 | @LazyProperty |
|
60 | @LazyProperty | |
61 | def author(self): |
|
61 | def author(self): | |
62 | return safe_unicode(getattr(self._commit, self._author_property)) |
|
62 | return safe_unicode(getattr(self._commit, self._author_property)) | |
63 |
|
63 | |||
64 | @LazyProperty |
|
64 | @LazyProperty | |
65 | def date(self): |
|
65 | def date(self): | |
66 | return date_fromtimestamp(getattr(self._commit, self._date_property), |
|
66 | return date_fromtimestamp(getattr(self._commit, self._date_property), | |
67 | getattr(self._commit, self._date_tz_property)) |
|
67 | getattr(self._commit, self._date_tz_property)) | |
68 |
|
68 | |||
69 | @LazyProperty |
|
69 | @LazyProperty | |
70 | def _timestamp(self): |
|
70 | def _timestamp(self): | |
71 | return getattr(self._commit, self._date_property) |
|
71 | return getattr(self._commit, self._date_property) | |
72 |
|
72 | |||
73 | @LazyProperty |
|
73 | @LazyProperty | |
74 | def status(self): |
|
74 | def status(self): | |
75 | """ |
|
75 | """ | |
76 | Returns modified, added, removed, deleted files for current changeset |
|
76 | Returns modified, added, removed, deleted files for current changeset | |
77 | """ |
|
77 | """ | |
78 | return self.changed, self.added, self.removed |
|
78 | return self.changed, self.added, self.removed | |
79 |
|
79 | |||
80 | @LazyProperty |
|
80 | @LazyProperty | |
81 | def tags(self): |
|
81 | def tags(self): | |
82 | _tags = [] |
|
82 | _tags = [] | |
83 | for tname, tsha in self.repository.tags.iteritems(): |
|
83 | for tname, tsha in self.repository.tags.iteritems(): | |
84 | if tsha == self.raw_id: |
|
84 | if tsha == self.raw_id: | |
85 | _tags.append(tname) |
|
85 | _tags.append(tname) | |
86 | return _tags |
|
86 | return _tags | |
87 |
|
87 | |||
88 | @LazyProperty |
|
88 | @LazyProperty | |
89 | def branch(self): |
|
89 | def branch(self): | |
90 |
|
90 | |||
91 | heads = self.repository._heads(reverse=False) |
|
91 | heads = self.repository._heads(reverse=False) | |
92 |
|
92 | |||
93 | ref = heads.get(self.raw_id) |
|
93 | ref = heads.get(self.raw_id) | |
94 | if ref: |
|
94 | if ref: | |
95 | return safe_unicode(ref) |
|
95 | return safe_unicode(ref) | |
96 |
|
96 | |||
97 | def _fix_path(self, path): |
|
97 | def _fix_path(self, path): | |
98 | """ |
|
98 | """ | |
99 | Paths are stored without trailing slash so we need to get rid off it if |
|
99 | Paths are stored without trailing slash so we need to get rid off it if | |
100 | needed. |
|
100 | needed. | |
101 | """ |
|
101 | """ | |
102 | if path.endswith('/'): |
|
102 | if path.endswith('/'): | |
103 | path = path.rstrip('/') |
|
103 | path = path.rstrip('/') | |
104 | return path |
|
104 | return path | |
105 |
|
105 | |||
106 | def _get_id_for_path(self, path): |
|
106 | def _get_id_for_path(self, path): | |
107 |
|
107 | |||
108 | # FIXME: Please, spare a couple of minutes and make those codes cleaner; |
|
108 | # FIXME: Please, spare a couple of minutes and make those codes cleaner; | |
109 | if not path in self._paths: |
|
109 | if not path in self._paths: | |
110 | path = path.strip('/') |
|
110 | path = path.strip('/') | |
111 | # set root tree |
|
111 | # set root tree | |
112 | tree = self.repository._repo[self._tree_id] |
|
112 | tree = self.repository._repo[self._tree_id] | |
113 | if path == '': |
|
113 | if path == '': | |
114 | self._paths[''] = tree.id |
|
114 | self._paths[''] = tree.id | |
115 | return tree.id |
|
115 | return tree.id | |
116 | splitted = path.split('/') |
|
116 | splitted = path.split('/') | |
117 | dirs, name = splitted[:-1], splitted[-1] |
|
117 | dirs, name = splitted[:-1], splitted[-1] | |
118 | curdir = '' |
|
118 | curdir = '' | |
119 |
|
119 | |||
120 | # initially extract things from root dir |
|
120 | # initially extract things from root dir | |
121 | for item, stat, id in tree.iteritems(): |
|
121 | for item, stat, id in tree.iteritems(): | |
122 | if curdir: |
|
122 | if curdir: | |
123 | name = '/'.join((curdir, item)) |
|
123 | name = '/'.join((curdir, item)) | |
124 | else: |
|
124 | else: | |
125 | name = item |
|
125 | name = item | |
126 | self._paths[name] = id |
|
126 | self._paths[name] = id | |
127 | self._stat_modes[name] = stat |
|
127 | self._stat_modes[name] = stat | |
128 |
|
128 | |||
129 | for dir in dirs: |
|
129 | for dir in dirs: | |
130 | if curdir: |
|
130 | if curdir: | |
131 | curdir = '/'.join((curdir, dir)) |
|
131 | curdir = '/'.join((curdir, dir)) | |
132 | else: |
|
132 | else: | |
133 | curdir = dir |
|
133 | curdir = dir | |
134 | dir_id = None |
|
134 | dir_id = None | |
135 | for item, stat, id in tree.iteritems(): |
|
135 | for item, stat, id in tree.iteritems(): | |
136 | if dir == item: |
|
136 | if dir == item: | |
137 | dir_id = id |
|
137 | dir_id = id | |
138 | if dir_id: |
|
138 | if dir_id: | |
139 | # Update tree |
|
139 | # Update tree | |
140 | tree = self.repository._repo[dir_id] |
|
140 | tree = self.repository._repo[dir_id] | |
141 | if not isinstance(tree, objects.Tree): |
|
141 | if not isinstance(tree, objects.Tree): | |
142 | raise ChangesetError('%s is not a directory' % curdir) |
|
142 | raise ChangesetError('%s is not a directory' % curdir) | |
143 | else: |
|
143 | else: | |
144 | raise ChangesetError('%s have not been found' % curdir) |
|
144 | raise ChangesetError('%s have not been found' % curdir) | |
145 |
|
145 | |||
146 | # cache all items from the given traversed tree |
|
146 | # cache all items from the given traversed tree | |
147 | for item, stat, id in tree.iteritems(): |
|
147 | for item, stat, id in tree.iteritems(): | |
148 | if curdir: |
|
148 | if curdir: | |
149 | name = '/'.join((curdir, item)) |
|
149 | name = '/'.join((curdir, item)) | |
150 | else: |
|
150 | else: | |
151 | name = item |
|
151 | name = item | |
152 | self._paths[name] = id |
|
152 | self._paths[name] = id | |
153 | self._stat_modes[name] = stat |
|
153 | self._stat_modes[name] = stat | |
154 | if not path in self._paths: |
|
154 | if not path in self._paths: | |
155 | raise NodeDoesNotExistError("There is no file nor directory " |
|
155 | raise NodeDoesNotExistError("There is no file nor directory " | |
156 | "at the given path %r at revision %r" |
|
156 | "at the given path %r at revision %r" | |
157 | % (path, self.short_id)) |
|
157 | % (path, self.short_id)) | |
158 | return self._paths[path] |
|
158 | return self._paths[path] | |
159 |
|
159 | |||
160 | def _get_kind(self, path): |
|
160 | def _get_kind(self, path): | |
161 | obj = self.repository._repo[self._get_id_for_path(path)] |
|
161 | obj = self.repository._repo[self._get_id_for_path(path)] | |
162 | if isinstance(obj, objects.Blob): |
|
162 | if isinstance(obj, objects.Blob): | |
163 | return NodeKind.FILE |
|
163 | return NodeKind.FILE | |
164 | elif isinstance(obj, objects.Tree): |
|
164 | elif isinstance(obj, objects.Tree): | |
165 | return NodeKind.DIR |
|
165 | return NodeKind.DIR | |
166 |
|
166 | |||
167 | def _get_filectx(self, path): |
|
167 | def _get_filectx(self, path): | |
168 | path = self._fix_path(path) |
|
168 | path = self._fix_path(path) | |
169 | if self._get_kind(path) != NodeKind.FILE: |
|
169 | if self._get_kind(path) != NodeKind.FILE: | |
170 | raise ChangesetError("File does not exist for revision %r at " |
|
170 | raise ChangesetError("File does not exist for revision %r at " | |
171 | " %r" % (self.raw_id, path)) |
|
171 | " %r" % (self.raw_id, path)) | |
172 | return path |
|
172 | return path | |
173 |
|
173 | |||
174 | def _get_file_nodes(self): |
|
174 | def _get_file_nodes(self): | |
175 | return chain(*(t[2] for t in self.walk())) |
|
175 | return chain(*(t[2] for t in self.walk())) | |
176 |
|
176 | |||
177 | @LazyProperty |
|
177 | @LazyProperty | |
178 | def parents(self): |
|
178 | def parents(self): | |
179 | """ |
|
179 | """ | |
180 | Returns list of parents changesets. |
|
180 | Returns list of parents changesets. | |
181 | """ |
|
181 | """ | |
182 | return [self.repository.get_changeset(parent) |
|
182 | return [self.repository.get_changeset(parent) | |
183 | for parent in self._commit.parents] |
|
183 | for parent in self._commit.parents] | |
184 |
|
184 | |||
185 | @LazyProperty |
|
185 | @LazyProperty | |
186 | def children(self): |
|
186 | def children(self): | |
187 | """ |
|
187 | """ | |
188 | Returns list of children changesets. |
|
188 | Returns list of children changesets. | |
189 | """ |
|
189 | """ | |
|
190 | rev_filter = _git_path = rhodecode.CONFIG.get('git_rev_filter', | |||
|
191 | '--all').strip() | |||
190 | so, se = self.repository.run_git_command( |
|
192 | so, se = self.repository.run_git_command( | |
191 |
"rev-list |
|
193 | "rev-list %s --children | grep '^%s'" % (rev_filter, self.raw_id) | |
192 | ) |
|
194 | ) | |
193 |
|
195 | |||
194 | children = [] |
|
196 | children = [] | |
195 | for l in so.splitlines(): |
|
197 | for l in so.splitlines(): | |
196 | childs = l.split(' ')[1:] |
|
198 | childs = l.split(' ')[1:] | |
197 | children.extend(childs) |
|
199 | children.extend(childs) | |
198 | return [self.repository.get_changeset(cs) for cs in children] |
|
200 | return [self.repository.get_changeset(cs) for cs in children] | |
199 |
|
201 | |||
200 | def next(self, branch=None): |
|
202 | def next(self, branch=None): | |
201 |
|
203 | |||
202 | if branch and self.branch != branch: |
|
204 | if branch and self.branch != branch: | |
203 | raise VCSError('Branch option used on changeset not belonging ' |
|
205 | raise VCSError('Branch option used on changeset not belonging ' | |
204 | 'to that branch') |
|
206 | 'to that branch') | |
205 |
|
207 | |||
206 | def _next(changeset, branch): |
|
208 | def _next(changeset, branch): | |
207 | try: |
|
209 | try: | |
208 | next_ = changeset.revision + 1 |
|
210 | next_ = changeset.revision + 1 | |
209 | next_rev = changeset.repository.revisions[next_] |
|
211 | next_rev = changeset.repository.revisions[next_] | |
210 | except IndexError: |
|
212 | except IndexError: | |
211 | raise ChangesetDoesNotExistError |
|
213 | raise ChangesetDoesNotExistError | |
212 | cs = changeset.repository.get_changeset(next_rev) |
|
214 | cs = changeset.repository.get_changeset(next_rev) | |
213 |
|
215 | |||
214 | if branch and branch != cs.branch: |
|
216 | if branch and branch != cs.branch: | |
215 | return _next(cs, branch) |
|
217 | return _next(cs, branch) | |
216 |
|
218 | |||
217 | return cs |
|
219 | return cs | |
218 |
|
220 | |||
219 | return _next(self, branch) |
|
221 | return _next(self, branch) | |
220 |
|
222 | |||
221 | def prev(self, branch=None): |
|
223 | def prev(self, branch=None): | |
222 | if branch and self.branch != branch: |
|
224 | if branch and self.branch != branch: | |
223 | raise VCSError('Branch option used on changeset not belonging ' |
|
225 | raise VCSError('Branch option used on changeset not belonging ' | |
224 | 'to that branch') |
|
226 | 'to that branch') | |
225 |
|
227 | |||
226 | def _prev(changeset, branch): |
|
228 | def _prev(changeset, branch): | |
227 | try: |
|
229 | try: | |
228 | prev_ = changeset.revision - 1 |
|
230 | prev_ = changeset.revision - 1 | |
229 | if prev_ < 0: |
|
231 | if prev_ < 0: | |
230 | raise IndexError |
|
232 | raise IndexError | |
231 | prev_rev = changeset.repository.revisions[prev_] |
|
233 | prev_rev = changeset.repository.revisions[prev_] | |
232 | except IndexError: |
|
234 | except IndexError: | |
233 | raise ChangesetDoesNotExistError |
|
235 | raise ChangesetDoesNotExistError | |
234 |
|
236 | |||
235 | cs = changeset.repository.get_changeset(prev_rev) |
|
237 | cs = changeset.repository.get_changeset(prev_rev) | |
236 |
|
238 | |||
237 | if branch and branch != cs.branch: |
|
239 | if branch and branch != cs.branch: | |
238 | return _prev(cs, branch) |
|
240 | return _prev(cs, branch) | |
239 |
|
241 | |||
240 | return cs |
|
242 | return cs | |
241 |
|
243 | |||
242 | return _prev(self, branch) |
|
244 | return _prev(self, branch) | |
243 |
|
245 | |||
244 | def diff(self, ignore_whitespace=True, context=3): |
|
246 | def diff(self, ignore_whitespace=True, context=3): | |
245 | rev1 = self.parents[0] if self.parents else self.repository.EMPTY_CHANGESET |
|
247 | rev1 = self.parents[0] if self.parents else self.repository.EMPTY_CHANGESET | |
246 | rev2 = self |
|
248 | rev2 = self | |
247 | return ''.join(self.repository.get_diff(rev1, rev2, |
|
249 | return ''.join(self.repository.get_diff(rev1, rev2, | |
248 | ignore_whitespace=ignore_whitespace, |
|
250 | ignore_whitespace=ignore_whitespace, | |
249 | context=context)) |
|
251 | context=context)) | |
250 |
|
252 | |||
251 | def get_file_mode(self, path): |
|
253 | def get_file_mode(self, path): | |
252 | """ |
|
254 | """ | |
253 | Returns stat mode of the file at the given ``path``. |
|
255 | Returns stat mode of the file at the given ``path``. | |
254 | """ |
|
256 | """ | |
255 | # ensure path is traversed |
|
257 | # ensure path is traversed | |
256 | self._get_id_for_path(path) |
|
258 | self._get_id_for_path(path) | |
257 | return self._stat_modes[path] |
|
259 | return self._stat_modes[path] | |
258 |
|
260 | |||
259 | def get_file_content(self, path): |
|
261 | def get_file_content(self, path): | |
260 | """ |
|
262 | """ | |
261 | Returns content of the file at given ``path``. |
|
263 | Returns content of the file at given ``path``. | |
262 | """ |
|
264 | """ | |
263 | id = self._get_id_for_path(path) |
|
265 | id = self._get_id_for_path(path) | |
264 | blob = self.repository._repo[id] |
|
266 | blob = self.repository._repo[id] | |
265 | return blob.as_pretty_string() |
|
267 | return blob.as_pretty_string() | |
266 |
|
268 | |||
267 | def get_file_size(self, path): |
|
269 | def get_file_size(self, path): | |
268 | """ |
|
270 | """ | |
269 | Returns size of the file at given ``path``. |
|
271 | Returns size of the file at given ``path``. | |
270 | """ |
|
272 | """ | |
271 | id = self._get_id_for_path(path) |
|
273 | id = self._get_id_for_path(path) | |
272 | blob = self.repository._repo[id] |
|
274 | blob = self.repository._repo[id] | |
273 | return blob.raw_length() |
|
275 | return blob.raw_length() | |
274 |
|
276 | |||
275 | def get_file_changeset(self, path): |
|
277 | def get_file_changeset(self, path): | |
276 | """ |
|
278 | """ | |
277 | Returns last commit of the file at the given ``path``. |
|
279 | Returns last commit of the file at the given ``path``. | |
278 | """ |
|
280 | """ | |
279 | return self.get_file_history(path, limit=1)[0] |
|
281 | return self.get_file_history(path, limit=1)[0] | |
280 |
|
282 | |||
281 | def get_file_history(self, path, limit=None): |
|
283 | def get_file_history(self, path, limit=None): | |
282 | """ |
|
284 | """ | |
283 | Returns history of file as reversed list of ``Changeset`` objects for |
|
285 | Returns history of file as reversed list of ``Changeset`` objects for | |
284 | which file at given ``path`` has been modified. |
|
286 | which file at given ``path`` has been modified. | |
285 |
|
287 | |||
286 | TODO: This function now uses os underlying 'git' and 'grep' commands |
|
288 | TODO: This function now uses os underlying 'git' and 'grep' commands | |
287 | which is generally not good. Should be replaced with algorithm |
|
289 | which is generally not good. Should be replaced with algorithm | |
288 | iterating commits. |
|
290 | iterating commits. | |
289 | """ |
|
291 | """ | |
290 |
|
292 | |||
291 | self._get_filectx(path) |
|
293 | self._get_filectx(path) | |
292 | if limit: |
|
294 | if limit: | |
293 | cmd = 'log -n %s --pretty="format: %%H" -s -p %s -- "%s"' % ( |
|
295 | cmd = 'log -n %s --pretty="format: %%H" -s -p %s -- "%s"' % ( | |
294 | safe_int(limit, 0), self.id, path |
|
296 | safe_int(limit, 0), self.id, path | |
295 | ) |
|
297 | ) | |
296 | else: |
|
298 | else: | |
297 | cmd = 'log --pretty="format: %%H" -s -p %s -- "%s"' % ( |
|
299 | cmd = 'log --pretty="format: %%H" -s -p %s -- "%s"' % ( | |
298 | self.id, path |
|
300 | self.id, path | |
299 | ) |
|
301 | ) | |
300 | so, se = self.repository.run_git_command(cmd) |
|
302 | so, se = self.repository.run_git_command(cmd) | |
301 | ids = re.findall(r'[0-9a-fA-F]{40}', so) |
|
303 | ids = re.findall(r'[0-9a-fA-F]{40}', so) | |
302 | return [self.repository.get_changeset(id) for id in ids] |
|
304 | return [self.repository.get_changeset(id) for id in ids] | |
303 |
|
305 | |||
304 | def get_file_history_2(self, path): |
|
306 | def get_file_history_2(self, path): | |
305 | """ |
|
307 | """ | |
306 | Returns history of file as reversed list of ``Changeset`` objects for |
|
308 | Returns history of file as reversed list of ``Changeset`` objects for | |
307 | which file at given ``path`` has been modified. |
|
309 | which file at given ``path`` has been modified. | |
308 |
|
310 | |||
309 | """ |
|
311 | """ | |
310 | self._get_filectx(path) |
|
312 | self._get_filectx(path) | |
311 | from dulwich.walk import Walker |
|
313 | from dulwich.walk import Walker | |
312 | include = [self.id] |
|
314 | include = [self.id] | |
313 | walker = Walker(self.repository._repo.object_store, include, |
|
315 | walker = Walker(self.repository._repo.object_store, include, | |
314 | paths=[path], max_entries=1) |
|
316 | paths=[path], max_entries=1) | |
315 | return [self.repository.get_changeset(sha) |
|
317 | return [self.repository.get_changeset(sha) | |
316 | for sha in (x.commit.id for x in walker)] |
|
318 | for sha in (x.commit.id for x in walker)] | |
317 |
|
319 | |||
318 | def get_file_annotate(self, path): |
|
320 | def get_file_annotate(self, path): | |
319 | """ |
|
321 | """ | |
320 | Returns a generator of four element tuples with |
|
322 | Returns a generator of four element tuples with | |
321 | lineno, sha, changeset lazy loader and line |
|
323 | lineno, sha, changeset lazy loader and line | |
322 |
|
324 | |||
323 | TODO: This function now uses os underlying 'git' command which is |
|
325 | TODO: This function now uses os underlying 'git' command which is | |
324 | generally not good. Should be replaced with algorithm iterating |
|
326 | generally not good. Should be replaced with algorithm iterating | |
325 | commits. |
|
327 | commits. | |
326 | """ |
|
328 | """ | |
327 | cmd = 'blame -l --root -r %s -- "%s"' % (self.id, path) |
|
329 | cmd = 'blame -l --root -r %s -- "%s"' % (self.id, path) | |
328 | # -l ==> outputs long shas (and we need all 40 characters) |
|
330 | # -l ==> outputs long shas (and we need all 40 characters) | |
329 | # --root ==> doesn't put '^' character for bounderies |
|
331 | # --root ==> doesn't put '^' character for bounderies | |
330 | # -r sha ==> blames for the given revision |
|
332 | # -r sha ==> blames for the given revision | |
331 | so, se = self.repository.run_git_command(cmd) |
|
333 | so, se = self.repository.run_git_command(cmd) | |
332 |
|
334 | |||
333 | for i, blame_line in enumerate(so.split('\n')[:-1]): |
|
335 | for i, blame_line in enumerate(so.split('\n')[:-1]): | |
334 | ln_no = i + 1 |
|
336 | ln_no = i + 1 | |
335 | sha, line = re.split(r' ', blame_line, 1) |
|
337 | sha, line = re.split(r' ', blame_line, 1) | |
336 | yield (ln_no, sha, lambda: self.repository.get_changeset(sha), line) |
|
338 | yield (ln_no, sha, lambda: self.repository.get_changeset(sha), line) | |
337 |
|
339 | |||
338 | def fill_archive(self, stream=None, kind='tgz', prefix=None, |
|
340 | def fill_archive(self, stream=None, kind='tgz', prefix=None, | |
339 | subrepos=False): |
|
341 | subrepos=False): | |
340 | """ |
|
342 | """ | |
341 | Fills up given stream. |
|
343 | Fills up given stream. | |
342 |
|
344 | |||
343 | :param stream: file like object. |
|
345 | :param stream: file like object. | |
344 | :param kind: one of following: ``zip``, ``tgz`` or ``tbz2``. |
|
346 | :param kind: one of following: ``zip``, ``tgz`` or ``tbz2``. | |
345 | Default: ``tgz``. |
|
347 | Default: ``tgz``. | |
346 | :param prefix: name of root directory in archive. |
|
348 | :param prefix: name of root directory in archive. | |
347 | Default is repository name and changeset's raw_id joined with dash |
|
349 | Default is repository name and changeset's raw_id joined with dash | |
348 | (``repo-tip.<KIND>``). |
|
350 | (``repo-tip.<KIND>``). | |
349 | :param subrepos: include subrepos in this archive. |
|
351 | :param subrepos: include subrepos in this archive. | |
350 |
|
352 | |||
351 | :raise ImproperArchiveTypeError: If given kind is wrong. |
|
353 | :raise ImproperArchiveTypeError: If given kind is wrong. | |
352 | :raise VcsError: If given stream is None |
|
354 | :raise VcsError: If given stream is None | |
353 |
|
355 | |||
354 | """ |
|
356 | """ | |
355 | allowed_kinds = settings.ARCHIVE_SPECS.keys() |
|
357 | allowed_kinds = settings.ARCHIVE_SPECS.keys() | |
356 | if kind not in allowed_kinds: |
|
358 | if kind not in allowed_kinds: | |
357 | raise ImproperArchiveTypeError('Archive kind not supported use one' |
|
359 | raise ImproperArchiveTypeError('Archive kind not supported use one' | |
358 | 'of %s', allowed_kinds) |
|
360 | 'of %s', allowed_kinds) | |
359 |
|
361 | |||
360 | if prefix is None: |
|
362 | if prefix is None: | |
361 | prefix = '%s-%s' % (self.repository.name, self.short_id) |
|
363 | prefix = '%s-%s' % (self.repository.name, self.short_id) | |
362 | elif prefix.startswith('/'): |
|
364 | elif prefix.startswith('/'): | |
363 | raise VCSError("Prefix cannot start with leading slash") |
|
365 | raise VCSError("Prefix cannot start with leading slash") | |
364 | elif prefix.strip() == '': |
|
366 | elif prefix.strip() == '': | |
365 | raise VCSError("Prefix cannot be empty") |
|
367 | raise VCSError("Prefix cannot be empty") | |
366 |
|
368 | |||
367 | if kind == 'zip': |
|
369 | if kind == 'zip': | |
368 | frmt = 'zip' |
|
370 | frmt = 'zip' | |
369 | else: |
|
371 | else: | |
370 | frmt = 'tar' |
|
372 | frmt = 'tar' | |
371 | _git_path = rhodecode.CONFIG.get('git_path', 'git') |
|
373 | _git_path = rhodecode.CONFIG.get('git_path', 'git') | |
372 | cmd = '%s archive --format=%s --prefix=%s/ %s' % (_git_path, |
|
374 | cmd = '%s archive --format=%s --prefix=%s/ %s' % (_git_path, | |
373 | frmt, prefix, self.raw_id) |
|
375 | frmt, prefix, self.raw_id) | |
374 | if kind == 'tgz': |
|
376 | if kind == 'tgz': | |
375 | cmd += ' | gzip -9' |
|
377 | cmd += ' | gzip -9' | |
376 | elif kind == 'tbz2': |
|
378 | elif kind == 'tbz2': | |
377 | cmd += ' | bzip2 -9' |
|
379 | cmd += ' | bzip2 -9' | |
378 |
|
380 | |||
379 | if stream is None: |
|
381 | if stream is None: | |
380 | raise VCSError('You need to pass in a valid stream for filling' |
|
382 | raise VCSError('You need to pass in a valid stream for filling' | |
381 | ' with archival data') |
|
383 | ' with archival data') | |
382 | popen = Popen(cmd, stdout=PIPE, stderr=PIPE, shell=True, |
|
384 | popen = Popen(cmd, stdout=PIPE, stderr=PIPE, shell=True, | |
383 | cwd=self.repository.path) |
|
385 | cwd=self.repository.path) | |
384 |
|
386 | |||
385 | buffer_size = 1024 * 8 |
|
387 | buffer_size = 1024 * 8 | |
386 | chunk = popen.stdout.read(buffer_size) |
|
388 | chunk = popen.stdout.read(buffer_size) | |
387 | while chunk: |
|
389 | while chunk: | |
388 | stream.write(chunk) |
|
390 | stream.write(chunk) | |
389 | chunk = popen.stdout.read(buffer_size) |
|
391 | chunk = popen.stdout.read(buffer_size) | |
390 | # Make sure all descriptors would be read |
|
392 | # Make sure all descriptors would be read | |
391 | popen.communicate() |
|
393 | popen.communicate() | |
392 |
|
394 | |||
393 | def get_nodes(self, path): |
|
395 | def get_nodes(self, path): | |
394 | if self._get_kind(path) != NodeKind.DIR: |
|
396 | if self._get_kind(path) != NodeKind.DIR: | |
395 | raise ChangesetError("Directory does not exist for revision %r at " |
|
397 | raise ChangesetError("Directory does not exist for revision %r at " | |
396 | " %r" % (self.revision, path)) |
|
398 | " %r" % (self.revision, path)) | |
397 | path = self._fix_path(path) |
|
399 | path = self._fix_path(path) | |
398 | id = self._get_id_for_path(path) |
|
400 | id = self._get_id_for_path(path) | |
399 | tree = self.repository._repo[id] |
|
401 | tree = self.repository._repo[id] | |
400 | dirnodes = [] |
|
402 | dirnodes = [] | |
401 | filenodes = [] |
|
403 | filenodes = [] | |
402 | als = self.repository.alias |
|
404 | als = self.repository.alias | |
403 | for name, stat, id in tree.iteritems(): |
|
405 | for name, stat, id in tree.iteritems(): | |
404 | if objects.S_ISGITLINK(stat): |
|
406 | if objects.S_ISGITLINK(stat): | |
405 | dirnodes.append(SubModuleNode(name, url=None, changeset=id, |
|
407 | dirnodes.append(SubModuleNode(name, url=None, changeset=id, | |
406 | alias=als)) |
|
408 | alias=als)) | |
407 | continue |
|
409 | continue | |
408 |
|
410 | |||
409 | obj = self.repository._repo.get_object(id) |
|
411 | obj = self.repository._repo.get_object(id) | |
410 | if path != '': |
|
412 | if path != '': | |
411 | obj_path = '/'.join((path, name)) |
|
413 | obj_path = '/'.join((path, name)) | |
412 | else: |
|
414 | else: | |
413 | obj_path = name |
|
415 | obj_path = name | |
414 | if obj_path not in self._stat_modes: |
|
416 | if obj_path not in self._stat_modes: | |
415 | self._stat_modes[obj_path] = stat |
|
417 | self._stat_modes[obj_path] = stat | |
416 | if isinstance(obj, objects.Tree): |
|
418 | if isinstance(obj, objects.Tree): | |
417 | dirnodes.append(DirNode(obj_path, changeset=self)) |
|
419 | dirnodes.append(DirNode(obj_path, changeset=self)) | |
418 | elif isinstance(obj, objects.Blob): |
|
420 | elif isinstance(obj, objects.Blob): | |
419 | filenodes.append(FileNode(obj_path, changeset=self, mode=stat)) |
|
421 | filenodes.append(FileNode(obj_path, changeset=self, mode=stat)) | |
420 | else: |
|
422 | else: | |
421 | raise ChangesetError("Requested object should be Tree " |
|
423 | raise ChangesetError("Requested object should be Tree " | |
422 | "or Blob, is %r" % type(obj)) |
|
424 | "or Blob, is %r" % type(obj)) | |
423 | nodes = dirnodes + filenodes |
|
425 | nodes = dirnodes + filenodes | |
424 | for node in nodes: |
|
426 | for node in nodes: | |
425 | if not node.path in self.nodes: |
|
427 | if not node.path in self.nodes: | |
426 | self.nodes[node.path] = node |
|
428 | self.nodes[node.path] = node | |
427 | nodes.sort() |
|
429 | nodes.sort() | |
428 | return nodes |
|
430 | return nodes | |
429 |
|
431 | |||
430 | def get_node(self, path): |
|
432 | def get_node(self, path): | |
431 | if isinstance(path, unicode): |
|
433 | if isinstance(path, unicode): | |
432 | path = path.encode('utf-8') |
|
434 | path = path.encode('utf-8') | |
433 | path = self._fix_path(path) |
|
435 | path = self._fix_path(path) | |
434 | if not path in self.nodes: |
|
436 | if not path in self.nodes: | |
435 | try: |
|
437 | try: | |
436 | id_ = self._get_id_for_path(path) |
|
438 | id_ = self._get_id_for_path(path) | |
437 | except ChangesetError: |
|
439 | except ChangesetError: | |
438 | raise NodeDoesNotExistError("Cannot find one of parents' " |
|
440 | raise NodeDoesNotExistError("Cannot find one of parents' " | |
439 | "directories for a given path: %s" % path) |
|
441 | "directories for a given path: %s" % path) | |
440 |
|
442 | |||
441 | _GL = lambda m: m and objects.S_ISGITLINK(m) |
|
443 | _GL = lambda m: m and objects.S_ISGITLINK(m) | |
442 | if _GL(self._stat_modes.get(path)): |
|
444 | if _GL(self._stat_modes.get(path)): | |
443 | node = SubModuleNode(path, url=None, changeset=id_, |
|
445 | node = SubModuleNode(path, url=None, changeset=id_, | |
444 | alias=self.repository.alias) |
|
446 | alias=self.repository.alias) | |
445 | else: |
|
447 | else: | |
446 | obj = self.repository._repo.get_object(id_) |
|
448 | obj = self.repository._repo.get_object(id_) | |
447 |
|
449 | |||
448 | if isinstance(obj, objects.Tree): |
|
450 | if isinstance(obj, objects.Tree): | |
449 | if path == '': |
|
451 | if path == '': | |
450 | node = RootNode(changeset=self) |
|
452 | node = RootNode(changeset=self) | |
451 | else: |
|
453 | else: | |
452 | node = DirNode(path, changeset=self) |
|
454 | node = DirNode(path, changeset=self) | |
453 | node._tree = obj |
|
455 | node._tree = obj | |
454 | elif isinstance(obj, objects.Blob): |
|
456 | elif isinstance(obj, objects.Blob): | |
455 | node = FileNode(path, changeset=self) |
|
457 | node = FileNode(path, changeset=self) | |
456 | node._blob = obj |
|
458 | node._blob = obj | |
457 | else: |
|
459 | else: | |
458 | raise NodeDoesNotExistError("There is no file nor directory " |
|
460 | raise NodeDoesNotExistError("There is no file nor directory " | |
459 | "at the given path %r at revision %r" |
|
461 | "at the given path %r at revision %r" | |
460 | % (path, self.short_id)) |
|
462 | % (path, self.short_id)) | |
461 | # cache node |
|
463 | # cache node | |
462 | self.nodes[path] = node |
|
464 | self.nodes[path] = node | |
463 | return self.nodes[path] |
|
465 | return self.nodes[path] | |
464 |
|
466 | |||
465 | @LazyProperty |
|
467 | @LazyProperty | |
466 | def affected_files(self): |
|
468 | def affected_files(self): | |
467 | """ |
|
469 | """ | |
468 | Get's a fast accessible file changes for given changeset |
|
470 | Get's a fast accessible file changes for given changeset | |
469 | """ |
|
471 | """ | |
470 | a, m, d = self._changes_cache |
|
472 | a, m, d = self._changes_cache | |
471 | return list(a.union(m).union(d)) |
|
473 | return list(a.union(m).union(d)) | |
472 |
|
474 | |||
473 | @LazyProperty |
|
475 | @LazyProperty | |
474 | def _diff_name_status(self): |
|
476 | def _diff_name_status(self): | |
475 | output = [] |
|
477 | output = [] | |
476 | for parent in self.parents: |
|
478 | for parent in self.parents: | |
477 | cmd = 'diff --name-status %s %s --encoding=utf8' % (parent.raw_id, |
|
479 | cmd = 'diff --name-status %s %s --encoding=utf8' % (parent.raw_id, | |
478 | self.raw_id) |
|
480 | self.raw_id) | |
479 | so, se = self.repository.run_git_command(cmd) |
|
481 | so, se = self.repository.run_git_command(cmd) | |
480 | output.append(so.strip()) |
|
482 | output.append(so.strip()) | |
481 | return '\n'.join(output) |
|
483 | return '\n'.join(output) | |
482 |
|
484 | |||
483 | @LazyProperty |
|
485 | @LazyProperty | |
484 | def _changes_cache(self): |
|
486 | def _changes_cache(self): | |
485 | added = set() |
|
487 | added = set() | |
486 | modified = set() |
|
488 | modified = set() | |
487 | deleted = set() |
|
489 | deleted = set() | |
488 | _r = self.repository._repo |
|
490 | _r = self.repository._repo | |
489 |
|
491 | |||
490 | parents = self.parents |
|
492 | parents = self.parents | |
491 | if not self.parents: |
|
493 | if not self.parents: | |
492 | parents = [EmptyChangeset()] |
|
494 | parents = [EmptyChangeset()] | |
493 | for parent in parents: |
|
495 | for parent in parents: | |
494 | if isinstance(parent, EmptyChangeset): |
|
496 | if isinstance(parent, EmptyChangeset): | |
495 | oid = None |
|
497 | oid = None | |
496 | else: |
|
498 | else: | |
497 | oid = _r[parent.raw_id].tree |
|
499 | oid = _r[parent.raw_id].tree | |
498 | changes = _r.object_store.tree_changes(oid, _r[self.raw_id].tree) |
|
500 | changes = _r.object_store.tree_changes(oid, _r[self.raw_id].tree) | |
499 | for (oldpath, newpath), (_, _), (_, _) in changes: |
|
501 | for (oldpath, newpath), (_, _), (_, _) in changes: | |
500 | if newpath and oldpath: |
|
502 | if newpath and oldpath: | |
501 | modified.add(newpath) |
|
503 | modified.add(newpath) | |
502 | elif newpath and not oldpath: |
|
504 | elif newpath and not oldpath: | |
503 | added.add(newpath) |
|
505 | added.add(newpath) | |
504 | elif not newpath and oldpath: |
|
506 | elif not newpath and oldpath: | |
505 | deleted.add(oldpath) |
|
507 | deleted.add(oldpath) | |
506 | return added, modified, deleted |
|
508 | return added, modified, deleted | |
507 |
|
509 | |||
508 | def _get_paths_for_status(self, status): |
|
510 | def _get_paths_for_status(self, status): | |
509 | """ |
|
511 | """ | |
510 | Returns sorted list of paths for given ``status``. |
|
512 | Returns sorted list of paths for given ``status``. | |
511 |
|
513 | |||
512 | :param status: one of: *added*, *modified* or *deleted* |
|
514 | :param status: one of: *added*, *modified* or *deleted* | |
513 | """ |
|
515 | """ | |
514 | a, m, d = self._changes_cache |
|
516 | a, m, d = self._changes_cache | |
515 | return sorted({ |
|
517 | return sorted({ | |
516 | 'added': list(a), |
|
518 | 'added': list(a), | |
517 | 'modified': list(m), |
|
519 | 'modified': list(m), | |
518 | 'deleted': list(d)}[status] |
|
520 | 'deleted': list(d)}[status] | |
519 | ) |
|
521 | ) | |
520 |
|
522 | |||
521 | @LazyProperty |
|
523 | @LazyProperty | |
522 | def added(self): |
|
524 | def added(self): | |
523 | """ |
|
525 | """ | |
524 | Returns list of added ``FileNode`` objects. |
|
526 | Returns list of added ``FileNode`` objects. | |
525 | """ |
|
527 | """ | |
526 | if not self.parents: |
|
528 | if not self.parents: | |
527 | return list(self._get_file_nodes()) |
|
529 | return list(self._get_file_nodes()) | |
528 | return AddedFileNodesGenerator([n for n in |
|
530 | return AddedFileNodesGenerator([n for n in | |
529 | self._get_paths_for_status('added')], self) |
|
531 | self._get_paths_for_status('added')], self) | |
530 |
|
532 | |||
531 | @LazyProperty |
|
533 | @LazyProperty | |
532 | def changed(self): |
|
534 | def changed(self): | |
533 | """ |
|
535 | """ | |
534 | Returns list of modified ``FileNode`` objects. |
|
536 | Returns list of modified ``FileNode`` objects. | |
535 | """ |
|
537 | """ | |
536 | if not self.parents: |
|
538 | if not self.parents: | |
537 | return [] |
|
539 | return [] | |
538 | return ChangedFileNodesGenerator([n for n in |
|
540 | return ChangedFileNodesGenerator([n for n in | |
539 | self._get_paths_for_status('modified')], self) |
|
541 | self._get_paths_for_status('modified')], self) | |
540 |
|
542 | |||
541 | @LazyProperty |
|
543 | @LazyProperty | |
542 | def removed(self): |
|
544 | def removed(self): | |
543 | """ |
|
545 | """ | |
544 | Returns list of removed ``FileNode`` objects. |
|
546 | Returns list of removed ``FileNode`` objects. | |
545 | """ |
|
547 | """ | |
546 | if not self.parents: |
|
548 | if not self.parents: | |
547 | return [] |
|
549 | return [] | |
548 | return RemovedFileNodesGenerator([n for n in |
|
550 | return RemovedFileNodesGenerator([n for n in | |
549 | self._get_paths_for_status('deleted')], self) |
|
551 | self._get_paths_for_status('deleted')], self) |
@@ -1,694 +1,698 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 | """ |
|
2 | """ | |
3 | vcs.backends.git |
|
3 | vcs.backends.git | |
4 | ~~~~~~~~~~~~~~~~ |
|
4 | ~~~~~~~~~~~~~~~~ | |
5 |
|
5 | |||
6 | Git backend implementation. |
|
6 | Git backend implementation. | |
7 |
|
7 | |||
8 | :created_on: Apr 8, 2010 |
|
8 | :created_on: Apr 8, 2010 | |
9 | :copyright: (c) 2010-2011 by Marcin Kuzminski, Lukasz Balcerzak. |
|
9 | :copyright: (c) 2010-2011 by Marcin Kuzminski, Lukasz Balcerzak. | |
10 | """ |
|
10 | """ | |
11 |
|
11 | |||
12 | import os |
|
12 | import os | |
13 | import re |
|
13 | import re | |
14 | import time |
|
14 | import time | |
15 | import posixpath |
|
15 | import posixpath | |
16 | import logging |
|
16 | import logging | |
17 | import traceback |
|
17 | import traceback | |
18 | import urllib |
|
18 | import urllib | |
19 | import urllib2 |
|
19 | import urllib2 | |
20 | from dulwich.repo import Repo, NotGitRepository |
|
20 | from dulwich.repo import Repo, NotGitRepository | |
21 | from dulwich.objects import Tag |
|
21 | from dulwich.objects import Tag | |
22 | from string import Template |
|
22 | from string import Template | |
23 |
|
23 | |||
24 | import rhodecode |
|
24 | import rhodecode | |
25 | from rhodecode.lib.vcs.backends.base import BaseRepository |
|
25 | from rhodecode.lib.vcs.backends.base import BaseRepository | |
26 | from rhodecode.lib.vcs.exceptions import BranchDoesNotExistError |
|
26 | from rhodecode.lib.vcs.exceptions import BranchDoesNotExistError | |
27 | from rhodecode.lib.vcs.exceptions import ChangesetDoesNotExistError |
|
27 | from rhodecode.lib.vcs.exceptions import ChangesetDoesNotExistError | |
28 | from rhodecode.lib.vcs.exceptions import EmptyRepositoryError |
|
28 | from rhodecode.lib.vcs.exceptions import EmptyRepositoryError | |
29 | from rhodecode.lib.vcs.exceptions import RepositoryError |
|
29 | from rhodecode.lib.vcs.exceptions import RepositoryError | |
30 | from rhodecode.lib.vcs.exceptions import TagAlreadyExistError |
|
30 | from rhodecode.lib.vcs.exceptions import TagAlreadyExistError | |
31 | from rhodecode.lib.vcs.exceptions import TagDoesNotExistError |
|
31 | from rhodecode.lib.vcs.exceptions import TagDoesNotExistError | |
32 | from rhodecode.lib.vcs.utils import safe_unicode, makedate, date_fromtimestamp |
|
32 | from rhodecode.lib.vcs.utils import safe_unicode, makedate, date_fromtimestamp | |
33 | from rhodecode.lib.vcs.utils.lazy import LazyProperty, ThreadLocalLazyProperty |
|
33 | from rhodecode.lib.vcs.utils.lazy import LazyProperty, ThreadLocalLazyProperty | |
34 | from rhodecode.lib.vcs.utils.ordered_dict import OrderedDict |
|
34 | from rhodecode.lib.vcs.utils.ordered_dict import OrderedDict | |
35 | from rhodecode.lib.vcs.utils.paths import abspath |
|
35 | from rhodecode.lib.vcs.utils.paths import abspath | |
36 | from rhodecode.lib.vcs.utils.paths import get_user_home |
|
36 | from rhodecode.lib.vcs.utils.paths import get_user_home | |
37 | from .workdir import GitWorkdir |
|
37 | from .workdir import GitWorkdir | |
38 | from .changeset import GitChangeset |
|
38 | from .changeset import GitChangeset | |
39 | from .inmemory import GitInMemoryChangeset |
|
39 | from .inmemory import GitInMemoryChangeset | |
40 | from .config import ConfigFile |
|
40 | from .config import ConfigFile | |
41 | from rhodecode.lib import subprocessio |
|
41 | from rhodecode.lib import subprocessio | |
42 |
|
42 | |||
43 |
|
43 | |||
44 | log = logging.getLogger(__name__) |
|
44 | log = logging.getLogger(__name__) | |
45 |
|
45 | |||
46 |
|
46 | |||
47 | class GitRepository(BaseRepository): |
|
47 | class GitRepository(BaseRepository): | |
48 | """ |
|
48 | """ | |
49 | Git repository backend. |
|
49 | Git repository backend. | |
50 | """ |
|
50 | """ | |
51 | DEFAULT_BRANCH_NAME = 'master' |
|
51 | DEFAULT_BRANCH_NAME = 'master' | |
52 | scm = 'git' |
|
52 | scm = 'git' | |
53 |
|
53 | |||
54 | def __init__(self, repo_path, create=False, src_url=None, |
|
54 | def __init__(self, repo_path, create=False, src_url=None, | |
55 | update_after_clone=False, bare=False): |
|
55 | update_after_clone=False, bare=False): | |
56 |
|
56 | |||
57 | self.path = abspath(repo_path) |
|
57 | self.path = abspath(repo_path) | |
58 | repo = self._get_repo(create, src_url, update_after_clone, bare) |
|
58 | repo = self._get_repo(create, src_url, update_after_clone, bare) | |
59 | self.bare = repo.bare |
|
59 | self.bare = repo.bare | |
60 |
|
60 | |||
61 | self._config_files = [ |
|
61 | self._config_files = [ | |
62 | bare and abspath(self.path, 'config') |
|
62 | bare and abspath(self.path, 'config') | |
63 | or abspath(self.path, '.git', 'config'), |
|
63 | or abspath(self.path, '.git', 'config'), | |
64 | abspath(get_user_home(), '.gitconfig'), |
|
64 | abspath(get_user_home(), '.gitconfig'), | |
65 | ] |
|
65 | ] | |
66 |
|
66 | |||
67 | @ThreadLocalLazyProperty |
|
67 | @ThreadLocalLazyProperty | |
68 | def _repo(self): |
|
68 | def _repo(self): | |
69 | repo = Repo(self.path) |
|
69 | repo = Repo(self.path) | |
70 | # patch the instance of GitRepo with an "FAKE" ui object to add |
|
70 | # patch the instance of GitRepo with an "FAKE" ui object to add | |
71 | # compatibility layer with Mercurial |
|
71 | # compatibility layer with Mercurial | |
72 | if not hasattr(repo, 'ui'): |
|
72 | if not hasattr(repo, 'ui'): | |
73 | from mercurial.ui import ui |
|
73 | from mercurial.ui import ui | |
74 | baseui = ui() |
|
74 | baseui = ui() | |
75 | setattr(repo, 'ui', baseui) |
|
75 | setattr(repo, 'ui', baseui) | |
76 | return repo |
|
76 | return repo | |
77 |
|
77 | |||
78 | @property |
|
78 | @property | |
79 | def head(self): |
|
79 | def head(self): | |
80 | try: |
|
80 | try: | |
81 | return self._repo.head() |
|
81 | return self._repo.head() | |
82 | except KeyError: |
|
82 | except KeyError: | |
83 | return None |
|
83 | return None | |
84 |
|
84 | |||
85 | @LazyProperty |
|
85 | @LazyProperty | |
86 | def revisions(self): |
|
86 | def revisions(self): | |
87 | """ |
|
87 | """ | |
88 | Returns list of revisions' ids, in ascending order. Being lazy |
|
88 | Returns list of revisions' ids, in ascending order. Being lazy | |
89 | attribute allows external tools to inject shas from cache. |
|
89 | attribute allows external tools to inject shas from cache. | |
90 | """ |
|
90 | """ | |
91 | return self._get_all_revisions() |
|
91 | return self._get_all_revisions() | |
92 |
|
92 | |||
93 | @classmethod |
|
93 | @classmethod | |
94 | def _run_git_command(cls, cmd, **opts): |
|
94 | def _run_git_command(cls, cmd, **opts): | |
95 | """ |
|
95 | """ | |
96 | Runs given ``cmd`` as git command and returns tuple |
|
96 | Runs given ``cmd`` as git command and returns tuple | |
97 | (stdout, stderr). |
|
97 | (stdout, stderr). | |
98 |
|
98 | |||
99 | :param cmd: git command to be executed |
|
99 | :param cmd: git command to be executed | |
100 | :param opts: env options to pass into Subprocess command |
|
100 | :param opts: env options to pass into Subprocess command | |
101 | """ |
|
101 | """ | |
102 |
|
102 | |||
103 | if '_bare' in opts: |
|
103 | if '_bare' in opts: | |
104 | _copts = [] |
|
104 | _copts = [] | |
105 | del opts['_bare'] |
|
105 | del opts['_bare'] | |
106 | else: |
|
106 | else: | |
107 | _copts = ['-c', 'core.quotepath=false', ] |
|
107 | _copts = ['-c', 'core.quotepath=false', ] | |
108 | safe_call = False |
|
108 | safe_call = False | |
109 | if '_safe' in opts: |
|
109 | if '_safe' in opts: | |
110 | #no exc on failure |
|
110 | #no exc on failure | |
111 | del opts['_safe'] |
|
111 | del opts['_safe'] | |
112 | safe_call = True |
|
112 | safe_call = True | |
113 |
|
113 | |||
114 | _str_cmd = False |
|
114 | _str_cmd = False | |
115 | if isinstance(cmd, basestring): |
|
115 | if isinstance(cmd, basestring): | |
116 | cmd = [cmd] |
|
116 | cmd = [cmd] | |
117 | _str_cmd = True |
|
117 | _str_cmd = True | |
118 |
|
118 | |||
119 | gitenv = os.environ |
|
119 | gitenv = os.environ | |
120 | # need to clean fix GIT_DIR ! |
|
120 | # need to clean fix GIT_DIR ! | |
121 | if 'GIT_DIR' in gitenv: |
|
121 | if 'GIT_DIR' in gitenv: | |
122 | del gitenv['GIT_DIR'] |
|
122 | del gitenv['GIT_DIR'] | |
123 | gitenv['GIT_CONFIG_NOGLOBAL'] = '1' |
|
123 | gitenv['GIT_CONFIG_NOGLOBAL'] = '1' | |
124 |
|
124 | |||
125 | _git_path = rhodecode.CONFIG.get('git_path', 'git') |
|
125 | _git_path = rhodecode.CONFIG.get('git_path', 'git') | |
126 | cmd = [_git_path] + _copts + cmd |
|
126 | cmd = [_git_path] + _copts + cmd | |
127 | if _str_cmd: |
|
127 | if _str_cmd: | |
128 | cmd = ' '.join(cmd) |
|
128 | cmd = ' '.join(cmd) | |
129 | try: |
|
129 | try: | |
130 | _opts = dict( |
|
130 | _opts = dict( | |
131 | env=gitenv, |
|
131 | env=gitenv, | |
132 | shell=False, |
|
132 | shell=False, | |
133 | ) |
|
133 | ) | |
134 | _opts.update(opts) |
|
134 | _opts.update(opts) | |
135 | p = subprocessio.SubprocessIOChunker(cmd, **_opts) |
|
135 | p = subprocessio.SubprocessIOChunker(cmd, **_opts) | |
136 | except (EnvironmentError, OSError), err: |
|
136 | except (EnvironmentError, OSError), err: | |
137 | tb_err = ("Couldn't run git command (%s).\n" |
|
137 | tb_err = ("Couldn't run git command (%s).\n" | |
138 | "Original error was:%s\n" % (cmd, err)) |
|
138 | "Original error was:%s\n" % (cmd, err)) | |
139 | log.error(tb_err) |
|
139 | log.error(tb_err) | |
140 | if safe_call: |
|
140 | if safe_call: | |
141 | return '', err |
|
141 | return '', err | |
142 | else: |
|
142 | else: | |
143 | raise RepositoryError(tb_err) |
|
143 | raise RepositoryError(tb_err) | |
144 |
|
144 | |||
145 | return ''.join(p.output), ''.join(p.error) |
|
145 | return ''.join(p.output), ''.join(p.error) | |
146 |
|
146 | |||
147 | def run_git_command(self, cmd): |
|
147 | def run_git_command(self, cmd): | |
148 | opts = {} |
|
148 | opts = {} | |
149 | if os.path.isdir(self.path): |
|
149 | if os.path.isdir(self.path): | |
150 | opts['cwd'] = self.path |
|
150 | opts['cwd'] = self.path | |
151 | return self._run_git_command(cmd, **opts) |
|
151 | return self._run_git_command(cmd, **opts) | |
152 |
|
152 | |||
153 | @classmethod |
|
153 | @classmethod | |
154 | def _check_url(cls, url): |
|
154 | def _check_url(cls, url): | |
155 | """ |
|
155 | """ | |
156 | Functon will check given url and try to verify if it's a valid |
|
156 | Functon will check given url and try to verify if it's a valid | |
157 | link. Sometimes it may happened that mercurial will issue basic |
|
157 | link. Sometimes it may happened that mercurial will issue basic | |
158 | auth request that can cause whole API to hang when used from python |
|
158 | auth request that can cause whole API to hang when used from python | |
159 | or other external calls. |
|
159 | or other external calls. | |
160 |
|
160 | |||
161 | On failures it'll raise urllib2.HTTPError |
|
161 | On failures it'll raise urllib2.HTTPError | |
162 | """ |
|
162 | """ | |
163 | from mercurial.util import url as Url |
|
163 | from mercurial.util import url as Url | |
164 |
|
164 | |||
165 | # those authnadlers are patched for python 2.6.5 bug an |
|
165 | # those authnadlers are patched for python 2.6.5 bug an | |
166 | # infinit looping when given invalid resources |
|
166 | # infinit looping when given invalid resources | |
167 | from mercurial.url import httpbasicauthhandler, httpdigestauthhandler |
|
167 | from mercurial.url import httpbasicauthhandler, httpdigestauthhandler | |
168 |
|
168 | |||
169 | # check first if it's not an local url |
|
169 | # check first if it's not an local url | |
170 | if os.path.isdir(url) or url.startswith('file:'): |
|
170 | if os.path.isdir(url) or url.startswith('file:'): | |
171 | return True |
|
171 | return True | |
172 |
|
172 | |||
173 | if('+' in url[:url.find('://')]): |
|
173 | if('+' in url[:url.find('://')]): | |
174 | url = url[url.find('+') + 1:] |
|
174 | url = url[url.find('+') + 1:] | |
175 |
|
175 | |||
176 | handlers = [] |
|
176 | handlers = [] | |
177 | test_uri, authinfo = Url(url).authinfo() |
|
177 | test_uri, authinfo = Url(url).authinfo() | |
178 | if not test_uri.endswith('info/refs'): |
|
178 | if not test_uri.endswith('info/refs'): | |
179 | test_uri = test_uri.rstrip('/') + '/info/refs' |
|
179 | test_uri = test_uri.rstrip('/') + '/info/refs' | |
180 | if authinfo: |
|
180 | if authinfo: | |
181 | #create a password manager |
|
181 | #create a password manager | |
182 | passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm() |
|
182 | passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm() | |
183 | passmgr.add_password(*authinfo) |
|
183 | passmgr.add_password(*authinfo) | |
184 |
|
184 | |||
185 | handlers.extend((httpbasicauthhandler(passmgr), |
|
185 | handlers.extend((httpbasicauthhandler(passmgr), | |
186 | httpdigestauthhandler(passmgr))) |
|
186 | httpdigestauthhandler(passmgr))) | |
187 |
|
187 | |||
188 | o = urllib2.build_opener(*handlers) |
|
188 | o = urllib2.build_opener(*handlers) | |
189 | o.addheaders = [('User-Agent', 'git/1.7.8.0')] # fake some git |
|
189 | o.addheaders = [('User-Agent', 'git/1.7.8.0')] # fake some git | |
190 |
|
190 | |||
191 | q = {"service": 'git-upload-pack'} |
|
191 | q = {"service": 'git-upload-pack'} | |
192 | qs = '?%s' % urllib.urlencode(q) |
|
192 | qs = '?%s' % urllib.urlencode(q) | |
193 | cu = "%s%s" % (test_uri, qs) |
|
193 | cu = "%s%s" % (test_uri, qs) | |
194 | req = urllib2.Request(cu, None, {}) |
|
194 | req = urllib2.Request(cu, None, {}) | |
195 |
|
195 | |||
196 | try: |
|
196 | try: | |
197 | resp = o.open(req) |
|
197 | resp = o.open(req) | |
198 | return resp.code == 200 |
|
198 | return resp.code == 200 | |
199 | except Exception, e: |
|
199 | except Exception, e: | |
200 | # means it cannot be cloned |
|
200 | # means it cannot be cloned | |
201 | raise urllib2.URLError("[%s] %s" % (url, e)) |
|
201 | raise urllib2.URLError("[%s] %s" % (url, e)) | |
202 |
|
202 | |||
203 | def _get_repo(self, create, src_url=None, update_after_clone=False, |
|
203 | def _get_repo(self, create, src_url=None, update_after_clone=False, | |
204 | bare=False): |
|
204 | bare=False): | |
205 | if create and os.path.exists(self.path): |
|
205 | if create and os.path.exists(self.path): | |
206 | raise RepositoryError("Location already exist") |
|
206 | raise RepositoryError("Location already exist") | |
207 | if src_url and not create: |
|
207 | if src_url and not create: | |
208 | raise RepositoryError("Create should be set to True if src_url is " |
|
208 | raise RepositoryError("Create should be set to True if src_url is " | |
209 | "given (clone operation creates repository)") |
|
209 | "given (clone operation creates repository)") | |
210 | try: |
|
210 | try: | |
211 | if create and src_url: |
|
211 | if create and src_url: | |
212 | GitRepository._check_url(src_url) |
|
212 | GitRepository._check_url(src_url) | |
213 | self.clone(src_url, update_after_clone, bare) |
|
213 | self.clone(src_url, update_after_clone, bare) | |
214 | return Repo(self.path) |
|
214 | return Repo(self.path) | |
215 | elif create: |
|
215 | elif create: | |
216 | os.mkdir(self.path) |
|
216 | os.mkdir(self.path) | |
217 | if bare: |
|
217 | if bare: | |
218 | return Repo.init_bare(self.path) |
|
218 | return Repo.init_bare(self.path) | |
219 | else: |
|
219 | else: | |
220 | return Repo.init(self.path) |
|
220 | return Repo.init(self.path) | |
221 | else: |
|
221 | else: | |
222 | return self._repo |
|
222 | return self._repo | |
223 | except (NotGitRepository, OSError), err: |
|
223 | except (NotGitRepository, OSError), err: | |
224 | raise RepositoryError(err) |
|
224 | raise RepositoryError(err) | |
225 |
|
225 | |||
226 | def _get_all_revisions(self): |
|
226 | def _get_all_revisions(self): | |
227 | # we must check if this repo is not empty, since later command |
|
227 | # we must check if this repo is not empty, since later command | |
228 | # fails if it is. And it's cheaper to ask than throw the subprocess |
|
228 | # fails if it is. And it's cheaper to ask than throw the subprocess | |
229 | # errors |
|
229 | # errors | |
230 | try: |
|
230 | try: | |
231 | self._repo.head() |
|
231 | self._repo.head() | |
232 | except KeyError: |
|
232 | except KeyError: | |
233 | return [] |
|
233 | return [] | |
234 | cmd = 'rev-list --all --reverse --date-order' |
|
234 | rev_filter = _git_path = rhodecode.CONFIG.get('git_rev_filter', | |
|
235 | '--all').strip() | |||
|
236 | cmd = 'rev-list %s --reverse --date-order' % (rev_filter) | |||
235 | try: |
|
237 | try: | |
236 | so, se = self.run_git_command(cmd) |
|
238 | so, se = self.run_git_command(cmd) | |
237 | except RepositoryError: |
|
239 | except RepositoryError: | |
238 | # Can be raised for empty repositories |
|
240 | # Can be raised for empty repositories | |
239 | return [] |
|
241 | return [] | |
240 | return so.splitlines() |
|
242 | return so.splitlines() | |
241 |
|
243 | |||
242 | def _get_all_revisions2(self): |
|
244 | def _get_all_revisions2(self): | |
243 | #alternate implementation using dulwich |
|
245 | #alternate implementation using dulwich | |
244 | includes = [x[1][0] for x in self._parsed_refs.iteritems() |
|
246 | includes = [x[1][0] for x in self._parsed_refs.iteritems() | |
245 | if x[1][1] != 'T'] |
|
247 | if x[1][1] != 'T'] | |
246 | return [c.commit.id for c in self._repo.get_walker(include=includes)] |
|
248 | return [c.commit.id for c in self._repo.get_walker(include=includes)] | |
247 |
|
249 | |||
248 | def _get_revision(self, revision): |
|
250 | def _get_revision(self, revision): | |
249 | """ |
|
251 | """ | |
250 | For git backend we always return integer here. This way we ensure |
|
252 | For git backend we always return integer here. This way we ensure | |
251 | that changset's revision attribute would become integer. |
|
253 | that changset's revision attribute would become integer. | |
252 | """ |
|
254 | """ | |
253 | pattern = re.compile(r'^[[0-9a-fA-F]{12}|[0-9a-fA-F]{40}]$') |
|
255 | pattern = re.compile(r'^[[0-9a-fA-F]{12}|[0-9a-fA-F]{40}]$') | |
254 | is_bstr = lambda o: isinstance(o, (str, unicode)) |
|
256 | is_bstr = lambda o: isinstance(o, (str, unicode)) | |
255 | is_null = lambda o: len(o) == revision.count('0') |
|
257 | is_null = lambda o: len(o) == revision.count('0') | |
256 |
|
258 | |||
257 | if len(self.revisions) == 0: |
|
259 | if len(self.revisions) == 0: | |
258 | raise EmptyRepositoryError("There are no changesets yet") |
|
260 | raise EmptyRepositoryError("There are no changesets yet") | |
259 |
|
261 | |||
260 | if revision in (None, '', 'tip', 'HEAD', 'head', -1): |
|
262 | if revision in (None, '', 'tip', 'HEAD', 'head', -1): | |
261 | revision = self.revisions[-1] |
|
263 | revision = self.revisions[-1] | |
262 |
|
264 | |||
263 | if ((is_bstr(revision) and revision.isdigit() and len(revision) < 12) |
|
265 | if ((is_bstr(revision) and revision.isdigit() and len(revision) < 12) | |
264 | or isinstance(revision, int) or is_null(revision)): |
|
266 | or isinstance(revision, int) or is_null(revision)): | |
265 | try: |
|
267 | try: | |
266 | revision = self.revisions[int(revision)] |
|
268 | revision = self.revisions[int(revision)] | |
267 | except: |
|
269 | except: | |
268 | raise ChangesetDoesNotExistError("Revision %r does not exist " |
|
270 | raise ChangesetDoesNotExistError("Revision %r does not exist " | |
269 | "for this repository %s" % (revision, self)) |
|
271 | "for this repository %s" % (revision, self)) | |
270 |
|
272 | |||
271 | elif is_bstr(revision): |
|
273 | elif is_bstr(revision): | |
272 | # get by branch/tag name |
|
274 | # get by branch/tag name | |
273 | _ref_revision = self._parsed_refs.get(revision) |
|
275 | _ref_revision = self._parsed_refs.get(revision) | |
274 | _tags_shas = self.tags.values() |
|
276 | _tags_shas = self.tags.values() | |
275 | if _ref_revision: # and _ref_revision[1] in ['H', 'RH', 'T']: |
|
277 | if _ref_revision: # and _ref_revision[1] in ['H', 'RH', 'T']: | |
276 | return _ref_revision[0] |
|
278 | return _ref_revision[0] | |
277 |
|
279 | |||
278 | # maybe it's a tag ? we don't have them in self.revisions |
|
280 | # maybe it's a tag ? we don't have them in self.revisions | |
279 | elif revision in _tags_shas: |
|
281 | elif revision in _tags_shas: | |
280 | return _tags_shas[_tags_shas.index(revision)] |
|
282 | return _tags_shas[_tags_shas.index(revision)] | |
281 |
|
283 | |||
282 | elif not pattern.match(revision) or revision not in self.revisions: |
|
284 | elif not pattern.match(revision) or revision not in self.revisions: | |
283 | raise ChangesetDoesNotExistError("Revision %r does not exist " |
|
285 | raise ChangesetDoesNotExistError("Revision %r does not exist " | |
284 | "for this repository %s" % (revision, self)) |
|
286 | "for this repository %s" % (revision, self)) | |
285 |
|
287 | |||
286 | # Ensure we return full id |
|
288 | # Ensure we return full id | |
287 | if not pattern.match(str(revision)): |
|
289 | if not pattern.match(str(revision)): | |
288 | raise ChangesetDoesNotExistError("Given revision %r not recognized" |
|
290 | raise ChangesetDoesNotExistError("Given revision %r not recognized" | |
289 | % revision) |
|
291 | % revision) | |
290 | return revision |
|
292 | return revision | |
291 |
|
293 | |||
292 | def _get_archives(self, archive_name='tip'): |
|
294 | def _get_archives(self, archive_name='tip'): | |
293 |
|
295 | |||
294 | for i in [('zip', '.zip'), ('gz', '.tar.gz'), ('bz2', '.tar.bz2')]: |
|
296 | for i in [('zip', '.zip'), ('gz', '.tar.gz'), ('bz2', '.tar.bz2')]: | |
295 | yield {"type": i[0], "extension": i[1], "node": archive_name} |
|
297 | yield {"type": i[0], "extension": i[1], "node": archive_name} | |
296 |
|
298 | |||
297 | def _get_url(self, url): |
|
299 | def _get_url(self, url): | |
298 | """ |
|
300 | """ | |
299 | Returns normalized url. If schema is not given, would fall to |
|
301 | Returns normalized url. If schema is not given, would fall to | |
300 | filesystem (``file:///``) schema. |
|
302 | filesystem (``file:///``) schema. | |
301 | """ |
|
303 | """ | |
302 | url = str(url) |
|
304 | url = str(url) | |
303 | if url != 'default' and not '://' in url: |
|
305 | if url != 'default' and not '://' in url: | |
304 | url = ':///'.join(('file', url)) |
|
306 | url = ':///'.join(('file', url)) | |
305 | return url |
|
307 | return url | |
306 |
|
308 | |||
307 | def get_hook_location(self): |
|
309 | def get_hook_location(self): | |
308 | """ |
|
310 | """ | |
309 | returns absolute path to location where hooks are stored |
|
311 | returns absolute path to location where hooks are stored | |
310 | """ |
|
312 | """ | |
311 | loc = os.path.join(self.path, 'hooks') |
|
313 | loc = os.path.join(self.path, 'hooks') | |
312 | if not self.bare: |
|
314 | if not self.bare: | |
313 | loc = os.path.join(self.path, '.git', 'hooks') |
|
315 | loc = os.path.join(self.path, '.git', 'hooks') | |
314 | return loc |
|
316 | return loc | |
315 |
|
317 | |||
316 | @LazyProperty |
|
318 | @LazyProperty | |
317 | def name(self): |
|
319 | def name(self): | |
318 | return os.path.basename(self.path) |
|
320 | return os.path.basename(self.path) | |
319 |
|
321 | |||
320 | @LazyProperty |
|
322 | @LazyProperty | |
321 | def last_change(self): |
|
323 | def last_change(self): | |
322 | """ |
|
324 | """ | |
323 | Returns last change made on this repository as datetime object |
|
325 | Returns last change made on this repository as datetime object | |
324 | """ |
|
326 | """ | |
325 | return date_fromtimestamp(self._get_mtime(), makedate()[1]) |
|
327 | return date_fromtimestamp(self._get_mtime(), makedate()[1]) | |
326 |
|
328 | |||
327 | def _get_mtime(self): |
|
329 | def _get_mtime(self): | |
328 | try: |
|
330 | try: | |
329 | return time.mktime(self.get_changeset().date.timetuple()) |
|
331 | return time.mktime(self.get_changeset().date.timetuple()) | |
330 | except RepositoryError: |
|
332 | except RepositoryError: | |
331 | idx_loc = '' if self.bare else '.git' |
|
333 | idx_loc = '' if self.bare else '.git' | |
332 | # fallback to filesystem |
|
334 | # fallback to filesystem | |
333 | in_path = os.path.join(self.path, idx_loc, "index") |
|
335 | in_path = os.path.join(self.path, idx_loc, "index") | |
334 | he_path = os.path.join(self.path, idx_loc, "HEAD") |
|
336 | he_path = os.path.join(self.path, idx_loc, "HEAD") | |
335 | if os.path.exists(in_path): |
|
337 | if os.path.exists(in_path): | |
336 | return os.stat(in_path).st_mtime |
|
338 | return os.stat(in_path).st_mtime | |
337 | else: |
|
339 | else: | |
338 | return os.stat(he_path).st_mtime |
|
340 | return os.stat(he_path).st_mtime | |
339 |
|
341 | |||
340 | @LazyProperty |
|
342 | @LazyProperty | |
341 | def description(self): |
|
343 | def description(self): | |
342 | idx_loc = '' if self.bare else '.git' |
|
344 | idx_loc = '' if self.bare else '.git' | |
343 | undefined_description = u'unknown' |
|
345 | undefined_description = u'unknown' | |
344 | description_path = os.path.join(self.path, idx_loc, 'description') |
|
346 | description_path = os.path.join(self.path, idx_loc, 'description') | |
345 | if os.path.isfile(description_path): |
|
347 | if os.path.isfile(description_path): | |
346 | return safe_unicode(open(description_path).read()) |
|
348 | return safe_unicode(open(description_path).read()) | |
347 | else: |
|
349 | else: | |
348 | return undefined_description |
|
350 | return undefined_description | |
349 |
|
351 | |||
350 | @LazyProperty |
|
352 | @LazyProperty | |
351 | def contact(self): |
|
353 | def contact(self): | |
352 | undefined_contact = u'Unknown' |
|
354 | undefined_contact = u'Unknown' | |
353 | return undefined_contact |
|
355 | return undefined_contact | |
354 |
|
356 | |||
355 | @property |
|
357 | @property | |
356 | def branches(self): |
|
358 | def branches(self): | |
357 | if not self.revisions: |
|
359 | if not self.revisions: | |
358 | return {} |
|
360 | return {} | |
359 | sortkey = lambda ctx: ctx[0] |
|
361 | sortkey = lambda ctx: ctx[0] | |
360 | _branches = [(x[0], x[1][0]) |
|
362 | _branches = [(x[0], x[1][0]) | |
361 | for x in self._parsed_refs.iteritems() if x[1][1] == 'H'] |
|
363 | for x in self._parsed_refs.iteritems() if x[1][1] == 'H'] | |
362 | return OrderedDict(sorted(_branches, key=sortkey, reverse=False)) |
|
364 | return OrderedDict(sorted(_branches, key=sortkey, reverse=False)) | |
363 |
|
365 | |||
364 | @LazyProperty |
|
366 | @LazyProperty | |
365 | def tags(self): |
|
367 | def tags(self): | |
366 | return self._get_tags() |
|
368 | return self._get_tags() | |
367 |
|
369 | |||
368 | def _get_tags(self): |
|
370 | def _get_tags(self): | |
369 | if not self.revisions: |
|
371 | if not self.revisions: | |
370 | return {} |
|
372 | return {} | |
371 |
|
373 | |||
372 | sortkey = lambda ctx: ctx[0] |
|
374 | sortkey = lambda ctx: ctx[0] | |
373 | _tags = [(x[0], x[1][0]) |
|
375 | _tags = [(x[0], x[1][0]) | |
374 | for x in self._parsed_refs.iteritems() if x[1][1] == 'T'] |
|
376 | for x in self._parsed_refs.iteritems() if x[1][1] == 'T'] | |
375 | return OrderedDict(sorted(_tags, key=sortkey, reverse=True)) |
|
377 | return OrderedDict(sorted(_tags, key=sortkey, reverse=True)) | |
376 |
|
378 | |||
377 | def tag(self, name, user, revision=None, message=None, date=None, |
|
379 | def tag(self, name, user, revision=None, message=None, date=None, | |
378 | **kwargs): |
|
380 | **kwargs): | |
379 | """ |
|
381 | """ | |
380 | Creates and returns a tag for the given ``revision``. |
|
382 | Creates and returns a tag for the given ``revision``. | |
381 |
|
383 | |||
382 | :param name: name for new tag |
|
384 | :param name: name for new tag | |
383 | :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>" |
|
385 | :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>" | |
384 | :param revision: changeset id for which new tag would be created |
|
386 | :param revision: changeset id for which new tag would be created | |
385 | :param message: message of the tag's commit |
|
387 | :param message: message of the tag's commit | |
386 | :param date: date of tag's commit |
|
388 | :param date: date of tag's commit | |
387 |
|
389 | |||
388 | :raises TagAlreadyExistError: if tag with same name already exists |
|
390 | :raises TagAlreadyExistError: if tag with same name already exists | |
389 | """ |
|
391 | """ | |
390 | if name in self.tags: |
|
392 | if name in self.tags: | |
391 | raise TagAlreadyExistError("Tag %s already exists" % name) |
|
393 | raise TagAlreadyExistError("Tag %s already exists" % name) | |
392 | changeset = self.get_changeset(revision) |
|
394 | changeset = self.get_changeset(revision) | |
393 | message = message or "Added tag %s for commit %s" % (name, |
|
395 | message = message or "Added tag %s for commit %s" % (name, | |
394 | changeset.raw_id) |
|
396 | changeset.raw_id) | |
395 | self._repo.refs["refs/tags/%s" % name] = changeset._commit.id |
|
397 | self._repo.refs["refs/tags/%s" % name] = changeset._commit.id | |
396 |
|
398 | |||
397 | self._parsed_refs = self._get_parsed_refs() |
|
399 | self._parsed_refs = self._get_parsed_refs() | |
398 | self.tags = self._get_tags() |
|
400 | self.tags = self._get_tags() | |
399 | return changeset |
|
401 | return changeset | |
400 |
|
402 | |||
401 | def remove_tag(self, name, user, message=None, date=None): |
|
403 | def remove_tag(self, name, user, message=None, date=None): | |
402 | """ |
|
404 | """ | |
403 | Removes tag with the given ``name``. |
|
405 | Removes tag with the given ``name``. | |
404 |
|
406 | |||
405 | :param name: name of the tag to be removed |
|
407 | :param name: name of the tag to be removed | |
406 | :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>" |
|
408 | :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>" | |
407 | :param message: message of the tag's removal commit |
|
409 | :param message: message of the tag's removal commit | |
408 | :param date: date of tag's removal commit |
|
410 | :param date: date of tag's removal commit | |
409 |
|
411 | |||
410 | :raises TagDoesNotExistError: if tag with given name does not exists |
|
412 | :raises TagDoesNotExistError: if tag with given name does not exists | |
411 | """ |
|
413 | """ | |
412 | if name not in self.tags: |
|
414 | if name not in self.tags: | |
413 | raise TagDoesNotExistError("Tag %s does not exist" % name) |
|
415 | raise TagDoesNotExistError("Tag %s does not exist" % name) | |
414 | tagpath = posixpath.join(self._repo.refs.path, 'refs', 'tags', name) |
|
416 | tagpath = posixpath.join(self._repo.refs.path, 'refs', 'tags', name) | |
415 | try: |
|
417 | try: | |
416 | os.remove(tagpath) |
|
418 | os.remove(tagpath) | |
417 | self._parsed_refs = self._get_parsed_refs() |
|
419 | self._parsed_refs = self._get_parsed_refs() | |
418 | self.tags = self._get_tags() |
|
420 | self.tags = self._get_tags() | |
419 | except OSError, e: |
|
421 | except OSError, e: | |
420 | raise RepositoryError(e.strerror) |
|
422 | raise RepositoryError(e.strerror) | |
421 |
|
423 | |||
422 | @LazyProperty |
|
424 | @LazyProperty | |
423 | def _parsed_refs(self): |
|
425 | def _parsed_refs(self): | |
424 | return self._get_parsed_refs() |
|
426 | return self._get_parsed_refs() | |
425 |
|
427 | |||
426 | def _get_parsed_refs(self): |
|
428 | def _get_parsed_refs(self): | |
427 | refs = self._repo.get_refs() |
|
429 | refs = self._repo.get_refs() | |
428 | keys = [('refs/heads/', 'H'), |
|
430 | keys = [('refs/heads/', 'H'), | |
429 | ('refs/remotes/origin/', 'RH'), |
|
431 | ('refs/remotes/origin/', 'RH'), | |
430 | ('refs/tags/', 'T')] |
|
432 | ('refs/tags/', 'T')] | |
431 | _refs = {} |
|
433 | _refs = {} | |
432 | for ref, sha in refs.iteritems(): |
|
434 | for ref, sha in refs.iteritems(): | |
433 | for k, type_ in keys: |
|
435 | for k, type_ in keys: | |
434 | if ref.startswith(k): |
|
436 | if ref.startswith(k): | |
435 | _key = ref[len(k):] |
|
437 | _key = ref[len(k):] | |
436 | if type_ == 'T': |
|
438 | if type_ == 'T': | |
437 | obj = self._repo.get_object(sha) |
|
439 | obj = self._repo.get_object(sha) | |
438 | if isinstance(obj, Tag): |
|
440 | if isinstance(obj, Tag): | |
439 | sha = self._repo.get_object(sha).object[1] |
|
441 | sha = self._repo.get_object(sha).object[1] | |
440 | _refs[_key] = [sha, type_] |
|
442 | _refs[_key] = [sha, type_] | |
441 | break |
|
443 | break | |
442 | return _refs |
|
444 | return _refs | |
443 |
|
445 | |||
444 | def _heads(self, reverse=False): |
|
446 | def _heads(self, reverse=False): | |
445 | refs = self._repo.get_refs() |
|
447 | refs = self._repo.get_refs() | |
446 | heads = {} |
|
448 | heads = {} | |
447 |
|
449 | |||
448 | for key, val in refs.items(): |
|
450 | for key, val in refs.items(): | |
449 | for ref_key in ['refs/heads/', 'refs/remotes/origin/']: |
|
451 | for ref_key in ['refs/heads/', 'refs/remotes/origin/']: | |
450 | if key.startswith(ref_key): |
|
452 | if key.startswith(ref_key): | |
451 | n = key[len(ref_key):] |
|
453 | n = key[len(ref_key):] | |
452 | if n not in ['HEAD']: |
|
454 | if n not in ['HEAD']: | |
453 | heads[n] = val |
|
455 | heads[n] = val | |
454 |
|
456 | |||
455 | return heads if reverse else dict((y, x) for x, y in heads.iteritems()) |
|
457 | return heads if reverse else dict((y, x) for x, y in heads.iteritems()) | |
456 |
|
458 | |||
457 | def get_changeset(self, revision=None): |
|
459 | def get_changeset(self, revision=None): | |
458 | """ |
|
460 | """ | |
459 | Returns ``GitChangeset`` object representing commit from git repository |
|
461 | Returns ``GitChangeset`` object representing commit from git repository | |
460 | at the given revision or head (most recent commit) if None given. |
|
462 | at the given revision or head (most recent commit) if None given. | |
461 | """ |
|
463 | """ | |
462 | if isinstance(revision, GitChangeset): |
|
464 | if isinstance(revision, GitChangeset): | |
463 | return revision |
|
465 | return revision | |
464 | revision = self._get_revision(revision) |
|
466 | revision = self._get_revision(revision) | |
465 | changeset = GitChangeset(repository=self, revision=revision) |
|
467 | changeset = GitChangeset(repository=self, revision=revision) | |
466 | return changeset |
|
468 | return changeset | |
467 |
|
469 | |||
468 | def get_changesets(self, start=None, end=None, start_date=None, |
|
470 | def get_changesets(self, start=None, end=None, start_date=None, | |
469 | end_date=None, branch_name=None, reverse=False): |
|
471 | end_date=None, branch_name=None, reverse=False): | |
470 | """ |
|
472 | """ | |
471 | Returns iterator of ``GitChangeset`` objects from start to end (both |
|
473 | Returns iterator of ``GitChangeset`` objects from start to end (both | |
472 | are inclusive), in ascending date order (unless ``reverse`` is set). |
|
474 | are inclusive), in ascending date order (unless ``reverse`` is set). | |
473 |
|
475 | |||
474 | :param start: changeset ID, as str; first returned changeset |
|
476 | :param start: changeset ID, as str; first returned changeset | |
475 | :param end: changeset ID, as str; last returned changeset |
|
477 | :param end: changeset ID, as str; last returned changeset | |
476 | :param start_date: if specified, changesets with commit date less than |
|
478 | :param start_date: if specified, changesets with commit date less than | |
477 | ``start_date`` would be filtered out from returned set |
|
479 | ``start_date`` would be filtered out from returned set | |
478 | :param end_date: if specified, changesets with commit date greater than |
|
480 | :param end_date: if specified, changesets with commit date greater than | |
479 | ``end_date`` would be filtered out from returned set |
|
481 | ``end_date`` would be filtered out from returned set | |
480 | :param branch_name: if specified, changesets not reachable from given |
|
482 | :param branch_name: if specified, changesets not reachable from given | |
481 | branch would be filtered out from returned set |
|
483 | branch would be filtered out from returned set | |
482 | :param reverse: if ``True``, returned generator would be reversed |
|
484 | :param reverse: if ``True``, returned generator would be reversed | |
483 | (meaning that returned changesets would have descending date order) |
|
485 | (meaning that returned changesets would have descending date order) | |
484 |
|
486 | |||
485 | :raise BranchDoesNotExistError: If given ``branch_name`` does not |
|
487 | :raise BranchDoesNotExistError: If given ``branch_name`` does not | |
486 | exist. |
|
488 | exist. | |
487 | :raise ChangesetDoesNotExistError: If changeset for given ``start`` or |
|
489 | :raise ChangesetDoesNotExistError: If changeset for given ``start`` or | |
488 | ``end`` could not be found. |
|
490 | ``end`` could not be found. | |
489 |
|
491 | |||
490 | """ |
|
492 | """ | |
491 | if branch_name and branch_name not in self.branches: |
|
493 | if branch_name and branch_name not in self.branches: | |
492 | raise BranchDoesNotExistError("Branch '%s' not found" \ |
|
494 | raise BranchDoesNotExistError("Branch '%s' not found" \ | |
493 | % branch_name) |
|
495 | % branch_name) | |
494 | # %H at format means (full) commit hash, initial hashes are retrieved |
|
496 | # %H at format means (full) commit hash, initial hashes are retrieved | |
495 | # in ascending date order |
|
497 | # in ascending date order | |
496 | cmd_template = 'log --date-order --reverse --pretty=format:"%H"' |
|
498 | cmd_template = 'log --date-order --reverse --pretty=format:"%H"' | |
497 | cmd_params = {} |
|
499 | cmd_params = {} | |
498 | if start_date: |
|
500 | if start_date: | |
499 | cmd_template += ' --since "$since"' |
|
501 | cmd_template += ' --since "$since"' | |
500 | cmd_params['since'] = start_date.strftime('%m/%d/%y %H:%M:%S') |
|
502 | cmd_params['since'] = start_date.strftime('%m/%d/%y %H:%M:%S') | |
501 | if end_date: |
|
503 | if end_date: | |
502 | cmd_template += ' --until "$until"' |
|
504 | cmd_template += ' --until "$until"' | |
503 | cmd_params['until'] = end_date.strftime('%m/%d/%y %H:%M:%S') |
|
505 | cmd_params['until'] = end_date.strftime('%m/%d/%y %H:%M:%S') | |
504 | if branch_name: |
|
506 | if branch_name: | |
505 | cmd_template += ' $branch_name' |
|
507 | cmd_template += ' $branch_name' | |
506 | cmd_params['branch_name'] = branch_name |
|
508 | cmd_params['branch_name'] = branch_name | |
507 | else: |
|
509 | else: | |
508 | cmd_template += ' --all' |
|
510 | rev_filter = _git_path = rhodecode.CONFIG.get('git_rev_filter', | |
|
511 | '--all').strip() | |||
|
512 | cmd_template += ' %s' % (rev_filter) | |||
509 |
|
513 | |||
510 | cmd = Template(cmd_template).safe_substitute(**cmd_params) |
|
514 | cmd = Template(cmd_template).safe_substitute(**cmd_params) | |
511 | revs = self.run_git_command(cmd)[0].splitlines() |
|
515 | revs = self.run_git_command(cmd)[0].splitlines() | |
512 | start_pos = 0 |
|
516 | start_pos = 0 | |
513 | end_pos = len(revs) |
|
517 | end_pos = len(revs) | |
514 | if start: |
|
518 | if start: | |
515 | _start = self._get_revision(start) |
|
519 | _start = self._get_revision(start) | |
516 | try: |
|
520 | try: | |
517 | start_pos = revs.index(_start) |
|
521 | start_pos = revs.index(_start) | |
518 | except ValueError: |
|
522 | except ValueError: | |
519 | pass |
|
523 | pass | |
520 |
|
524 | |||
521 | if end is not None: |
|
525 | if end is not None: | |
522 | _end = self._get_revision(end) |
|
526 | _end = self._get_revision(end) | |
523 | try: |
|
527 | try: | |
524 | end_pos = revs.index(_end) |
|
528 | end_pos = revs.index(_end) | |
525 | except ValueError: |
|
529 | except ValueError: | |
526 | pass |
|
530 | pass | |
527 |
|
531 | |||
528 | if None not in [start, end] and start_pos > end_pos: |
|
532 | if None not in [start, end] and start_pos > end_pos: | |
529 | raise RepositoryError('start cannot be after end') |
|
533 | raise RepositoryError('start cannot be after end') | |
530 |
|
534 | |||
531 | if end_pos is not None: |
|
535 | if end_pos is not None: | |
532 | end_pos += 1 |
|
536 | end_pos += 1 | |
533 |
|
537 | |||
534 | revs = revs[start_pos:end_pos] |
|
538 | revs = revs[start_pos:end_pos] | |
535 | if reverse: |
|
539 | if reverse: | |
536 | revs = reversed(revs) |
|
540 | revs = reversed(revs) | |
537 | for rev in revs: |
|
541 | for rev in revs: | |
538 | yield self.get_changeset(rev) |
|
542 | yield self.get_changeset(rev) | |
539 |
|
543 | |||
540 | def get_diff(self, rev1, rev2, path=None, ignore_whitespace=False, |
|
544 | def get_diff(self, rev1, rev2, path=None, ignore_whitespace=False, | |
541 | context=3): |
|
545 | context=3): | |
542 | """ |
|
546 | """ | |
543 | Returns (git like) *diff*, as plain text. Shows changes introduced by |
|
547 | Returns (git like) *diff*, as plain text. Shows changes introduced by | |
544 | ``rev2`` since ``rev1``. |
|
548 | ``rev2`` since ``rev1``. | |
545 |
|
549 | |||
546 | :param rev1: Entry point from which diff is shown. Can be |
|
550 | :param rev1: Entry point from which diff is shown. Can be | |
547 | ``self.EMPTY_CHANGESET`` - in this case, patch showing all |
|
551 | ``self.EMPTY_CHANGESET`` - in this case, patch showing all | |
548 | the changes since empty state of the repository until ``rev2`` |
|
552 | the changes since empty state of the repository until ``rev2`` | |
549 | :param rev2: Until which revision changes should be shown. |
|
553 | :param rev2: Until which revision changes should be shown. | |
550 | :param ignore_whitespace: If set to ``True``, would not show whitespace |
|
554 | :param ignore_whitespace: If set to ``True``, would not show whitespace | |
551 | changes. Defaults to ``False``. |
|
555 | changes. Defaults to ``False``. | |
552 | :param context: How many lines before/after changed lines should be |
|
556 | :param context: How many lines before/after changed lines should be | |
553 | shown. Defaults to ``3``. |
|
557 | shown. Defaults to ``3``. | |
554 | """ |
|
558 | """ | |
555 | flags = ['-U%s' % context, '--full-index', '--binary', '-p', '-M', '--abbrev=40'] |
|
559 | flags = ['-U%s' % context, '--full-index', '--binary', '-p', '-M', '--abbrev=40'] | |
556 | if ignore_whitespace: |
|
560 | if ignore_whitespace: | |
557 | flags.append('-w') |
|
561 | flags.append('-w') | |
558 |
|
562 | |||
559 | if hasattr(rev1, 'raw_id'): |
|
563 | if hasattr(rev1, 'raw_id'): | |
560 | rev1 = getattr(rev1, 'raw_id') |
|
564 | rev1 = getattr(rev1, 'raw_id') | |
561 |
|
565 | |||
562 | if hasattr(rev2, 'raw_id'): |
|
566 | if hasattr(rev2, 'raw_id'): | |
563 | rev2 = getattr(rev2, 'raw_id') |
|
567 | rev2 = getattr(rev2, 'raw_id') | |
564 |
|
568 | |||
565 | if rev1 == self.EMPTY_CHANGESET: |
|
569 | if rev1 == self.EMPTY_CHANGESET: | |
566 | rev2 = self.get_changeset(rev2).raw_id |
|
570 | rev2 = self.get_changeset(rev2).raw_id | |
567 | cmd = ' '.join(['show'] + flags + [rev2]) |
|
571 | cmd = ' '.join(['show'] + flags + [rev2]) | |
568 | else: |
|
572 | else: | |
569 | rev1 = self.get_changeset(rev1).raw_id |
|
573 | rev1 = self.get_changeset(rev1).raw_id | |
570 | rev2 = self.get_changeset(rev2).raw_id |
|
574 | rev2 = self.get_changeset(rev2).raw_id | |
571 | cmd = ' '.join(['diff'] + flags + [rev1, rev2]) |
|
575 | cmd = ' '.join(['diff'] + flags + [rev1, rev2]) | |
572 |
|
576 | |||
573 | if path: |
|
577 | if path: | |
574 | cmd += ' -- "%s"' % path |
|
578 | cmd += ' -- "%s"' % path | |
575 |
|
579 | |||
576 | stdout, stderr = self.run_git_command(cmd) |
|
580 | stdout, stderr = self.run_git_command(cmd) | |
577 | # If we used 'show' command, strip first few lines (until actual diff |
|
581 | # If we used 'show' command, strip first few lines (until actual diff | |
578 | # starts) |
|
582 | # starts) | |
579 | if rev1 == self.EMPTY_CHANGESET: |
|
583 | if rev1 == self.EMPTY_CHANGESET: | |
580 | lines = stdout.splitlines() |
|
584 | lines = stdout.splitlines() | |
581 | x = 0 |
|
585 | x = 0 | |
582 | for line in lines: |
|
586 | for line in lines: | |
583 | if line.startswith('diff'): |
|
587 | if line.startswith('diff'): | |
584 | break |
|
588 | break | |
585 | x += 1 |
|
589 | x += 1 | |
586 | # Append new line just like 'diff' command do |
|
590 | # Append new line just like 'diff' command do | |
587 | stdout = '\n'.join(lines[x:]) + '\n' |
|
591 | stdout = '\n'.join(lines[x:]) + '\n' | |
588 | return stdout |
|
592 | return stdout | |
589 |
|
593 | |||
590 | @LazyProperty |
|
594 | @LazyProperty | |
591 | def in_memory_changeset(self): |
|
595 | def in_memory_changeset(self): | |
592 | """ |
|
596 | """ | |
593 | Returns ``GitInMemoryChangeset`` object for this repository. |
|
597 | Returns ``GitInMemoryChangeset`` object for this repository. | |
594 | """ |
|
598 | """ | |
595 | return GitInMemoryChangeset(self) |
|
599 | return GitInMemoryChangeset(self) | |
596 |
|
600 | |||
597 | def clone(self, url, update_after_clone=True, bare=False): |
|
601 | def clone(self, url, update_after_clone=True, bare=False): | |
598 | """ |
|
602 | """ | |
599 | Tries to clone changes from external location. |
|
603 | Tries to clone changes from external location. | |
600 |
|
604 | |||
601 | :param update_after_clone: If set to ``False``, git won't checkout |
|
605 | :param update_after_clone: If set to ``False``, git won't checkout | |
602 | working directory |
|
606 | working directory | |
603 | :param bare: If set to ``True``, repository would be cloned into |
|
607 | :param bare: If set to ``True``, repository would be cloned into | |
604 | *bare* git repository (no working directory at all). |
|
608 | *bare* git repository (no working directory at all). | |
605 | """ |
|
609 | """ | |
606 | url = self._get_url(url) |
|
610 | url = self._get_url(url) | |
607 | cmd = ['clone'] |
|
611 | cmd = ['clone'] | |
608 | if bare: |
|
612 | if bare: | |
609 | cmd.append('--bare') |
|
613 | cmd.append('--bare') | |
610 | elif not update_after_clone: |
|
614 | elif not update_after_clone: | |
611 | cmd.append('--no-checkout') |
|
615 | cmd.append('--no-checkout') | |
612 | cmd += ['--', '"%s"' % url, '"%s"' % self.path] |
|
616 | cmd += ['--', '"%s"' % url, '"%s"' % self.path] | |
613 | cmd = ' '.join(cmd) |
|
617 | cmd = ' '.join(cmd) | |
614 | # If error occurs run_git_command raises RepositoryError already |
|
618 | # If error occurs run_git_command raises RepositoryError already | |
615 | self.run_git_command(cmd) |
|
619 | self.run_git_command(cmd) | |
616 |
|
620 | |||
617 | def pull(self, url): |
|
621 | def pull(self, url): | |
618 | """ |
|
622 | """ | |
619 | Tries to pull changes from external location. |
|
623 | Tries to pull changes from external location. | |
620 | """ |
|
624 | """ | |
621 | url = self._get_url(url) |
|
625 | url = self._get_url(url) | |
622 | cmd = ['pull'] |
|
626 | cmd = ['pull'] | |
623 | cmd.append("--ff-only") |
|
627 | cmd.append("--ff-only") | |
624 | cmd.append(url) |
|
628 | cmd.append(url) | |
625 | cmd = ' '.join(cmd) |
|
629 | cmd = ' '.join(cmd) | |
626 | # If error occurs run_git_command raises RepositoryError already |
|
630 | # If error occurs run_git_command raises RepositoryError already | |
627 | self.run_git_command(cmd) |
|
631 | self.run_git_command(cmd) | |
628 |
|
632 | |||
629 | def fetch(self, url): |
|
633 | def fetch(self, url): | |
630 | """ |
|
634 | """ | |
631 | Tries to pull changes from external location. |
|
635 | Tries to pull changes from external location. | |
632 | """ |
|
636 | """ | |
633 | url = self._get_url(url) |
|
637 | url = self._get_url(url) | |
634 | so, se = self.run_git_command('ls-remote -h %s' % url) |
|
638 | so, se = self.run_git_command('ls-remote -h %s' % url) | |
635 | refs = [] |
|
639 | refs = [] | |
636 | for line in (x for x in so.splitlines()): |
|
640 | for line in (x for x in so.splitlines()): | |
637 | sha, ref = line.split('\t') |
|
641 | sha, ref = line.split('\t') | |
638 | refs.append(ref) |
|
642 | refs.append(ref) | |
639 | refs = ' '.join(('+%s:%s' % (r, r) for r in refs)) |
|
643 | refs = ' '.join(('+%s:%s' % (r, r) for r in refs)) | |
640 | cmd = '''fetch %s -- %s''' % (url, refs) |
|
644 | cmd = '''fetch %s -- %s''' % (url, refs) | |
641 | self.run_git_command(cmd) |
|
645 | self.run_git_command(cmd) | |
642 |
|
646 | |||
643 | @LazyProperty |
|
647 | @LazyProperty | |
644 | def workdir(self): |
|
648 | def workdir(self): | |
645 | """ |
|
649 | """ | |
646 | Returns ``Workdir`` instance for this repository. |
|
650 | Returns ``Workdir`` instance for this repository. | |
647 | """ |
|
651 | """ | |
648 | return GitWorkdir(self) |
|
652 | return GitWorkdir(self) | |
649 |
|
653 | |||
650 | def get_config_value(self, section, name, config_file=None): |
|
654 | def get_config_value(self, section, name, config_file=None): | |
651 | """ |
|
655 | """ | |
652 | Returns configuration value for a given [``section``] and ``name``. |
|
656 | Returns configuration value for a given [``section``] and ``name``. | |
653 |
|
657 | |||
654 | :param section: Section we want to retrieve value from |
|
658 | :param section: Section we want to retrieve value from | |
655 | :param name: Name of configuration we want to retrieve |
|
659 | :param name: Name of configuration we want to retrieve | |
656 | :param config_file: A path to file which should be used to retrieve |
|
660 | :param config_file: A path to file which should be used to retrieve | |
657 | configuration from (might also be a list of file paths) |
|
661 | configuration from (might also be a list of file paths) | |
658 | """ |
|
662 | """ | |
659 | if config_file is None: |
|
663 | if config_file is None: | |
660 | config_file = [] |
|
664 | config_file = [] | |
661 | elif isinstance(config_file, basestring): |
|
665 | elif isinstance(config_file, basestring): | |
662 | config_file = [config_file] |
|
666 | config_file = [config_file] | |
663 |
|
667 | |||
664 | def gen_configs(): |
|
668 | def gen_configs(): | |
665 | for path in config_file + self._config_files: |
|
669 | for path in config_file + self._config_files: | |
666 | try: |
|
670 | try: | |
667 | yield ConfigFile.from_path(path) |
|
671 | yield ConfigFile.from_path(path) | |
668 | except (IOError, OSError, ValueError): |
|
672 | except (IOError, OSError, ValueError): | |
669 | continue |
|
673 | continue | |
670 |
|
674 | |||
671 | for config in gen_configs(): |
|
675 | for config in gen_configs(): | |
672 | try: |
|
676 | try: | |
673 | return config.get(section, name) |
|
677 | return config.get(section, name) | |
674 | except KeyError: |
|
678 | except KeyError: | |
675 | continue |
|
679 | continue | |
676 | return None |
|
680 | return None | |
677 |
|
681 | |||
678 | def get_user_name(self, config_file=None): |
|
682 | def get_user_name(self, config_file=None): | |
679 | """ |
|
683 | """ | |
680 | Returns user's name from global configuration file. |
|
684 | Returns user's name from global configuration file. | |
681 |
|
685 | |||
682 | :param config_file: A path to file which should be used to retrieve |
|
686 | :param config_file: A path to file which should be used to retrieve | |
683 | configuration from (might also be a list of file paths) |
|
687 | configuration from (might also be a list of file paths) | |
684 | """ |
|
688 | """ | |
685 | return self.get_config_value('user', 'name', config_file) |
|
689 | return self.get_config_value('user', 'name', config_file) | |
686 |
|
690 | |||
687 | def get_user_email(self, config_file=None): |
|
691 | def get_user_email(self, config_file=None): | |
688 | """ |
|
692 | """ | |
689 | Returns user's email from global configuration file. |
|
693 | Returns user's email from global configuration file. | |
690 |
|
694 | |||
691 | :param config_file: A path to file which should be used to retrieve |
|
695 | :param config_file: A path to file which should be used to retrieve | |
692 | configuration from (might also be a list of file paths) |
|
696 | configuration from (might also be a list of file paths) | |
693 | """ |
|
697 | """ | |
694 | return self.get_config_value('user', 'email', config_file) |
|
698 | return self.get_config_value('user', 'email', config_file) |
@@ -1,478 +1,482 b'' | |||||
1 | ################################################################################ |
|
1 | ################################################################################ | |
2 | ################################################################################ |
|
2 | ################################################################################ | |
3 | # RhodeCode - Pylons environment configuration # |
|
3 | # RhodeCode - Pylons environment configuration # | |
4 | # # |
|
4 | # # | |
5 | # The %(here)s variable will be replaced with the parent directory of this file# |
|
5 | # The %(here)s variable will be replaced with the parent directory of this file# | |
6 | ################################################################################ |
|
6 | ################################################################################ | |
7 |
|
7 | |||
8 | [DEFAULT] |
|
8 | [DEFAULT] | |
9 | debug = true |
|
9 | debug = true | |
10 | pdebug = false |
|
10 | pdebug = false | |
11 | ################################################################################ |
|
11 | ################################################################################ | |
12 | ## Uncomment and replace with the address which should receive ## |
|
12 | ## Uncomment and replace with the address which should receive ## | |
13 | ## any error reports after application crash ## |
|
13 | ## any error reports after application crash ## | |
14 | ## Additionally those settings will be used by RhodeCode mailing system ## |
|
14 | ## Additionally those settings will be used by RhodeCode mailing system ## | |
15 | ################################################################################ |
|
15 | ################################################################################ | |
16 | #email_to = admin@localhost |
|
16 | #email_to = admin@localhost | |
17 | #error_email_from = paste_error@localhost |
|
17 | #error_email_from = paste_error@localhost | |
18 | #app_email_from = rhodecode-noreply@localhost |
|
18 | #app_email_from = rhodecode-noreply@localhost | |
19 | #error_message = |
|
19 | #error_message = | |
20 | #email_prefix = [RhodeCode] |
|
20 | #email_prefix = [RhodeCode] | |
21 |
|
21 | |||
22 | #smtp_server = mail.server.com |
|
22 | #smtp_server = mail.server.com | |
23 | #smtp_username = |
|
23 | #smtp_username = | |
24 | #smtp_password = |
|
24 | #smtp_password = | |
25 | #smtp_port = |
|
25 | #smtp_port = | |
26 | #smtp_use_tls = false |
|
26 | #smtp_use_tls = false | |
27 | #smtp_use_ssl = true |
|
27 | #smtp_use_ssl = true | |
28 | ## Specify available auth parameters here (e.g. LOGIN PLAIN CRAM-MD5, etc.) |
|
28 | ## Specify available auth parameters here (e.g. LOGIN PLAIN CRAM-MD5, etc.) | |
29 | #smtp_auth = |
|
29 | #smtp_auth = | |
30 |
|
30 | |||
31 | [server:main] |
|
31 | [server:main] | |
32 | ## PASTE |
|
32 | ## PASTE | |
33 | ## nr of threads to spawn |
|
33 | ## nr of threads to spawn | |
34 | #threadpool_workers = 5 |
|
34 | #threadpool_workers = 5 | |
35 |
|
35 | |||
36 | ## max request before thread respawn |
|
36 | ## max request before thread respawn | |
37 | #threadpool_max_requests = 10 |
|
37 | #threadpool_max_requests = 10 | |
38 |
|
38 | |||
39 | ## option to use threads of process |
|
39 | ## option to use threads of process | |
40 | #use_threadpool = true |
|
40 | #use_threadpool = true | |
41 |
|
41 | |||
42 | #use = egg:Paste#http |
|
42 | #use = egg:Paste#http | |
43 |
|
43 | |||
44 | ## WAITRESS |
|
44 | ## WAITRESS | |
45 | threads = 5 |
|
45 | threads = 5 | |
46 | ## 100GB |
|
46 | ## 100GB | |
47 | max_request_body_size = 107374182400 |
|
47 | max_request_body_size = 107374182400 | |
48 | use = egg:waitress#main |
|
48 | use = egg:waitress#main | |
49 |
|
49 | |||
50 | host = 127.0.0.1 |
|
50 | host = 127.0.0.1 | |
51 | port = 5000 |
|
51 | port = 5000 | |
52 |
|
52 | |||
53 | ## prefix middleware for rc |
|
53 | ## prefix middleware for rc | |
54 | #[filter:proxy-prefix] |
|
54 | #[filter:proxy-prefix] | |
55 | #use = egg:PasteDeploy#prefix |
|
55 | #use = egg:PasteDeploy#prefix | |
56 | #prefix = /<your-prefix> |
|
56 | #prefix = /<your-prefix> | |
57 |
|
57 | |||
58 | [app:main] |
|
58 | [app:main] | |
59 | use = egg:rhodecode |
|
59 | use = egg:rhodecode | |
60 | ## enable proxy prefix middleware |
|
60 | ## enable proxy prefix middleware | |
61 | #filter-with = proxy-prefix |
|
61 | #filter-with = proxy-prefix | |
62 |
|
62 | |||
63 | full_stack = true |
|
63 | full_stack = true | |
64 | static_files = true |
|
64 | static_files = true | |
65 | ## Optional Languages |
|
65 | ## Optional Languages | |
66 | ## en, fr, ja, pt_BR, zh_CN, zh_TW, pl |
|
66 | ## en, fr, ja, pt_BR, zh_CN, zh_TW, pl | |
67 | lang = en |
|
67 | lang = en | |
68 | cache_dir = /tmp/rc/data |
|
68 | cache_dir = /tmp/rc/data | |
69 | index_dir = /tmp/rc/index |
|
69 | index_dir = /tmp/rc/index | |
70 |
|
70 | |||
71 | ## uncomment and set this path to use archive download cache |
|
71 | ## uncomment and set this path to use archive download cache | |
72 | #archive_cache_dir = /tmp/tarballcache |
|
72 | #archive_cache_dir = /tmp/tarballcache | |
73 |
|
73 | |||
74 | ## change this to unique ID for security |
|
74 | ## change this to unique ID for security | |
75 | app_instance_uuid = rc-production |
|
75 | app_instance_uuid = rc-production | |
76 |
|
76 | |||
77 | ## cut off limit for large diffs (size in bytes) |
|
77 | ## cut off limit for large diffs (size in bytes) | |
78 | cut_off_limit = 256000 |
|
78 | cut_off_limit = 256000 | |
79 |
|
79 | |||
80 | ## use cache version of scm repo everywhere |
|
80 | ## use cache version of scm repo everywhere | |
81 | vcs_full_cache = false |
|
81 | vcs_full_cache = false | |
82 |
|
82 | |||
83 | ## force https in RhodeCode, fixes https redirects, assumes it's always https |
|
83 | ## force https in RhodeCode, fixes https redirects, assumes it's always https | |
84 | force_https = false |
|
84 | force_https = false | |
85 |
|
85 | |||
86 | ## use Strict-Transport-Security headers |
|
86 | ## use Strict-Transport-Security headers | |
87 | use_htsts = false |
|
87 | use_htsts = false | |
88 |
|
88 | |||
89 | ## number of commits stats will parse on each iteration |
|
89 | ## number of commits stats will parse on each iteration | |
90 | commit_parse_limit = 25 |
|
90 | commit_parse_limit = 25 | |
91 |
|
91 | |||
92 | ## number of items displayed in lightweight dashboard before paginating is shown |
|
92 | ## number of items displayed in lightweight dashboard before paginating is shown | |
93 | dashboard_items = 100 |
|
93 | dashboard_items = 100 | |
94 |
|
94 | |||
95 | ## use gravatar service to display avatars |
|
95 | ## use gravatar service to display avatars | |
96 | use_gravatar = true |
|
96 | use_gravatar = true | |
97 |
|
97 | |||
98 | ## path to git executable |
|
98 | ## path to git executable | |
99 | git_path = git |
|
99 | git_path = git | |
100 |
|
100 | |||
|
101 | ## git rev filter option, --all is the default filter, if you need to | |||
|
102 | ## hide all refs in changelog switch this to --branches --tags | |||
|
103 | git_rev_filter=--all | |||
|
104 | ||||
101 | ## RSS feed options |
|
105 | ## RSS feed options | |
102 | rss_cut_off_limit = 256000 |
|
106 | rss_cut_off_limit = 256000 | |
103 | rss_items_per_page = 10 |
|
107 | rss_items_per_page = 10 | |
104 | rss_include_diff = false |
|
108 | rss_include_diff = false | |
105 |
|
109 | |||
106 | ## show hash options for changelog |
|
110 | ## show hash options for changelog | |
107 | sha_len = 12 |
|
111 | sha_len = 12 | |
108 | sha_rev = true |
|
112 | sha_rev = true | |
109 |
|
113 | |||
110 |
|
114 | |||
111 | ## alternative_gravatar_url allows you to use your own avatar server application |
|
115 | ## alternative_gravatar_url allows you to use your own avatar server application | |
112 | ## the following parts of the URL will be replaced |
|
116 | ## the following parts of the URL will be replaced | |
113 | ## {email} user email |
|
117 | ## {email} user email | |
114 | ## {md5email} md5 hash of the user email (like at gravatar.com) |
|
118 | ## {md5email} md5 hash of the user email (like at gravatar.com) | |
115 | ## {size} size of the image that is expected from the server application |
|
119 | ## {size} size of the image that is expected from the server application | |
116 | ## {scheme} http/https from RhodeCode server |
|
120 | ## {scheme} http/https from RhodeCode server | |
117 | ## {netloc} network location from RhodeCode server |
|
121 | ## {netloc} network location from RhodeCode server | |
118 | #alternative_gravatar_url = http://myavatarserver.com/getbyemail/{email}/{size} |
|
122 | #alternative_gravatar_url = http://myavatarserver.com/getbyemail/{email}/{size} | |
119 | #alternative_gravatar_url = http://myavatarserver.com/getbymd5/{md5email}?s={size} |
|
123 | #alternative_gravatar_url = http://myavatarserver.com/getbymd5/{md5email}?s={size} | |
120 |
|
124 | |||
121 |
|
125 | |||
122 | ## container auth options |
|
126 | ## container auth options | |
123 | container_auth_enabled = false |
|
127 | container_auth_enabled = false | |
124 | proxypass_auth_enabled = false |
|
128 | proxypass_auth_enabled = false | |
125 |
|
129 | |||
126 | ## default encoding used to convert from and to unicode |
|
130 | ## default encoding used to convert from and to unicode | |
127 | ## can be also a comma seperated list of encoding in case of mixed encodings |
|
131 | ## can be also a comma seperated list of encoding in case of mixed encodings | |
128 | default_encoding = utf8 |
|
132 | default_encoding = utf8 | |
129 |
|
133 | |||
130 | ## overwrite schema of clone url |
|
134 | ## overwrite schema of clone url | |
131 | ## available vars: |
|
135 | ## available vars: | |
132 | ## scheme - http/https |
|
136 | ## scheme - http/https | |
133 | ## user - current user |
|
137 | ## user - current user | |
134 | ## pass - password |
|
138 | ## pass - password | |
135 | ## netloc - network location |
|
139 | ## netloc - network location | |
136 | ## path - usually repo_name |
|
140 | ## path - usually repo_name | |
137 |
|
141 | |||
138 | #clone_uri = {scheme}://{user}{pass}{netloc}{path} |
|
142 | #clone_uri = {scheme}://{user}{pass}{netloc}{path} | |
139 |
|
143 | |||
140 | ## issue tracking mapping for commits messages |
|
144 | ## issue tracking mapping for commits messages | |
141 | ## comment out issue_pat, issue_server, issue_prefix to enable |
|
145 | ## comment out issue_pat, issue_server, issue_prefix to enable | |
142 |
|
146 | |||
143 | ## pattern to get the issues from commit messages |
|
147 | ## pattern to get the issues from commit messages | |
144 | ## default one used here is #<numbers> with a regex passive group for `#` |
|
148 | ## default one used here is #<numbers> with a regex passive group for `#` | |
145 | ## {id} will be all groups matched from this pattern |
|
149 | ## {id} will be all groups matched from this pattern | |
146 |
|
150 | |||
147 | issue_pat = (?:\s*#)(\d+) |
|
151 | issue_pat = (?:\s*#)(\d+) | |
148 |
|
152 | |||
149 | ## server url to the issue, each {id} will be replaced with match |
|
153 | ## server url to the issue, each {id} will be replaced with match | |
150 | ## fetched from the regex and {repo} is replaced with full repository name |
|
154 | ## fetched from the regex and {repo} is replaced with full repository name | |
151 | ## including groups {repo_name} is replaced with just name of repo |
|
155 | ## including groups {repo_name} is replaced with just name of repo | |
152 |
|
156 | |||
153 | issue_server_link = https://myissueserver.com/{repo}/issue/{id} |
|
157 | issue_server_link = https://myissueserver.com/{repo}/issue/{id} | |
154 |
|
158 | |||
155 | ## prefix to add to link to indicate it's an url |
|
159 | ## prefix to add to link to indicate it's an url | |
156 | ## #314 will be replaced by <issue_prefix><id> |
|
160 | ## #314 will be replaced by <issue_prefix><id> | |
157 |
|
161 | |||
158 | issue_prefix = # |
|
162 | issue_prefix = # | |
159 |
|
163 | |||
160 | ## issue_pat, issue_server_link, issue_prefix can have suffixes to specify |
|
164 | ## issue_pat, issue_server_link, issue_prefix can have suffixes to specify | |
161 | ## multiple patterns, to other issues server, wiki or others |
|
165 | ## multiple patterns, to other issues server, wiki or others | |
162 | ## below an example how to create a wiki pattern |
|
166 | ## below an example how to create a wiki pattern | |
163 | # #wiki-some-id -> https://mywiki.com/some-id |
|
167 | # #wiki-some-id -> https://mywiki.com/some-id | |
164 |
|
168 | |||
165 | #issue_pat_wiki = (?:wiki-)(.+) |
|
169 | #issue_pat_wiki = (?:wiki-)(.+) | |
166 | #issue_server_link_wiki = https://mywiki.com/{id} |
|
170 | #issue_server_link_wiki = https://mywiki.com/{id} | |
167 | #issue_prefix_wiki = WIKI- |
|
171 | #issue_prefix_wiki = WIKI- | |
168 |
|
172 | |||
169 |
|
173 | |||
170 | ## instance-id prefix |
|
174 | ## instance-id prefix | |
171 | ## a prefix key for this instance used for cache invalidation when running |
|
175 | ## a prefix key for this instance used for cache invalidation when running | |
172 | ## multiple instances of rhodecode, make sure it's globally unique for |
|
176 | ## multiple instances of rhodecode, make sure it's globally unique for | |
173 | ## all running rhodecode instances. Leave empty if you don't use it |
|
177 | ## all running rhodecode instances. Leave empty if you don't use it | |
174 | instance_id = |
|
178 | instance_id = | |
175 |
|
179 | |||
176 | ## alternative return HTTP header for failed authentication. Default HTTP |
|
180 | ## alternative return HTTP header for failed authentication. Default HTTP | |
177 | ## response is 401 HTTPUnauthorized. Currently HG clients have troubles with |
|
181 | ## response is 401 HTTPUnauthorized. Currently HG clients have troubles with | |
178 | ## handling that. Set this variable to 403 to return HTTPForbidden |
|
182 | ## handling that. Set this variable to 403 to return HTTPForbidden | |
179 | auth_ret_code = |
|
183 | auth_ret_code = | |
180 |
|
184 | |||
181 | ## locking return code. When repository is locked return this HTTP code. 2XX |
|
185 | ## locking return code. When repository is locked return this HTTP code. 2XX | |
182 | ## codes don't break the transactions while 4XX codes do |
|
186 | ## codes don't break the transactions while 4XX codes do | |
183 | lock_ret_code = 423 |
|
187 | lock_ret_code = 423 | |
184 |
|
188 | |||
185 |
|
189 | |||
186 | #################################### |
|
190 | #################################### | |
187 | ### CELERY CONFIG #### |
|
191 | ### CELERY CONFIG #### | |
188 | #################################### |
|
192 | #################################### | |
189 | use_celery = false |
|
193 | use_celery = false | |
190 | broker.host = localhost |
|
194 | broker.host = localhost | |
191 | broker.vhost = rabbitmqhost |
|
195 | broker.vhost = rabbitmqhost | |
192 | broker.port = 5672 |
|
196 | broker.port = 5672 | |
193 | broker.user = rabbitmq |
|
197 | broker.user = rabbitmq | |
194 | broker.password = qweqwe |
|
198 | broker.password = qweqwe | |
195 |
|
199 | |||
196 | celery.imports = rhodecode.lib.celerylib.tasks |
|
200 | celery.imports = rhodecode.lib.celerylib.tasks | |
197 |
|
201 | |||
198 | celery.result.backend = amqp |
|
202 | celery.result.backend = amqp | |
199 | celery.result.dburi = amqp:// |
|
203 | celery.result.dburi = amqp:// | |
200 | celery.result.serialier = json |
|
204 | celery.result.serialier = json | |
201 |
|
205 | |||
202 | #celery.send.task.error.emails = true |
|
206 | #celery.send.task.error.emails = true | |
203 | #celery.amqp.task.result.expires = 18000 |
|
207 | #celery.amqp.task.result.expires = 18000 | |
204 |
|
208 | |||
205 | celeryd.concurrency = 2 |
|
209 | celeryd.concurrency = 2 | |
206 | #celeryd.log.file = celeryd.log |
|
210 | #celeryd.log.file = celeryd.log | |
207 | celeryd.log.level = debug |
|
211 | celeryd.log.level = debug | |
208 | celeryd.max.tasks.per.child = 1 |
|
212 | celeryd.max.tasks.per.child = 1 | |
209 |
|
213 | |||
210 | ## tasks will never be sent to the queue, but executed locally instead. |
|
214 | ## tasks will never be sent to the queue, but executed locally instead. | |
211 | celery.always.eager = false |
|
215 | celery.always.eager = false | |
212 |
|
216 | |||
213 | #################################### |
|
217 | #################################### | |
214 | ### BEAKER CACHE #### |
|
218 | ### BEAKER CACHE #### | |
215 | #################################### |
|
219 | #################################### | |
216 | beaker.cache.data_dir=/tmp/rc/data/cache/data |
|
220 | beaker.cache.data_dir=/tmp/rc/data/cache/data | |
217 | beaker.cache.lock_dir=/tmp/rc/data/cache/lock |
|
221 | beaker.cache.lock_dir=/tmp/rc/data/cache/lock | |
218 |
|
222 | |||
219 | beaker.cache.regions=super_short_term,short_term,long_term,sql_cache_short,sql_cache_med,sql_cache_long |
|
223 | beaker.cache.regions=super_short_term,short_term,long_term,sql_cache_short,sql_cache_med,sql_cache_long | |
220 |
|
224 | |||
221 | beaker.cache.super_short_term.type=memory |
|
225 | beaker.cache.super_short_term.type=memory | |
222 | beaker.cache.super_short_term.expire=1 |
|
226 | beaker.cache.super_short_term.expire=1 | |
223 | beaker.cache.super_short_term.key_length = 256 |
|
227 | beaker.cache.super_short_term.key_length = 256 | |
224 |
|
228 | |||
225 | beaker.cache.short_term.type=memory |
|
229 | beaker.cache.short_term.type=memory | |
226 | beaker.cache.short_term.expire=60 |
|
230 | beaker.cache.short_term.expire=60 | |
227 | beaker.cache.short_term.key_length = 256 |
|
231 | beaker.cache.short_term.key_length = 256 | |
228 |
|
232 | |||
229 | beaker.cache.long_term.type=memory |
|
233 | beaker.cache.long_term.type=memory | |
230 | beaker.cache.long_term.expire=36000 |
|
234 | beaker.cache.long_term.expire=36000 | |
231 | beaker.cache.long_term.key_length = 256 |
|
235 | beaker.cache.long_term.key_length = 256 | |
232 |
|
236 | |||
233 | beaker.cache.sql_cache_short.type=memory |
|
237 | beaker.cache.sql_cache_short.type=memory | |
234 | beaker.cache.sql_cache_short.expire=1 |
|
238 | beaker.cache.sql_cache_short.expire=1 | |
235 | beaker.cache.sql_cache_short.key_length = 256 |
|
239 | beaker.cache.sql_cache_short.key_length = 256 | |
236 |
|
240 | |||
237 | beaker.cache.sql_cache_med.type=memory |
|
241 | beaker.cache.sql_cache_med.type=memory | |
238 | beaker.cache.sql_cache_med.expire=360 |
|
242 | beaker.cache.sql_cache_med.expire=360 | |
239 | beaker.cache.sql_cache_med.key_length = 256 |
|
243 | beaker.cache.sql_cache_med.key_length = 256 | |
240 |
|
244 | |||
241 | beaker.cache.sql_cache_long.type=file |
|
245 | beaker.cache.sql_cache_long.type=file | |
242 | beaker.cache.sql_cache_long.expire=3600 |
|
246 | beaker.cache.sql_cache_long.expire=3600 | |
243 | beaker.cache.sql_cache_long.key_length = 256 |
|
247 | beaker.cache.sql_cache_long.key_length = 256 | |
244 |
|
248 | |||
245 | #################################### |
|
249 | #################################### | |
246 | ### BEAKER SESSION #### |
|
250 | ### BEAKER SESSION #### | |
247 | #################################### |
|
251 | #################################### | |
248 | ## Type of storage used for the session, current types are |
|
252 | ## Type of storage used for the session, current types are | |
249 | ## dbm, file, memcached, database, and memory. |
|
253 | ## dbm, file, memcached, database, and memory. | |
250 | ## The storage uses the Container API |
|
254 | ## The storage uses the Container API | |
251 | ## that is also used by the cache system. |
|
255 | ## that is also used by the cache system. | |
252 |
|
256 | |||
253 | ## db session ## |
|
257 | ## db session ## | |
254 | #beaker.session.type = ext:database |
|
258 | #beaker.session.type = ext:database | |
255 | #beaker.session.sa.url = postgresql://postgres:qwe@localhost/rhodecode |
|
259 | #beaker.session.sa.url = postgresql://postgres:qwe@localhost/rhodecode | |
256 | #beaker.session.table_name = db_session |
|
260 | #beaker.session.table_name = db_session | |
257 |
|
261 | |||
258 | ## encrypted cookie client side session, good for many instances ## |
|
262 | ## encrypted cookie client side session, good for many instances ## | |
259 | #beaker.session.type = cookie |
|
263 | #beaker.session.type = cookie | |
260 |
|
264 | |||
261 | ## file based cookies (default) ## |
|
265 | ## file based cookies (default) ## | |
262 | #beaker.session.type = file |
|
266 | #beaker.session.type = file | |
263 |
|
267 | |||
264 |
|
268 | |||
265 | beaker.session.key = rhodecode |
|
269 | beaker.session.key = rhodecode | |
266 | ## secure cookie requires AES python libraries |
|
270 | ## secure cookie requires AES python libraries | |
267 | #beaker.session.encrypt_key = <key_for_encryption> |
|
271 | #beaker.session.encrypt_key = <key_for_encryption> | |
268 | #beaker.session.validate_key = <validation_key> |
|
272 | #beaker.session.validate_key = <validation_key> | |
269 |
|
273 | |||
270 | ## sets session as invalid if it haven't been accessed for given amount of time |
|
274 | ## sets session as invalid if it haven't been accessed for given amount of time | |
271 | beaker.session.timeout = 3600 |
|
275 | beaker.session.timeout = 3600 | |
272 | beaker.session.httponly = true |
|
276 | beaker.session.httponly = true | |
273 | #beaker.session.cookie_path = /<your-prefix> |
|
277 | #beaker.session.cookie_path = /<your-prefix> | |
274 |
|
278 | |||
275 | ## uncomment for https secure cookie |
|
279 | ## uncomment for https secure cookie | |
276 | beaker.session.secure = false |
|
280 | beaker.session.secure = false | |
277 |
|
281 | |||
278 | ## auto save the session to not to use .save() |
|
282 | ## auto save the session to not to use .save() | |
279 | beaker.session.auto = False |
|
283 | beaker.session.auto = False | |
280 |
|
284 | |||
281 | ## default cookie expiration time in seconds `true` expire at browser close ## |
|
285 | ## default cookie expiration time in seconds `true` expire at browser close ## | |
282 | #beaker.session.cookie_expires = 3600 |
|
286 | #beaker.session.cookie_expires = 3600 | |
283 |
|
287 | |||
284 |
|
288 | |||
285 | ############################ |
|
289 | ############################ | |
286 | ## ERROR HANDLING SYSTEMS ## |
|
290 | ## ERROR HANDLING SYSTEMS ## | |
287 | ############################ |
|
291 | ############################ | |
288 |
|
292 | |||
289 | #################### |
|
293 | #################### | |
290 | ### [errormator] ### |
|
294 | ### [errormator] ### | |
291 | #################### |
|
295 | #################### | |
292 |
|
296 | |||
293 | ## Errormator is tailored to work with RhodeCode, see |
|
297 | ## Errormator is tailored to work with RhodeCode, see | |
294 | ## http://errormator.com for details how to obtain an account |
|
298 | ## http://errormator.com for details how to obtain an account | |
295 | ## you must install python package `errormator_client` to make it work |
|
299 | ## you must install python package `errormator_client` to make it work | |
296 |
|
300 | |||
297 | ## errormator enabled |
|
301 | ## errormator enabled | |
298 | errormator = false |
|
302 | errormator = false | |
299 |
|
303 | |||
300 | errormator.server_url = https://api.errormator.com |
|
304 | errormator.server_url = https://api.errormator.com | |
301 | errormator.api_key = YOUR_API_KEY |
|
305 | errormator.api_key = YOUR_API_KEY | |
302 |
|
306 | |||
303 | ## TWEAK AMOUNT OF INFO SENT HERE |
|
307 | ## TWEAK AMOUNT OF INFO SENT HERE | |
304 |
|
308 | |||
305 | ## enables 404 error logging (default False) |
|
309 | ## enables 404 error logging (default False) | |
306 | errormator.report_404 = false |
|
310 | errormator.report_404 = false | |
307 |
|
311 | |||
308 | ## time in seconds after request is considered being slow (default 1) |
|
312 | ## time in seconds after request is considered being slow (default 1) | |
309 | errormator.slow_request_time = 1 |
|
313 | errormator.slow_request_time = 1 | |
310 |
|
314 | |||
311 | ## record slow requests in application |
|
315 | ## record slow requests in application | |
312 | ## (needs to be enabled for slow datastore recording and time tracking) |
|
316 | ## (needs to be enabled for slow datastore recording and time tracking) | |
313 | errormator.slow_requests = true |
|
317 | errormator.slow_requests = true | |
314 |
|
318 | |||
315 | ## enable hooking to application loggers |
|
319 | ## enable hooking to application loggers | |
316 | # errormator.logging = true |
|
320 | # errormator.logging = true | |
317 |
|
321 | |||
318 | ## minimum log level for log capture |
|
322 | ## minimum log level for log capture | |
319 | # errormator.logging.level = WARNING |
|
323 | # errormator.logging.level = WARNING | |
320 |
|
324 | |||
321 | ## send logs only from erroneous/slow requests |
|
325 | ## send logs only from erroneous/slow requests | |
322 | ## (saves API quota for intensive logging) |
|
326 | ## (saves API quota for intensive logging) | |
323 | errormator.logging_on_error = false |
|
327 | errormator.logging_on_error = false | |
324 |
|
328 | |||
325 | ## list of additional keywords that should be grabbed from environ object
|
329 | ## list of additional keywords that should be grabbed from environ object | 
326 | ## can be string with comma separated list of words in lowercase |
|
330 | ## can be string with comma separated list of words in lowercase | |
327 | ## (by default client will always send following info: |
|
331 | ## (by default client will always send following info: | |
328 | ## 'REMOTE_USER', 'REMOTE_ADDR', 'SERVER_NAME', 'CONTENT_TYPE' + all keys that |
|
332 | ## 'REMOTE_USER', 'REMOTE_ADDR', 'SERVER_NAME', 'CONTENT_TYPE' + all keys that | |
329 | ## start with HTTP* this list can be extended with additional keywords here
|
333 | ## start with HTTP* this list can be extended with additional keywords here | 
330 | errormator.environ_keys_whitelist = |
|
334 | errormator.environ_keys_whitelist = | |
331 |
|
335 | |||
332 |
|
336 | |||
333 | ## list of keywords that should be blanked from request object |
|
337 | ## list of keywords that should be blanked from request object | |
334 | ## can be string with comma separated list of words in lowercase |
|
338 | ## can be string with comma separated list of words in lowercase | |
335 | ## (by default client will always blank keys that contain following words |
|
339 | ## (by default client will always blank keys that contain following words | |
336 | ## 'password', 'passwd', 'pwd', 'auth_tkt', 'secret', 'csrf' |
|
340 | ## 'password', 'passwd', 'pwd', 'auth_tkt', 'secret', 'csrf' | |
337 | ## this list can be extended with additional keywords set here
|
341 | ## this list can be extended with additional keywords set here | 
338 | errormator.request_keys_blacklist = |
|
342 | errormator.request_keys_blacklist = | |
339 |
|
343 | |||
340 |
|
344 | |||
341 | ## list of namespaces that should be ignored when gathering log entries
|
345 | ## list of namespaces that should be ignored when gathering log entries | 
342 | ## can be string with comma separated list of namespaces |
|
346 | ## can be string with comma separated list of namespaces | |
343 | ## (by default the client ignores own entries: errormator_client.client) |
|
347 | ## (by default the client ignores own entries: errormator_client.client) | |
344 | errormator.log_namespace_blacklist = |
|
348 | errormator.log_namespace_blacklist = | |
345 |
|
349 | |||
346 |
|
350 | |||
347 | ################ |
|
351 | ################ | |
348 | ### [sentry] ### |
|
352 | ### [sentry] ### | |
349 | ################ |
|
353 | ################ | |
350 |
|
354 | |||
351 | ## sentry is an alternative open source error aggregator
|
355 | ## sentry is an alternative open source error aggregator | 
352 | ## you must install python packages `sentry` and `raven` to enable |
|
356 | ## you must install python packages `sentry` and `raven` to enable | |
353 |
|
357 | |||
354 | sentry.dsn = YOUR_DSN
|
358 | sentry.dsn = YOUR_DSN | 
355 | sentry.servers = |
|
359 | sentry.servers = | |
356 | sentry.name = |
|
360 | sentry.name = | |
357 | sentry.key = |
|
361 | sentry.key = | |
358 | sentry.public_key = |
|
362 | sentry.public_key = | |
359 | sentry.secret_key = |
|
363 | sentry.secret_key = | |
360 | sentry.project = |
|
364 | sentry.project = | |
361 | sentry.site = |
|
365 | sentry.site = | |
362 | sentry.include_paths = |
|
366 | sentry.include_paths = | |
363 | sentry.exclude_paths = |
|
367 | sentry.exclude_paths = | |
364 |
|
368 | |||
365 |
|
369 | |||
366 | ################################################################################ |
|
370 | ################################################################################ | |
367 | ## WARNING: *THE LINE BELOW MUST BE UNCOMMENTED ON A PRODUCTION ENVIRONMENT* ## |
|
371 | ## WARNING: *THE LINE BELOW MUST BE UNCOMMENTED ON A PRODUCTION ENVIRONMENT* ## | |
368 | ## Debug mode will enable the interactive debugging tool, allowing ANYONE to ## |
|
372 | ## Debug mode will enable the interactive debugging tool, allowing ANYONE to ## | |
369 | ## execute malicious code after an exception is raised. ## |
|
373 | ## execute malicious code after an exception is raised. ## | |
370 | ################################################################################ |
|
374 | ################################################################################ | |
371 | set debug = false |
|
375 | set debug = false | |
372 |
|
376 | |||
373 | ################################## |
|
377 | ################################## | |
374 | ### LOGVIEW CONFIG ### |
|
378 | ### LOGVIEW CONFIG ### | |
375 | ################################## |
|
379 | ################################## | |
376 | logview.sqlalchemy = #faa |
|
380 | logview.sqlalchemy = #faa | |
377 | logview.pylons.templating = #bfb |
|
381 | logview.pylons.templating = #bfb | |
378 | logview.pylons.util = #eee |
|
382 | logview.pylons.util = #eee | |
379 |
|
383 | |||
380 | ######################################################### |
|
384 | ######################################################### | |
381 | ### DB CONFIGS - EACH DB WILL HAVE ITS OWN CONFIG  ###
|
385 | ### DB CONFIGS - EACH DB WILL HAVE ITS OWN CONFIG  ### | 
382 | ######################################################### |
|
386 | ######################################################### | |
383 | sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode_test.sqlite |
|
387 | sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode_test.sqlite | |
384 | #sqlalchemy.db1.url = postgresql://postgres:qwe@localhost/rhodecode_test |
|
388 | #sqlalchemy.db1.url = postgresql://postgres:qwe@localhost/rhodecode_test | |
385 | #sqlalchemy.db1.url = mysql://root:qwe@localhost/rhodecode_test |
|
389 | #sqlalchemy.db1.url = mysql://root:qwe@localhost/rhodecode_test | |
386 | sqlalchemy.db1.echo = false |
|
390 | sqlalchemy.db1.echo = false | |
387 | sqlalchemy.db1.pool_recycle = 3600 |
|
391 | sqlalchemy.db1.pool_recycle = 3600 | |
388 | sqlalchemy.db1.convert_unicode = true |
|
392 | sqlalchemy.db1.convert_unicode = true | |
389 |
|
393 | |||
390 | ################################ |
|
394 | ################################ | |
391 | ### LOGGING CONFIGURATION #### |
|
395 | ### LOGGING CONFIGURATION #### | |
392 | ################################ |
|
396 | ################################ | |
393 | [loggers] |
|
397 | [loggers] | |
394 | keys = root, routes, rhodecode, sqlalchemy, beaker, templates, whoosh_indexer |
|
398 | keys = root, routes, rhodecode, sqlalchemy, beaker, templates, whoosh_indexer | |
395 |
|
399 | |||
396 | [handlers] |
|
400 | [handlers] | |
397 | keys = console, console_sql |
|
401 | keys = console, console_sql | |
398 |
|
402 | |||
399 | [formatters] |
|
403 | [formatters] | |
400 | keys = generic, color_formatter, color_formatter_sql |
|
404 | keys = generic, color_formatter, color_formatter_sql | |
401 |
|
405 | |||
402 | ############# |
|
406 | ############# | |
403 | ## LOGGERS ## |
|
407 | ## LOGGERS ## | |
404 | ############# |
|
408 | ############# | |
405 | [logger_root] |
|
409 | [logger_root] | |
406 | level = DEBUG |
|
410 | level = DEBUG | |
407 | handlers = console |
|
411 | handlers = console | |
408 |
|
412 | |||
409 | [logger_routes] |
|
413 | [logger_routes] | |
410 | level = DEBUG |
|
414 | level = DEBUG | |
411 | handlers = |
|
415 | handlers = | |
412 | qualname = routes.middleware |
|
416 | qualname = routes.middleware | |
413 | ## "level = DEBUG" logs the route matched and routing variables. |
|
417 | ## "level = DEBUG" logs the route matched and routing variables. | |
414 | propagate = 1 |
|
418 | propagate = 1 | |
415 |
|
419 | |||
416 | [logger_beaker] |
|
420 | [logger_beaker] | |
417 | level = DEBUG |
|
421 | level = DEBUG | |
418 | handlers = |
|
422 | handlers = | |
419 | qualname = beaker.container |
|
423 | qualname = beaker.container | |
420 | propagate = 1 |
|
424 | propagate = 1 | |
421 |
|
425 | |||
422 | [logger_templates] |
|
426 | [logger_templates] | |
423 | level = INFO |
|
427 | level = INFO | |
424 | handlers = |
|
428 | handlers = | |
425 | qualname = pylons.templating |
|
429 | qualname = pylons.templating | |
426 | propagate = 1 |
|
430 | propagate = 1 | |
427 |
|
431 | |||
428 | [logger_rhodecode] |
|
432 | [logger_rhodecode] | |
429 | level = DEBUG |
|
433 | level = DEBUG | |
430 | handlers = |
|
434 | handlers = | |
431 | qualname = rhodecode |
|
435 | qualname = rhodecode | |
432 | propagate = 1 |
|
436 | propagate = 1 | |
433 |
|
437 | |||
434 | [logger_sqlalchemy] |
|
438 | [logger_sqlalchemy] | |
435 | level = ERROR |
|
439 | level = ERROR | |
436 | handlers = console |
|
440 | handlers = console | |
437 | qualname = sqlalchemy.engine |
|
441 | qualname = sqlalchemy.engine | |
438 | propagate = 0 |
|
442 | propagate = 0 | |
439 |
|
443 | |||
440 | [logger_whoosh_indexer] |
|
444 | [logger_whoosh_indexer] | |
441 | level = DEBUG |
|
445 | level = DEBUG | |
442 | handlers = |
|
446 | handlers = | |
443 | qualname = whoosh_indexer |
|
447 | qualname = whoosh_indexer | |
444 | propagate = 1 |
|
448 | propagate = 1 | |
445 |
|
449 | |||
446 | ############## |
|
450 | ############## | |
447 | ## HANDLERS ## |
|
451 | ## HANDLERS ## | |
448 | ############## |
|
452 | ############## | |
449 |
|
453 | |||
450 | [handler_console] |
|
454 | [handler_console] | |
451 | class = StreamHandler |
|
455 | class = StreamHandler | |
452 | args = (sys.stderr,) |
|
456 | args = (sys.stderr,) | |
453 | level = NOTSET |
|
457 | level = NOTSET | |
454 | formatter = generic |
|
458 | formatter = generic | |
455 |
|
459 | |||
456 | [handler_console_sql] |
|
460 | [handler_console_sql] | |
457 | class = StreamHandler |
|
461 | class = StreamHandler | |
458 | args = (sys.stderr,) |
|
462 | args = (sys.stderr,) | |
459 | level = WARN |
|
463 | level = WARN | |
460 | formatter = generic |
|
464 | formatter = generic | |
461 |
|
465 | |||
462 | ################ |
|
466 | ################ | |
463 | ## FORMATTERS ## |
|
467 | ## FORMATTERS ## | |
464 | ################ |
|
468 | ################ | |
465 |
|
469 | |||
466 | [formatter_generic] |
|
470 | [formatter_generic] | |
467 | format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s |
|
471 | format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s | |
468 | datefmt = %Y-%m-%d %H:%M:%S |
|
472 | datefmt = %Y-%m-%d %H:%M:%S | |
469 |
|
473 | |||
470 | [formatter_color_formatter] |
|
474 | [formatter_color_formatter] | |
471 | class=rhodecode.lib.colored_formatter.ColorFormatter |
|
475 | class=rhodecode.lib.colored_formatter.ColorFormatter | |
472 | format= %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s |
|
476 | format= %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s | |
473 | datefmt = %Y-%m-%d %H:%M:%S |
|
477 | datefmt = %Y-%m-%d %H:%M:%S | |
474 |
|
478 | |||
475 | [formatter_color_formatter_sql] |
|
479 | [formatter_color_formatter_sql] | |
476 | class=rhodecode.lib.colored_formatter.ColorFormatterSql |
|
480 | class=rhodecode.lib.colored_formatter.ColorFormatterSql | |
477 | format= %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s |
|
481 | format= %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s | |
478 | datefmt = %Y-%m-%d %H:%M:%S |
|
482 | datefmt = %Y-%m-%d %H:%M:%S |
General Comments 0
You need to be logged in to leave comments.
Login now