diffs: use whole chunk diff to calculate if it's oversized or not....
dan -
r2070:7939c6bf default
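For quick reference, the diff cut-off settings as they read after this commit (values taken from the added lines in the first hunk below; the removed lines show the previous values of 1024000 and 256000):

    ## cut off limit for large diffs (size in bytes). If the overall diff size of a
    ## commit or pull request exceeds this limit, the diff will be displayed
    ## partially. E.g 512000 == 512Kb
    cut_off_limit_diff = 512000

    ## cut off limit for large files inside diffs (size in bytes). Each individual
    ## file inside a diff which exceeds this limit will be displayed partially.
    ## E.g 128000 == 128Kb
    cut_off_limit_file = 128000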
@@ -1,733 +1,739 b''
1
1
2
2
3 ################################################################################
3 ################################################################################
4 ## RHODECODE COMMUNITY EDITION CONFIGURATION ##
4 ## RHODECODE COMMUNITY EDITION CONFIGURATION ##
5 # The %(here)s variable will be replaced with the parent directory of this file#
5 # The %(here)s variable will be replaced with the parent directory of this file#
6 ################################################################################
6 ################################################################################
7
7
8 [DEFAULT]
8 [DEFAULT]
9 debug = true
9 debug = true
10
10
11 ################################################################################
11 ################################################################################
12 ## EMAIL CONFIGURATION ##
12 ## EMAIL CONFIGURATION ##
13 ## Uncomment and replace with the email address which should receive ##
13 ## Uncomment and replace with the email address which should receive ##
14 ## any error reports after an application crash ##
14 ## any error reports after an application crash ##
15 ## Additionally these settings will be used by the RhodeCode mailing system ##
15 ## Additionally these settings will be used by the RhodeCode mailing system ##
16 ################################################################################
16 ################################################################################
17
17
18 ## prefix all emails subjects with given prefix, helps filtering out emails
18 ## prefix all emails subjects with given prefix, helps filtering out emails
19 #email_prefix = [RhodeCode]
19 #email_prefix = [RhodeCode]
20
20
21 ## email FROM address all mails will be sent
21 ## email FROM address all mails will be sent
22 #app_email_from = rhodecode-noreply@localhost
22 #app_email_from = rhodecode-noreply@localhost
23
23
24 ## Uncomment and replace with the address which should receive any error report
24 ## Uncomment and replace with the address which should receive any error report
25 ## note: using appenlight for error handling doesn't need this to be uncommented
25 ## note: using appenlight for error handling doesn't need this to be uncommented
26 #email_to = admin@localhost
26 #email_to = admin@localhost
27
27
28 ## in case of Application errors, send an error email from this address
28 ## in case of Application errors, send an error email from this address
29 #error_email_from = rhodecode_error@localhost
29 #error_email_from = rhodecode_error@localhost
30
30
31 ## additional error message to be sent in case of server crash
31 ## additional error message to be sent in case of server crash
32 #error_message =
32 #error_message =
33
33
34
34
35 #smtp_server = mail.server.com
35 #smtp_server = mail.server.com
36 #smtp_username =
36 #smtp_username =
37 #smtp_password =
37 #smtp_password =
38 #smtp_port =
38 #smtp_port =
39 #smtp_use_tls = false
39 #smtp_use_tls = false
40 #smtp_use_ssl = true
40 #smtp_use_ssl = true
41 ## Specify available auth parameters here (e.g. LOGIN PLAIN CRAM-MD5, etc.)
41 ## Specify available auth parameters here (e.g. LOGIN PLAIN CRAM-MD5, etc.)
42 #smtp_auth =
42 #smtp_auth =
43
43
44 [server:main]
44 [server:main]
45 ## COMMON ##
45 ## COMMON ##
46 host = 127.0.0.1
46 host = 127.0.0.1
47 port = 5000
47 port = 5000
48
48
49 ##################################
49 ##################################
50 ## WAITRESS WSGI SERVER ##
50 ## WAITRESS WSGI SERVER ##
51 ## Recommended for Development ##
51 ## Recommended for Development ##
52 ##################################
52 ##################################
53
53
54 use = egg:waitress#main
54 use = egg:waitress#main
55 ## number of worker threads
55 ## number of worker threads
56 threads = 5
56 threads = 5
57 ## MAX BODY SIZE 100GB
57 ## MAX BODY SIZE 100GB
58 max_request_body_size = 107374182400
58 max_request_body_size = 107374182400
59 ## Use poll instead of select, fixes file descriptors limits problems.
59 ## Use poll instead of select, fixes file descriptors limits problems.
60 ## May not work on old windows systems.
60 ## May not work on old windows systems.
61 asyncore_use_poll = true
61 asyncore_use_poll = true
62
62
63
63
64 ##########################
64 ##########################
65 ## GUNICORN WSGI SERVER ##
65 ## GUNICORN WSGI SERVER ##
66 ##########################
66 ##########################
67 ## run with gunicorn --log-config rhodecode.ini --paste rhodecode.ini
67 ## run with gunicorn --log-config rhodecode.ini --paste rhodecode.ini
68
68
69 #use = egg:gunicorn#main
69 #use = egg:gunicorn#main
70 ## Sets the number of process workers. You must set `instance_id = *`
70 ## Sets the number of process workers. You must set `instance_id = *`
71 ## when this option is set to more than one worker, recommended
71 ## when this option is set to more than one worker, recommended
72 ## value is (2 * NUMBER_OF_CPUS + 1), eg 2CPU = 5 workers
72 ## value is (2 * NUMBER_OF_CPUS + 1), eg 2CPU = 5 workers
73 ## The `instance_id = *` must be set in the [app:main] section below
73 ## The `instance_id = *` must be set in the [app:main] section below
74 #workers = 2
74 #workers = 2
75 ## number of threads for each worker, must be set to 1 for gevent
75 ## number of threads for each worker, must be set to 1 for gevent
76 ## generally recommended to be 1
76 ## generally recommended to be 1
77 #threads = 1
77 #threads = 1
78 ## process name
78 ## process name
79 #proc_name = rhodecode
79 #proc_name = rhodecode
80 ## type of worker class, one of sync, gevent
80 ## type of worker class, one of sync, gevent
81 ## for bigger setups it is recommended to use a worker class other than sync
81 ## for bigger setups it is recommended to use a worker class other than sync
82 #worker_class = sync
82 #worker_class = sync
83 ## The maximum number of simultaneous clients. Valid only for Gevent
83 ## The maximum number of simultaneous clients. Valid only for Gevent
84 #worker_connections = 10
84 #worker_connections = 10
85 ## max number of requests that a worker will handle before being gracefully
85 ## max number of requests that a worker will handle before being gracefully
86 ## restarted, this can help prevent memory leaks
86 ## restarted, this can help prevent memory leaks
87 #max_requests = 1000
87 #max_requests = 1000
88 #max_requests_jitter = 30
88 #max_requests_jitter = 30
89 ## amount of time a worker can spend handling a request before it
89 ## amount of time a worker can spend handling a request before it
90 ## gets killed and restarted. Set to 6hrs
90 ## gets killed and restarted. Set to 6hrs
91 #timeout = 21600
91 #timeout = 21600
92
92
93
93
94 ## prefix middleware for RhodeCode.
94 ## prefix middleware for RhodeCode.
95 ## recommended when using a proxy setup.
95 ## recommended when using a proxy setup.
96 ## allows serving RhodeCode under a URL prefix on the server.
96 ## allows serving RhodeCode under a URL prefix on the server.
97 ## eg https://server.com/custom_prefix. Enable `filter-with =` option below as well.
97 ## eg https://server.com/custom_prefix. Enable `filter-with =` option below as well.
98 ## And set your prefix like: `prefix = /custom_prefix`
98 ## And set your prefix like: `prefix = /custom_prefix`
99 ## be sure to also set beaker.session.cookie_path = /custom_prefix if you need
99 ## be sure to also set beaker.session.cookie_path = /custom_prefix if you need
100 ## to make your cookies only work on prefix url
100 ## to make your cookies only work on prefix url
101 [filter:proxy-prefix]
101 [filter:proxy-prefix]
102 use = egg:PasteDeploy#prefix
102 use = egg:PasteDeploy#prefix
103 prefix = /
103 prefix = /
104
104
105 [app:main]
105 [app:main]
106 use = egg:rhodecode-enterprise-ce
106 use = egg:rhodecode-enterprise-ce
107
107
108 ## enable proxy prefix middleware, defined above
108 ## enable proxy prefix middleware, defined above
109 #filter-with = proxy-prefix
109 #filter-with = proxy-prefix
110
110
111 # During development we want to have the debug toolbar enabled
111 # During development we want to have the debug toolbar enabled
112 pyramid.includes =
112 pyramid.includes =
113 pyramid_debugtoolbar
113 pyramid_debugtoolbar
114 rhodecode.utils.debugtoolbar
114 rhodecode.utils.debugtoolbar
115 rhodecode.lib.middleware.request_wrapper
115 rhodecode.lib.middleware.request_wrapper
116
116
117 pyramid.reload_templates = true
117 pyramid.reload_templates = true
118
118
119 debugtoolbar.hosts = 0.0.0.0/0
119 debugtoolbar.hosts = 0.0.0.0/0
120 debugtoolbar.exclude_prefixes =
120 debugtoolbar.exclude_prefixes =
121 /css
121 /css
122 /fonts
122 /fonts
123 /images
123 /images
124 /js
124 /js
125
125
126 ## RHODECODE PLUGINS ##
126 ## RHODECODE PLUGINS ##
127 rhodecode.includes =
127 rhodecode.includes =
128 rhodecode.api
128 rhodecode.api
129
129
130
130
131 # api prefix url
131 # api prefix url
132 rhodecode.api.url = /_admin/api
132 rhodecode.api.url = /_admin/api
133
133
134
134
135 ## END RHODECODE PLUGINS ##
135 ## END RHODECODE PLUGINS ##
136
136
137 ## encryption key used to encrypt social plugin tokens,
137 ## encryption key used to encrypt social plugin tokens,
138 ## remote_urls with credentials etc, if not set it defaults to
138 ## remote_urls with credentials etc, if not set it defaults to
139 ## `beaker.session.secret`
139 ## `beaker.session.secret`
140 #rhodecode.encrypted_values.secret =
140 #rhodecode.encrypted_values.secret =
141
141
142 ## decryption strict mode (enabled by default). It controls if decryption raises
142 ## decryption strict mode (enabled by default). It controls if decryption raises
143 ## `SignatureVerificationError` in case of wrong key, or damaged encryption data.
143 ## `SignatureVerificationError` in case of wrong key, or damaged encryption data.
144 #rhodecode.encrypted_values.strict = false
144 #rhodecode.encrypted_values.strict = false
145
145
146 ## return gzipped responses from Rhodecode (static files/application)
146 ## return gzipped responses from Rhodecode (static files/application)
147 gzip_responses = false
147 gzip_responses = false
148
148
149 ## autogenerate javascript routes file on startup
149 ## autogenerate javascript routes file on startup
150 generate_js_files = false
150 generate_js_files = false
151
151
152 ## Optional Languages
152 ## Optional Languages
153 ## en(default), be, de, es, fr, it, ja, pl, pt, ru, zh
153 ## en(default), be, de, es, fr, it, ja, pl, pt, ru, zh
154 lang = en
154 lang = en
155
155
156 ## perform a full repository scan on each server start, this should be
156 ## perform a full repository scan on each server start, this should be
157 ## set to false after first startup, to allow faster server restarts.
157 ## set to false after first startup, to allow faster server restarts.
158 startup.import_repos = false
158 startup.import_repos = false
159
159
160 ## Uncomment and set this path to use archive download cache.
160 ## Uncomment and set this path to use archive download cache.
161 ## Once enabled, generated archives will be cached at this location
161 ## Once enabled, generated archives will be cached at this location
162 ## and served from the cache during subsequent requests for the same archive of
162 ## and served from the cache during subsequent requests for the same archive of
163 ## the repository.
163 ## the repository.
164 #archive_cache_dir = /tmp/tarballcache
164 #archive_cache_dir = /tmp/tarballcache
165
165
166 ## change this to unique ID for security
166 ## change this to unique ID for security
167 app_instance_uuid = rc-production
167 app_instance_uuid = rc-production
168
168
169 ## cut off limit for large diffs (size in bytes)
170 cut_off_limit_diff = 1024000
171 cut_off_limit_file = 256000
172
169 ## cut off limit for large diffs (size in bytes). If the overall diff size of a
170 ## commit or pull request exceeds this limit, the diff will be displayed
171 ## partially. E.g 512000 == 512Kb
172 cut_off_limit_diff = 512000
173
174 ## cut off limit for large files inside diffs (size in bytes). Each individual
175 ## file inside a diff which exceeds this limit will be displayed partially.
176 ## E.g 128000 == 128Kb
177 cut_off_limit_file = 128000
178
173 ## use cache version of scm repo everywhere
179 ## use cache version of scm repo everywhere
174 vcs_full_cache = true
180 vcs_full_cache = true
175
181
176 ## force https in RhodeCode, fixes https redirects, assumes it's always https
182 ## force https in RhodeCode, fixes https redirects, assumes it's always https
177 ## Normally this is controlled by proper http flags sent from http server
183 ## Normally this is controlled by proper http flags sent from http server
178 force_https = false
184 force_https = false
179
185
180 ## use Strict-Transport-Security headers
186 ## use Strict-Transport-Security headers
181 use_htsts = false
187 use_htsts = false
182
188
183 ## number of commits stats will parse on each iteration
189 ## number of commits stats will parse on each iteration
184 commit_parse_limit = 25
190 commit_parse_limit = 25
185
191
186 ## git rev filter option, --all is the default filter, if you need to
192 ## git rev filter option, --all is the default filter, if you need to
187 ## hide all refs in changelog switch this to --branches --tags
193 ## hide all refs in changelog switch this to --branches --tags
188 git_rev_filter = --branches --tags
194 git_rev_filter = --branches --tags
189
195
190 # Set to true if your repos are exposed using the dumb protocol
196 # Set to true if your repos are exposed using the dumb protocol
191 git_update_server_info = false
197 git_update_server_info = false
192
198
193 ## RSS/ATOM feed options
199 ## RSS/ATOM feed options
194 rss_cut_off_limit = 256000
200 rss_cut_off_limit = 256000
195 rss_items_per_page = 10
201 rss_items_per_page = 10
196 rss_include_diff = false
202 rss_include_diff = false
197
203
198 ## gist URL alias, used to create nicer urls for gist. This should be an
204 ## gist URL alias, used to create nicer urls for gist. This should be an
199 ## url that does rewrites to _admin/gists/{gistid}.
205 ## url that does rewrites to _admin/gists/{gistid}.
200 ## example: http://gist.rhodecode.org/{gistid}. Empty means use the internal
206 ## example: http://gist.rhodecode.org/{gistid}. Empty means use the internal
201 ## RhodeCode url, ie. http[s]://rhodecode.server/_admin/gists/{gistid}
207 ## RhodeCode url, ie. http[s]://rhodecode.server/_admin/gists/{gistid}
202 gist_alias_url =
208 gist_alias_url =
203
209
204 ## List of views (using glob pattern syntax) that AUTH TOKENS could be
210 ## List of views (using glob pattern syntax) that AUTH TOKENS could be
205 ## used for access.
211 ## used for access.
206 ## Adding ?auth_token=TOKEN_HASH to the url authenticates this request as if it
212 ## Adding ?auth_token=TOKEN_HASH to the url authenticates this request as if it
207 ## came from the logged-in user who owns this authentication token.
213 ## came from the logged-in user who owns this authentication token.
208 ## Additionally the @TOKEN syntax can be used to bind the view to a specific
214 ## Additionally the @TOKEN syntax can be used to bind the view to a specific
209 ## authentication token. Such a view would only be accessible when used together
215 ## authentication token. Such a view would only be accessible when used together
210 ## with this authentication token
216 ## with this authentication token
211 ##
217 ##
212 ## list of all views can be found under `/_admin/permissions/auth_token_access`
218 ## list of all views can be found under `/_admin/permissions/auth_token_access`
213 ## The list should be "," separated and on a single line.
219 ## The list should be "," separated and on a single line.
214 ##
220 ##
215 ## Most common views to enable:
221 ## Most common views to enable:
216 # RepoCommitsView:repo_commit_download
222 # RepoCommitsView:repo_commit_download
217 # RepoCommitsView:repo_commit_patch
223 # RepoCommitsView:repo_commit_patch
218 # RepoCommitsView:repo_commit_raw
224 # RepoCommitsView:repo_commit_raw
219 # RepoCommitsView:repo_commit_raw@TOKEN
225 # RepoCommitsView:repo_commit_raw@TOKEN
220 # RepoFilesView:repo_files_diff
226 # RepoFilesView:repo_files_diff
221 # RepoFilesView:repo_archivefile
227 # RepoFilesView:repo_archivefile
222 # RepoFilesView:repo_file_raw
228 # RepoFilesView:repo_file_raw
223 # GistView:*
229 # GistView:*
224 api_access_controllers_whitelist =
230 api_access_controllers_whitelist =
225
231
226 ## default encoding used to convert from and to unicode
232 ## default encoding used to convert from and to unicode
227 ## can be also a comma separated list of encoding in case of mixed encodings
233 ## can be also a comma separated list of encoding in case of mixed encodings
228 default_encoding = UTF-8
234 default_encoding = UTF-8
229
235
230 ## instance-id prefix
236 ## instance-id prefix
231 ## a prefix key for this instance used for cache invalidation when running
237 ## a prefix key for this instance used for cache invalidation when running
232 ## multiple instances of rhodecode, make sure it's globally unique for
238 ## multiple instances of rhodecode, make sure it's globally unique for
233 ## all running rhodecode instances. Leave empty if you don't use it
239 ## all running rhodecode instances. Leave empty if you don't use it
234 instance_id =
240 instance_id =
235
241
236 ## Fallback authentication plugin. Set this to a plugin ID to force the usage
242 ## Fallback authentication plugin. Set this to a plugin ID to force the usage
237 ## of an authentication plugin even if it is disabled by its settings.
243 ## of an authentication plugin even if it is disabled by its settings.
238 ## This could be useful if you are unable to log in to the system due to broken
244 ## This could be useful if you are unable to log in to the system due to broken
239 ## authentication settings. Then you can enable e.g. the internal rhodecode auth
245 ## authentication settings. Then you can enable e.g. the internal rhodecode auth
240 ## module to log in again and fix the settings.
246 ## module to log in again and fix the settings.
241 ##
247 ##
242 ## Available builtin plugin IDs (hash is part of the ID):
248 ## Available builtin plugin IDs (hash is part of the ID):
243 ## egg:rhodecode-enterprise-ce#rhodecode
249 ## egg:rhodecode-enterprise-ce#rhodecode
244 ## egg:rhodecode-enterprise-ce#pam
250 ## egg:rhodecode-enterprise-ce#pam
245 ## egg:rhodecode-enterprise-ce#ldap
251 ## egg:rhodecode-enterprise-ce#ldap
246 ## egg:rhodecode-enterprise-ce#jasig_cas
252 ## egg:rhodecode-enterprise-ce#jasig_cas
247 ## egg:rhodecode-enterprise-ce#headers
253 ## egg:rhodecode-enterprise-ce#headers
248 ## egg:rhodecode-enterprise-ce#crowd
254 ## egg:rhodecode-enterprise-ce#crowd
249 #rhodecode.auth_plugin_fallback = egg:rhodecode-enterprise-ce#rhodecode
255 #rhodecode.auth_plugin_fallback = egg:rhodecode-enterprise-ce#rhodecode
250
256
251 ## alternative return HTTP header for failed authentication. Default HTTP
257 ## alternative return HTTP header for failed authentication. Default HTTP
252 ## response is 401 HTTPUnauthorized. Currently HG clients have trouble
258 ## response is 401 HTTPUnauthorized. Currently HG clients have trouble
253 ## handling that, causing a series of failed authentication calls.
259 ## handling that, causing a series of failed authentication calls.
254 ## Set this variable to 403 to return HTTPForbidden, or any other HTTP code
260 ## Set this variable to 403 to return HTTPForbidden, or any other HTTP code
255 ## This will be served instead of default 401 on bad authentication
261 ## This will be served instead of default 401 on bad authentication
256 auth_ret_code =
262 auth_ret_code =
257
263
258 ## use special detection method when serving auth_ret_code, instead of serving
264 ## use special detection method when serving auth_ret_code, instead of serving
259 ## ret_code directly, use 401 initially (Which triggers credentials prompt)
265 ## ret_code directly, use 401 initially (Which triggers credentials prompt)
260 ## and then serve auth_ret_code to clients
266 ## and then serve auth_ret_code to clients
261 auth_ret_code_detection = false
267 auth_ret_code_detection = false
262
268
263 ## locking return code. When repository is locked return this HTTP code. 2XX
269 ## locking return code. When repository is locked return this HTTP code. 2XX
264 ## codes don't break the transactions while 4XX codes do
270 ## codes don't break the transactions while 4XX codes do
265 lock_ret_code = 423
271 lock_ret_code = 423
266
272
267 ## allows changing the repository location in the settings page
273 ## allows changing the repository location in the settings page
268 allow_repo_location_change = true
274 allow_repo_location_change = true
269
275
270 ## allows setting up custom hooks in the settings page
276 ## allows setting up custom hooks in the settings page
271 allow_custom_hooks_settings = true
277 allow_custom_hooks_settings = true
272
278
273 ## generated license token, go to the license page in RhodeCode settings to obtain
279 ## generated license token, go to the license page in RhodeCode settings to obtain
274 ## a new token
280 ## a new token
275 license_token =
281 license_token =
276
282
277 ## supervisor connection uri, for managing supervisor and logs.
283 ## supervisor connection uri, for managing supervisor and logs.
278 supervisor.uri =
284 supervisor.uri =
279 ## supervisord group name/id that we only want this RC instance to handle
285 ## supervisord group name/id that we only want this RC instance to handle
280 supervisor.group_id = dev
286 supervisor.group_id = dev
281
287
282 ## Display extended labs settings
288 ## Display extended labs settings
283 labs_settings_active = true
289 labs_settings_active = true
284
290
285 ####################################
291 ####################################
286 ### CELERY CONFIG ####
292 ### CELERY CONFIG ####
287 ####################################
293 ####################################
288 use_celery = false
294 use_celery = false
289 broker.host = localhost
295 broker.host = localhost
290 broker.vhost = rabbitmqhost
296 broker.vhost = rabbitmqhost
291 broker.port = 5672
297 broker.port = 5672
292 broker.user = rabbitmq
298 broker.user = rabbitmq
293 broker.password = qweqwe
299 broker.password = qweqwe
294
300
295 celery.imports = rhodecode.lib.celerylib.tasks
301 celery.imports = rhodecode.lib.celerylib.tasks
296
302
297 celery.result.backend = amqp
303 celery.result.backend = amqp
298 celery.result.dburi = amqp://
304 celery.result.dburi = amqp://
299 celery.result.serialier = json
305 celery.result.serialier = json
300
306
301 #celery.send.task.error.emails = true
307 #celery.send.task.error.emails = true
302 #celery.amqp.task.result.expires = 18000
308 #celery.amqp.task.result.expires = 18000
303
309
304 celeryd.concurrency = 2
310 celeryd.concurrency = 2
305 #celeryd.log.file = celeryd.log
311 #celeryd.log.file = celeryd.log
306 celeryd.log.level = debug
312 celeryd.log.level = debug
307 celeryd.max.tasks.per.child = 1
313 celeryd.max.tasks.per.child = 1
308
314
309 ## tasks will never be sent to the queue, but executed locally instead.
315 ## tasks will never be sent to the queue, but executed locally instead.
310 celery.always.eager = false
316 celery.always.eager = false
311
317
312 ####################################
318 ####################################
313 ### BEAKER CACHE ####
319 ### BEAKER CACHE ####
314 ####################################
320 ####################################
315 ## default cache dir for templates. Putting this into a ramdisk
321 ## default cache dir for templates. Putting this into a ramdisk
316 ## can boost performance, eg. %(here)s/data_ramdisk
322 ## can boost performance, eg. %(here)s/data_ramdisk
317 cache_dir = %(here)s/data
323 cache_dir = %(here)s/data
318
324
319 ## locking and default file storage for Beaker. Putting this into a ramdisk
325 ## locking and default file storage for Beaker. Putting this into a ramdisk
320 ## can boost performance, eg. %(here)s/data_ramdisk/cache/beaker_data
326 ## can boost performance, eg. %(here)s/data_ramdisk/cache/beaker_data
321 beaker.cache.data_dir = %(here)s/data/cache/beaker_data
327 beaker.cache.data_dir = %(here)s/data/cache/beaker_data
322 beaker.cache.lock_dir = %(here)s/data/cache/beaker_lock
328 beaker.cache.lock_dir = %(here)s/data/cache/beaker_lock
323
329
324 beaker.cache.regions = super_short_term, short_term, long_term, sql_cache_short, auth_plugins, repo_cache_long
330 beaker.cache.regions = super_short_term, short_term, long_term, sql_cache_short, auth_plugins, repo_cache_long
325
331
326 beaker.cache.super_short_term.type = memory
332 beaker.cache.super_short_term.type = memory
327 beaker.cache.super_short_term.expire = 10
333 beaker.cache.super_short_term.expire = 10
328 beaker.cache.super_short_term.key_length = 256
334 beaker.cache.super_short_term.key_length = 256
329
335
330 beaker.cache.short_term.type = memory
336 beaker.cache.short_term.type = memory
331 beaker.cache.short_term.expire = 60
337 beaker.cache.short_term.expire = 60
332 beaker.cache.short_term.key_length = 256
338 beaker.cache.short_term.key_length = 256
333
339
334 beaker.cache.long_term.type = memory
340 beaker.cache.long_term.type = memory
335 beaker.cache.long_term.expire = 36000
341 beaker.cache.long_term.expire = 36000
336 beaker.cache.long_term.key_length = 256
342 beaker.cache.long_term.key_length = 256
337
343
338 beaker.cache.sql_cache_short.type = memory
344 beaker.cache.sql_cache_short.type = memory
339 beaker.cache.sql_cache_short.expire = 10
345 beaker.cache.sql_cache_short.expire = 10
340 beaker.cache.sql_cache_short.key_length = 256
346 beaker.cache.sql_cache_short.key_length = 256
341
347
342 ## default is memory cache, configure only if required
348 ## default is memory cache, configure only if required
343 ## using multi-node or multi-worker setup
349 ## using multi-node or multi-worker setup
344 #beaker.cache.auth_plugins.type = ext:database
350 #beaker.cache.auth_plugins.type = ext:database
345 #beaker.cache.auth_plugins.lock_dir = %(here)s/data/cache/auth_plugin_lock
351 #beaker.cache.auth_plugins.lock_dir = %(here)s/data/cache/auth_plugin_lock
346 #beaker.cache.auth_plugins.url = postgresql://postgres:secret@localhost/rhodecode
352 #beaker.cache.auth_plugins.url = postgresql://postgres:secret@localhost/rhodecode
347 #beaker.cache.auth_plugins.url = mysql://root:secret@127.0.0.1/rhodecode
353 #beaker.cache.auth_plugins.url = mysql://root:secret@127.0.0.1/rhodecode
348 #beaker.cache.auth_plugins.sa.pool_recycle = 3600
354 #beaker.cache.auth_plugins.sa.pool_recycle = 3600
349 #beaker.cache.auth_plugins.sa.pool_size = 10
355 #beaker.cache.auth_plugins.sa.pool_size = 10
350 #beaker.cache.auth_plugins.sa.max_overflow = 0
356 #beaker.cache.auth_plugins.sa.max_overflow = 0
351
357
352 beaker.cache.repo_cache_long.type = memorylru_base
358 beaker.cache.repo_cache_long.type = memorylru_base
353 beaker.cache.repo_cache_long.max_items = 4096
359 beaker.cache.repo_cache_long.max_items = 4096
354 beaker.cache.repo_cache_long.expire = 2592000
360 beaker.cache.repo_cache_long.expire = 2592000
355
361
356 ## default is memorylru_base cache, configure only if required
362 ## default is memorylru_base cache, configure only if required
357 ## using multi-node or multi-worker setup
363 ## using multi-node or multi-worker setup
358 #beaker.cache.repo_cache_long.type = ext:memcached
364 #beaker.cache.repo_cache_long.type = ext:memcached
359 #beaker.cache.repo_cache_long.url = localhost:11211
365 #beaker.cache.repo_cache_long.url = localhost:11211
360 #beaker.cache.repo_cache_long.expire = 1209600
366 #beaker.cache.repo_cache_long.expire = 1209600
361 #beaker.cache.repo_cache_long.key_length = 256
367 #beaker.cache.repo_cache_long.key_length = 256
362
368
363 ####################################
369 ####################################
364 ### BEAKER SESSION ####
370 ### BEAKER SESSION ####
365 ####################################
371 ####################################
366
372
367 ## .session.type is the type of storage used for the session; currently allowed
373 ## .session.type is the type of storage used for the session; currently allowed
368 ## types are file, ext:memcached, ext:database, and memory (default).
374 ## types are file, ext:memcached, ext:database, and memory (default).
369 beaker.session.type = file
375 beaker.session.type = file
370 beaker.session.data_dir = %(here)s/data/sessions/data
376 beaker.session.data_dir = %(here)s/data/sessions/data
371
377
372 ## db based session, fast, and allows easy management over logged in users
378 ## db based session, fast, and allows easy management over logged in users
373 #beaker.session.type = ext:database
379 #beaker.session.type = ext:database
374 #beaker.session.table_name = db_session
380 #beaker.session.table_name = db_session
375 #beaker.session.sa.url = postgresql://postgres:secret@localhost/rhodecode
381 #beaker.session.sa.url = postgresql://postgres:secret@localhost/rhodecode
376 #beaker.session.sa.url = mysql://root:secret@127.0.0.1/rhodecode
382 #beaker.session.sa.url = mysql://root:secret@127.0.0.1/rhodecode
377 #beaker.session.sa.pool_recycle = 3600
383 #beaker.session.sa.pool_recycle = 3600
378 #beaker.session.sa.echo = false
384 #beaker.session.sa.echo = false
379
385
380 beaker.session.key = rhodecode
386 beaker.session.key = rhodecode
381 beaker.session.secret = develop-rc-uytcxaz
387 beaker.session.secret = develop-rc-uytcxaz
382 beaker.session.lock_dir = %(here)s/data/sessions/lock
388 beaker.session.lock_dir = %(here)s/data/sessions/lock
383
389
384 ## Secure encrypted cookie. Requires AES and AES python libraries
390 ## Secure encrypted cookie. Requires AES and AES python libraries
385 ## you must disable beaker.session.secret to use this
391 ## you must disable beaker.session.secret to use this
386 #beaker.session.encrypt_key = key_for_encryption
392 #beaker.session.encrypt_key = key_for_encryption
387 #beaker.session.validate_key = validation_key
393 #beaker.session.validate_key = validation_key
388
394
389 ## sets session as invalid (also logging out the user) if it has not been
395 ## sets session as invalid (also logging out the user) if it has not been
390 ## accessed for a given amount of time in seconds
396 ## accessed for a given amount of time in seconds
391 beaker.session.timeout = 2592000
397 beaker.session.timeout = 2592000
392 beaker.session.httponly = true
398 beaker.session.httponly = true
393 ## Path to use for the cookie. Set to prefix if you use prefix middleware
399 ## Path to use for the cookie. Set to prefix if you use prefix middleware
394 #beaker.session.cookie_path = /custom_prefix
400 #beaker.session.cookie_path = /custom_prefix
395
401
396 ## uncomment for https secure cookie
402 ## uncomment for https secure cookie
397 beaker.session.secure = false
403 beaker.session.secure = false
398
404
399 ## auto save the session so you don't have to call .save()
405 ## auto save the session so you don't have to call .save()
400 beaker.session.auto = false
406 beaker.session.auto = false
401
407
402 ## default cookie expiration time in seconds, set to `true` to set expire
408 ## default cookie expiration time in seconds, set to `true` to set expire
403 ## at browser close
409 ## at browser close
404 #beaker.session.cookie_expires = 3600
410 #beaker.session.cookie_expires = 3600
405
411
406 ###################################
412 ###################################
407 ## SEARCH INDEXING CONFIGURATION ##
413 ## SEARCH INDEXING CONFIGURATION ##
408 ###################################
414 ###################################
409 ## Full text search indexer is available in rhodecode-tools under
415 ## Full text search indexer is available in rhodecode-tools under
410 ## `rhodecode-tools index` command
416 ## `rhodecode-tools index` command
411
417
412 ## WHOOSH Backend, doesn't require additional services to run
418 ## WHOOSH Backend, doesn't require additional services to run
413 ## it works well with a few dozen repos
419 ## it works well with a few dozen repos
414 search.module = rhodecode.lib.index.whoosh
420 search.module = rhodecode.lib.index.whoosh
415 search.location = %(here)s/data/index
421 search.location = %(here)s/data/index
416
422
417 ########################################
423 ########################################
418 ### CHANNELSTREAM CONFIG ####
424 ### CHANNELSTREAM CONFIG ####
419 ########################################
425 ########################################
420 ## channelstream enables persistent connections and live notification
426 ## channelstream enables persistent connections and live notification
421 ## in the system. It's also used by the chat system
427 ## in the system. It's also used by the chat system
422 channelstream.enabled = false
428 channelstream.enabled = false
423
429
424 ## server address for channelstream server on the backend
430 ## server address for channelstream server on the backend
425 channelstream.server = 127.0.0.1:9800
431 channelstream.server = 127.0.0.1:9800
426
432
427 ## location of the channelstream server from outside world
433 ## location of the channelstream server from outside world
428 ## use ws:// for http or wss:// for https. This address needs to be handled
434 ## use ws:// for http or wss:// for https. This address needs to be handled
429 ## by external HTTP server such as Nginx or Apache
435 ## by external HTTP server such as Nginx or Apache
430 ## see nginx/apache configuration examples in our docs
436 ## see nginx/apache configuration examples in our docs
431 channelstream.ws_url = ws://rhodecode.yourserver.com/_channelstream
437 channelstream.ws_url = ws://rhodecode.yourserver.com/_channelstream
432 channelstream.secret = secret
438 channelstream.secret = secret
433 channelstream.history.location = %(here)s/channelstream_history
439 channelstream.history.location = %(here)s/channelstream_history
434
440
435 ## Internal application path that Javascript uses to connect into.
441 ## Internal application path that Javascript uses to connect into.
436 ## If you use proxy-prefix the prefix should be added before /_channelstream
442 ## If you use proxy-prefix the prefix should be added before /_channelstream
437 channelstream.proxy_path = /_channelstream
443 channelstream.proxy_path = /_channelstream
438
444
439
445
440 ###################################
446 ###################################
441 ## APPENLIGHT CONFIG ##
447 ## APPENLIGHT CONFIG ##
442 ###################################
448 ###################################
443
449
444 ## Appenlight is tailored to work with RhodeCode, see
450 ## Appenlight is tailored to work with RhodeCode, see
445 ## http://appenlight.com for details how to obtain an account
451 ## http://appenlight.com for details how to obtain an account
446
452
447 ## appenlight integration enabled
453 ## appenlight integration enabled
448 appenlight = false
454 appenlight = false
449
455
450 appenlight.server_url = https://api.appenlight.com
456 appenlight.server_url = https://api.appenlight.com
451 appenlight.api_key = YOUR_API_KEY
457 appenlight.api_key = YOUR_API_KEY
452 #appenlight.transport_config = https://api.appenlight.com?threaded=1&timeout=5
458 #appenlight.transport_config = https://api.appenlight.com?threaded=1&timeout=5
453
459
454 # used for JS client
460 # used for JS client
455 appenlight.api_public_key = YOUR_API_PUBLIC_KEY
461 appenlight.api_public_key = YOUR_API_PUBLIC_KEY
456
462
457 ## TWEAK AMOUNT OF INFO SENT HERE
463 ## TWEAK AMOUNT OF INFO SENT HERE
458
464
459 ## enables 404 error logging (default False)
465 ## enables 404 error logging (default False)
460 appenlight.report_404 = false
466 appenlight.report_404 = false
461
467
462 ## time in seconds after which a request is considered slow (default 1)
468 ## time in seconds after which a request is considered slow (default 1)
463 appenlight.slow_request_time = 1
469 appenlight.slow_request_time = 1
464
470
465 ## record slow requests in application
471 ## record slow requests in application
466 ## (needs to be enabled for slow datastore recording and time tracking)
472 ## (needs to be enabled for slow datastore recording and time tracking)
467 appenlight.slow_requests = true
473 appenlight.slow_requests = true
468
474
469 ## enable hooking to application loggers
475 ## enable hooking to application loggers
470 appenlight.logging = true
476 appenlight.logging = true
471
477
472 ## minimum log level for log capture
478 ## minimum log level for log capture
473 appenlight.logging.level = WARNING
479 appenlight.logging.level = WARNING
474
480
475 ## send logs only from erroneous/slow requests
481 ## send logs only from erroneous/slow requests
476 ## (saves API quota for intensive logging)
482 ## (saves API quota for intensive logging)
477 appenlight.logging_on_error = false
483 appenlight.logging_on_error = false
478
484
479 ## list of additional keywords that should be grabbed from environ object
485 ## list of additional keywords that should be grabbed from environ object
480 ## can be string with comma separated list of words in lowercase
486 ## can be string with comma separated list of words in lowercase
481 ## (by default client will always send following info:
487 ## (by default client will always send following info:
482 ## 'REMOTE_USER', 'REMOTE_ADDR', 'SERVER_NAME', 'CONTENT_TYPE' + all keys that
488 ## 'REMOTE_USER', 'REMOTE_ADDR', 'SERVER_NAME', 'CONTENT_TYPE' + all keys that
483 ## start with HTTP*); this list can be extended with additional keywords here
489 ## start with HTTP*); this list can be extended with additional keywords here
484 appenlight.environ_keys_whitelist =
490 appenlight.environ_keys_whitelist =
485
491
486 ## list of keywords that should be blanked from request object
492 ## list of keywords that should be blanked from request object
487 ## can be string with comma separated list of words in lowercase
493 ## can be string with comma separated list of words in lowercase
488 ## (by default client will always blank keys that contain following words
494 ## (by default client will always blank keys that contain following words
489 ## 'password', 'passwd', 'pwd', 'auth_tkt', 'secret', 'csrf'
495 ## 'password', 'passwd', 'pwd', 'auth_tkt', 'secret', 'csrf'
490 ## this list can be extended with additional keywords set here
496 ## this list can be extended with additional keywords set here
491 appenlight.request_keys_blacklist =
497 appenlight.request_keys_blacklist =
492
498
493 ## list of namespaces that should be ignored when gathering log entries
499 ## list of namespaces that should be ignored when gathering log entries
494 ## can be string with comma separated list of namespaces
500 ## can be string with comma separated list of namespaces
495 ## (by default the client ignores own entries: appenlight_client.client)
501 ## (by default the client ignores own entries: appenlight_client.client)
496 appenlight.log_namespace_blacklist =
502 appenlight.log_namespace_blacklist =
497
503
498
504
499 ################################################################################
505 ################################################################################
500 ## WARNING: *THE LINE BELOW MUST BE UNCOMMENTED ON A PRODUCTION ENVIRONMENT* ##
506 ## WARNING: *THE LINE BELOW MUST BE UNCOMMENTED ON A PRODUCTION ENVIRONMENT* ##
501 ## Debug mode will enable the interactive debugging tool, allowing ANYONE to ##
507 ## Debug mode will enable the interactive debugging tool, allowing ANYONE to ##
502 ## execute malicious code after an exception is raised. ##
508 ## execute malicious code after an exception is raised. ##
503 ################################################################################
509 ################################################################################
504 #set debug = false
510 #set debug = false
505
511
506
512
507 ##############
513 ##############
508 ## STYLING ##
514 ## STYLING ##
509 ##############
515 ##############
510 debug_style = true
516 debug_style = true
511
517
512 ###########################################
518 ###########################################
513 ### MAIN RHODECODE DATABASE CONFIG ###
519 ### MAIN RHODECODE DATABASE CONFIG ###
514 ###########################################
520 ###########################################
515 #sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode.db?timeout=30
521 #sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode.db?timeout=30
516 #sqlalchemy.db1.url = postgresql://postgres:qweqwe@localhost/rhodecode
522 #sqlalchemy.db1.url = postgresql://postgres:qweqwe@localhost/rhodecode
517 #sqlalchemy.db1.url = mysql://root:qweqwe@localhost/rhodecode
523 #sqlalchemy.db1.url = mysql://root:qweqwe@localhost/rhodecode
518 sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode.db?timeout=30
524 sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode.db?timeout=30
519
525
520 # see sqlalchemy docs for other advanced settings
526 # see sqlalchemy docs for other advanced settings
521
527
522 ## print the sql statements to output
528 ## print the sql statements to output
523 sqlalchemy.db1.echo = false
529 sqlalchemy.db1.echo = false
524 ## recycle the connections after this amount of seconds
530 ## recycle the connections after this amount of seconds
525 sqlalchemy.db1.pool_recycle = 3600
531 sqlalchemy.db1.pool_recycle = 3600
526 sqlalchemy.db1.convert_unicode = true
532 sqlalchemy.db1.convert_unicode = true
527
533
528 ## the number of connections to keep open inside the connection pool.
534 ## the number of connections to keep open inside the connection pool.
529 ## 0 indicates no limit
535 ## 0 indicates no limit
530 #sqlalchemy.db1.pool_size = 5
536 #sqlalchemy.db1.pool_size = 5
531
537
532 ## the number of connections to allow in connection pool "overflow", that is
538 ## the number of connections to allow in connection pool "overflow", that is
533 ## connections that can be opened above and beyond the pool_size setting,
539 ## connections that can be opened above and beyond the pool_size setting,
534 ## which defaults to five.
540 ## which defaults to five.
535 #sqlalchemy.db1.max_overflow = 10
541 #sqlalchemy.db1.max_overflow = 10
536
542
537
543
538 ##################
544 ##################
539 ### VCS CONFIG ###
545 ### VCS CONFIG ###
540 ##################
546 ##################
541 vcs.server.enable = true
547 vcs.server.enable = true
542 vcs.server = localhost:9900
548 vcs.server = localhost:9900
543
549
544 ## Web server connectivity protocol, responsible for web based VCS operations
550 ## Web server connectivity protocol, responsible for web based VCS operations
545 ## Available protocols are:
551 ## Available protocols are:
546 ## `http` - use http-rpc backend (default)
552 ## `http` - use http-rpc backend (default)
547 vcs.server.protocol = http
553 vcs.server.protocol = http
548
554
549 ## Push/Pull operations protocol, available options are:
555 ## Push/Pull operations protocol, available options are:
550 ## `http` - use http-rpc backend (default)
556 ## `http` - use http-rpc backend (default)
551 ##
557 ##
552 vcs.scm_app_implementation = http
558 vcs.scm_app_implementation = http
553
559
554 ## Push/Pull operations hooks protocol, available options are:
560 ## Push/Pull operations hooks protocol, available options are:
555 ## `http` - use http-rpc backend (default)
561 ## `http` - use http-rpc backend (default)
556 vcs.hooks.protocol = http
562 vcs.hooks.protocol = http
557
563
558 vcs.server.log_level = debug
564 vcs.server.log_level = debug
559 ## Start VCSServer with this instance as a subprocess, useful for development
565 ## Start VCSServer with this instance as a subprocess, useful for development
560 vcs.start_server = true
566 vcs.start_server = true
561
567
562 ## List of enabled VCS backends, available options are:
568 ## List of enabled VCS backends, available options are:
563 ## `hg` - mercurial
569 ## `hg` - mercurial
564 ## `git` - git
570 ## `git` - git
565 ## `svn` - subversion
571 ## `svn` - subversion
566 vcs.backends = hg, git, svn
572 vcs.backends = hg, git, svn
567
573
568 vcs.connection_timeout = 3600
574 vcs.connection_timeout = 3600
569 ## Compatibility version when creating SVN repositories. Defaults to newest version when commented out.
575 ## Compatibility version when creating SVN repositories. Defaults to newest version when commented out.
570 ## Available options are: pre-1.4-compatible, pre-1.5-compatible, pre-1.6-compatible, pre-1.8-compatible, pre-1.9-compatible
576 ## Available options are: pre-1.4-compatible, pre-1.5-compatible, pre-1.6-compatible, pre-1.8-compatible, pre-1.9-compatible
571 #vcs.svn.compatible_version = pre-1.8-compatible
577 #vcs.svn.compatible_version = pre-1.8-compatible
572
578
573
579
574 ############################################################
580 ############################################################
575 ### Subversion proxy support (mod_dav_svn) ###
581 ### Subversion proxy support (mod_dav_svn) ###
576 ### Maps RhodeCode repo groups into SVN paths for Apache ###
582 ### Maps RhodeCode repo groups into SVN paths for Apache ###
577 ############################################################
583 ############################################################
578 ## Enable or disable the config file generation.
584 ## Enable or disable the config file generation.
579 svn.proxy.generate_config = false
585 svn.proxy.generate_config = false
580 ## Generate config file with `SVNListParentPath` set to `On`.
586 ## Generate config file with `SVNListParentPath` set to `On`.
581 svn.proxy.list_parent_path = true
587 svn.proxy.list_parent_path = true
582 ## Set location and file name of generated config file.
588 ## Set location and file name of generated config file.
583 svn.proxy.config_file_path = %(here)s/mod_dav_svn.conf
589 svn.proxy.config_file_path = %(here)s/mod_dav_svn.conf
584 ## Used as a prefix to the `Location` block in the generated config file.
590 ## Used as a prefix to the `Location` block in the generated config file.
585 ## In most cases it should be set to `/`.
591 ## In most cases it should be set to `/`.
586 svn.proxy.location_root = /
592 svn.proxy.location_root = /
587 ## Command to reload the mod dav svn configuration on change.
593 ## Command to reload the mod dav svn configuration on change.
588 ## Example: `/etc/init.d/apache2 reload`
594 ## Example: `/etc/init.d/apache2 reload`
589 #svn.proxy.reload_cmd = /etc/init.d/apache2 reload
595 #svn.proxy.reload_cmd = /etc/init.d/apache2 reload
590 ## If the timeout expires before the reload command finishes, the command will
596 ## If the timeout expires before the reload command finishes, the command will
591 ## be killed. Setting it to zero means no timeout. Defaults to 10 seconds.
597 ## be killed. Setting it to zero means no timeout. Defaults to 10 seconds.
592 #svn.proxy.reload_timeout = 10
598 #svn.proxy.reload_timeout = 10
593
599
594 ############################################################
600 ############################################################
595 ### SSH Support Settings ###
601 ### SSH Support Settings ###
596 ############################################################
602 ############################################################
597
603
598 ## Defines if the authorized_keys file should be written on any change of
604 ## Defines if the authorized_keys file should be written on any change of
599 ## user ssh keys, setting this to false also disables the possibility of adding
605 ## user ssh keys, setting this to false also disables the possibility of adding
600 ## ssh keys for users from the web interface.
606 ## ssh keys for users from the web interface.
601 ssh.generate_authorized_keyfile = false
607 ssh.generate_authorized_keyfile = false
602
608
603 ## Options for ssh, default is `no-pty,no-port-forwarding,no-X11-forwarding,no-agent-forwarding`
609 ## Options for ssh, default is `no-pty,no-port-forwarding,no-X11-forwarding,no-agent-forwarding`
604 # ssh.authorized_keys_ssh_opts =
610 # ssh.authorized_keys_ssh_opts =
605
611
606 ## File to generate the authorized keys together with options
612 ## File to generate the authorized keys together with options
607 ## It is possible to have multiple key files specified in `sshd_config` e.g.
613 ## It is possible to have multiple key files specified in `sshd_config` e.g.
608 ## AuthorizedKeysFile %h/.ssh/authorized_keys %h/.ssh/authorized_keys_rhodecode
614 ## AuthorizedKeysFile %h/.ssh/authorized_keys %h/.ssh/authorized_keys_rhodecode
609 ssh.authorized_keys_file_path = ~/.ssh/authorized_keys_rhodecode
615 ssh.authorized_keys_file_path = ~/.ssh/authorized_keys_rhodecode
610
616
611 ## Command to execute the SSH wrapper. The binary is available in the
617 ## Command to execute the SSH wrapper. The binary is available in the
612 ## rhodecode installation directory.
618 ## rhodecode installation directory.
613 ## e.g ~/.rccontrol/community-1/profile/bin/rcssh-wrapper
619 ## e.g ~/.rccontrol/community-1/profile/bin/rcssh-wrapper
614 ssh.wrapper_cmd = ~/.rccontrol/community-1/rcssh-wrapper
620 ssh.wrapper_cmd = ~/.rccontrol/community-1/rcssh-wrapper
615
621
616 ## Allow shell when executing the ssh-wrapper command
622 ## Allow shell when executing the ssh-wrapper command
617 ssh.wrapper_cmd_allow_shell = false
623 ssh.wrapper_cmd_allow_shell = false
618
624
619 ## Enables logging, and detailed output sent back to the client. Useful for
625 ## Enables logging, and detailed output sent back to the client. Useful for
620 ## debugging, shouldn't be used in production.
626 ## debugging, shouldn't be used in production.
621 ssh.enable_debug_logging = false
627 ssh.enable_debug_logging = false
622
628
623 ## API KEY for user who has access to fetch other user permission information
629 ## API KEY for user who has access to fetch other user permission information
624 ## most likely a super-admin account with some IP restrictions.
630 ## most likely a super-admin account with some IP restrictions.
625 ssh.api_key =
631 ssh.api_key =
626
632
627 ## API Host, the server address of RhodeCode instance that the api_key will
633 ## API Host, the server address of RhodeCode instance that the api_key will
628 ## access
634 ## access
629 ssh.api_host = http://localhost
635 ssh.api_host = http://localhost
630
636
631 ## Paths to binary executables, by default they are the names, but we can
637 ## Paths to binary executables, by default they are the names, but we can
632 ## override them if we want to use a custom one
638 ## override them if we want to use a custom one
633 ssh.executable.hg = ~/.rccontrol/vcsserver-1/profile/bin/hg
639 ssh.executable.hg = ~/.rccontrol/vcsserver-1/profile/bin/hg
634 ssh.executable.git = ~/.rccontrol/vcsserver-1/profile/bin/git
640 ssh.executable.git = ~/.rccontrol/vcsserver-1/profile/bin/git
635 ssh.executable.svn = ~/.rccontrol/vcsserver-1/profile/bin/svnserve
641 ssh.executable.svn = ~/.rccontrol/vcsserver-1/profile/bin/svnserve
636
642
637
643
638 ## Dummy marker to add new entries after.
644 ## Dummy marker to add new entries after.
639 ## Add any custom entries below. Please don't remove.
645 ## Add any custom entries below. Please don't remove.
640 custom.conf = 1
646 custom.conf = 1
641
647
642
648
643 ################################
649 ################################
644 ### LOGGING CONFIGURATION ####
650 ### LOGGING CONFIGURATION ####
645 ################################
651 ################################
646 [loggers]
652 [loggers]
647 keys = root, routes, rhodecode, sqlalchemy, beaker, templates, ssh_wrapper
653 keys = root, routes, rhodecode, sqlalchemy, beaker, templates, ssh_wrapper
648
654
649 [handlers]
655 [handlers]
650 keys = console, console_sql
656 keys = console, console_sql
651
657
652 [formatters]
658 [formatters]
653 keys = generic, color_formatter, color_formatter_sql
659 keys = generic, color_formatter, color_formatter_sql
654
660
655 #############
661 #############
656 ## LOGGERS ##
662 ## LOGGERS ##
657 #############
663 #############
658 [logger_root]
664 [logger_root]
659 level = NOTSET
665 level = NOTSET
660 handlers = console
666 handlers = console
661
667
662 [logger_routes]
668 [logger_routes]
663 level = DEBUG
669 level = DEBUG
664 handlers =
670 handlers =
665 qualname = routes.middleware
671 qualname = routes.middleware
666 ## "level = DEBUG" logs the route matched and routing variables.
672 ## "level = DEBUG" logs the route matched and routing variables.
667 propagate = 1
673 propagate = 1
668
674
669 [logger_beaker]
675 [logger_beaker]
670 level = DEBUG
676 level = DEBUG
671 handlers =
677 handlers =
672 qualname = beaker.container
678 qualname = beaker.container
673 propagate = 1
679 propagate = 1
674
680
675 [logger_templates]
681 [logger_templates]
676 level = INFO
682 level = INFO
677 handlers =
683 handlers =
678 qualname = pylons.templating
684 qualname = pylons.templating
679 propagate = 1
685 propagate = 1
680
686
681 [logger_rhodecode]
687 [logger_rhodecode]
682 level = DEBUG
688 level = DEBUG
683 handlers =
689 handlers =
684 qualname = rhodecode
690 qualname = rhodecode
685 propagate = 1
691 propagate = 1
686
692
687 [logger_sqlalchemy]
693 [logger_sqlalchemy]
688 level = INFO
694 level = INFO
689 handlers = console_sql
695 handlers = console_sql
690 qualname = sqlalchemy.engine
696 qualname = sqlalchemy.engine
691 propagate = 0
697 propagate = 0
692
698
693 [logger_ssh_wrapper]
699 [logger_ssh_wrapper]
694 level = DEBUG
700 level = DEBUG
695 handlers =
701 handlers =
696 qualname = ssh_wrapper
702 qualname = ssh_wrapper
697 propagate = 1
703 propagate = 1
698
704
699
705
700 ##############
706 ##############
701 ## HANDLERS ##
707 ## HANDLERS ##
702 ##############
708 ##############
703
709
704 [handler_console]
710 [handler_console]
705 class = StreamHandler
711 class = StreamHandler
706 args = (sys.stderr, )
712 args = (sys.stderr, )
707 level = DEBUG
713 level = DEBUG
708 formatter = color_formatter
714 formatter = color_formatter
709
715
710 [handler_console_sql]
716 [handler_console_sql]
711 class = StreamHandler
717 class = StreamHandler
712 args = (sys.stderr, )
718 args = (sys.stderr, )
713 level = DEBUG
719 level = DEBUG
714 formatter = color_formatter_sql
720 formatter = color_formatter_sql
715
721
716 ################
722 ################
717 ## FORMATTERS ##
723 ## FORMATTERS ##
718 ################
724 ################
719
725
720 [formatter_generic]
726 [formatter_generic]
721 class = rhodecode.lib.logging_formatter.ExceptionAwareFormatter
727 class = rhodecode.lib.logging_formatter.ExceptionAwareFormatter
722 format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
728 format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
723 datefmt = %Y-%m-%d %H:%M:%S
729 datefmt = %Y-%m-%d %H:%M:%S
724
730
725 [formatter_color_formatter]
731 [formatter_color_formatter]
726 class = rhodecode.lib.logging_formatter.ColorFormatter
732 class = rhodecode.lib.logging_formatter.ColorFormatter
727 format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
733 format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
728 datefmt = %Y-%m-%d %H:%M:%S
734 datefmt = %Y-%m-%d %H:%M:%S
729
735
730 [formatter_color_formatter_sql]
736 [formatter_color_formatter_sql]
731 class = rhodecode.lib.logging_formatter.ColorFormatterSql
737 class = rhodecode.lib.logging_formatter.ColorFormatterSql
732 format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
738 format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
733 datefmt = %Y-%m-%d %H:%M:%S
739 datefmt = %Y-%m-%d %H:%M:%S
@@ -1,702 +1,708 b''
1
1
2
2
3 ################################################################################
3 ################################################################################
4 ## RHODECODE COMMUNITY EDITION CONFIGURATION ##
4 ## RHODECODE COMMUNITY EDITION CONFIGURATION ##
5 # The %(here)s variable will be replaced with the parent directory of this file#
5 # The %(here)s variable will be replaced with the parent directory of this file#
6 ################################################################################
6 ################################################################################
7
7
8 [DEFAULT]
8 [DEFAULT]
9 debug = true
9 debug = true
10
10
11 ################################################################################
11 ################################################################################
12 ## EMAIL CONFIGURATION ##
12 ## EMAIL CONFIGURATION ##
13 ## Uncomment and replace with the email address which should receive ##
13 ## Uncomment and replace with the email address which should receive ##
14 ## any error reports after an application crash ##
14 ## any error reports after an application crash ##
15 ## Additionally these settings will be used by the RhodeCode mailing system ##
15 ## Additionally these settings will be used by the RhodeCode mailing system ##
16 ################################################################################
16 ################################################################################
17
17
18 ## prefix all emails subjects with given prefix, helps filtering out emails
18 ## prefix all emails subjects with given prefix, helps filtering out emails
19 #email_prefix = [RhodeCode]
19 #email_prefix = [RhodeCode]
20
20
21 ## email FROM address all mails will be sent
21 ## email FROM address all mails will be sent
22 #app_email_from = rhodecode-noreply@localhost
22 #app_email_from = rhodecode-noreply@localhost
23
23
24 ## Uncomment and replace with the address which should receive any error report
24 ## Uncomment and replace with the address which should receive any error report
25 ## note: using appenlight for error handling doesn't need this to be uncommented
25 ## note: using appenlight for error handling doesn't need this to be uncommented
26 #email_to = admin@localhost
26 #email_to = admin@localhost
27
27
28 ## in case of Application errors, send an error email from this address
28 ## in case of Application errors, send an error email from this address
29 #error_email_from = rhodecode_error@localhost
29 #error_email_from = rhodecode_error@localhost
30
30
31 ## additional error message to be sent in case of a server crash
31 ## additional error message to be sent in case of a server crash
32 #error_message =
32 #error_message =
33
33
34
34
35 #smtp_server = mail.server.com
35 #smtp_server = mail.server.com
36 #smtp_username =
36 #smtp_username =
37 #smtp_password =
37 #smtp_password =
38 #smtp_port =
38 #smtp_port =
39 #smtp_use_tls = false
39 #smtp_use_tls = false
40 #smtp_use_ssl = true
40 #smtp_use_ssl = true
41 ## Specify available auth parameters here (e.g. LOGIN PLAIN CRAM-MD5, etc.)
41 ## Specify available auth parameters here (e.g. LOGIN PLAIN CRAM-MD5, etc.)
42 #smtp_auth =
42 #smtp_auth =
43
43
44 [server:main]
44 [server:main]
45 ## COMMON ##
45 ## COMMON ##
46 host = 127.0.0.1
46 host = 127.0.0.1
47 port = 5000
47 port = 5000
48
48
49 ##################################
49 ##################################
50 ## WAITRESS WSGI SERVER ##
50 ## WAITRESS WSGI SERVER ##
51 ## Recommended for Development ##
51 ## Recommended for Development ##
52 ##################################
52 ##################################
53
53
54 #use = egg:waitress#main
54 #use = egg:waitress#main
55 ## number of worker threads
55 ## number of worker threads
56 #threads = 5
56 #threads = 5
57 ## MAX BODY SIZE 100GB
57 ## MAX BODY SIZE 100GB
58 #max_request_body_size = 107374182400
58 #max_request_body_size = 107374182400
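## (editor annotation: 107374182400 bytes = 100 * 1024^3, i.e. exactly 100GB)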
59 ## Use poll instead of select; fixes file descriptor limit problems.
59 ## Use poll instead of select; fixes file descriptor limit problems.
60 ## May not work on old Windows systems.
60 ## May not work on old Windows systems.
61 #asyncore_use_poll = true
61 #asyncore_use_poll = true
62
62
63
63
64 ##########################
64 ##########################
65 ## GUNICORN WSGI SERVER ##
65 ## GUNICORN WSGI SERVER ##
66 ##########################
66 ##########################
67 ## run with gunicorn --log-config rhodecode.ini --paste rhodecode.ini
67 ## run with gunicorn --log-config rhodecode.ini --paste rhodecode.ini
68
68
69 use = egg:gunicorn#main
69 use = egg:gunicorn#main
70 ## Sets the number of process workers. You must set `instance_id = *`
70 ## Sets the number of process workers. You must set `instance_id = *`
71 ## when this option is set to more than one worker, recommended
71 ## when this option is set to more than one worker, recommended
72 ## value is (2 * NUMBER_OF_CPUS + 1), eg 2CPU = 5 workers
72 ## value is (2 * NUMBER_OF_CPUS + 1), eg 2CPU = 5 workers
73 ## The `instance_id = *` must be set in the [app:main] section below
73 ## The `instance_id = *` must be set in the [app:main] section below
74 workers = 2
74 workers = 2
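## worked example (illustrative, not part of the original config): on a 4-CPU
## machine the formula above gives 2 * 4 + 1 = 9, i.e. `workers = 9` together
## with `instance_id = *` set in [app:main].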
75 ## number of threads for each worker, must be set to 1 for gevent
75 ## number of threads for each worker, must be set to 1 for gevent
76 ## generally recommended to be 1
76 ## generally recommended to be 1
77 #threads = 1
77 #threads = 1
78 ## process name
78 ## process name
79 proc_name = rhodecode
79 proc_name = rhodecode
80 ## type of worker class, one of sync, gevent
80 ## type of worker class, one of sync, gevent
81 ## for bigger setups, using a worker class other than sync is recommended
81 ## for bigger setups, using a worker class other than sync is recommended
82 worker_class = sync
82 worker_class = sync
83 ## The maximum number of simultaneous clients. Valid only for Gevent
83 ## The maximum number of simultaneous clients. Valid only for Gevent
84 #worker_connections = 10
84 #worker_connections = 10
85 ## max number of requests that worker will handle before being gracefully
85 ## max number of requests that worker will handle before being gracefully
86 ## restarted, could prevent memory leaks
86 ## restarted, could prevent memory leaks
87 max_requests = 1000
87 max_requests = 1000
88 max_requests_jitter = 30
88 max_requests_jitter = 30
89 ## amount of time a worker can spend handling a request before it
89 ## amount of time a worker can spend handling a request before it
90 ## gets killed and restarted. Set to 6 hours
90 ## gets killed and restarted. Set to 6 hours
91 timeout = 21600
91 timeout = 21600
92
92
93
93
94 ## prefix middleware for RhodeCode.
94 ## prefix middleware for RhodeCode.
95 ## recommended when using proxy setup.
95 ## recommended when using proxy setup.
96 ## allows setting RhodeCode under a URL prefix on the server.
96 ## allows setting RhodeCode under a URL prefix on the server.
97 ## eg https://server.com/custom_prefix. Enable `filter-with =` option below as well.
97 ## eg https://server.com/custom_prefix. Enable `filter-with =` option below as well.
98 ## And set your prefix like: `prefix = /custom_prefix`
98 ## And set your prefix like: `prefix = /custom_prefix`
99 ## be sure to also set beaker.session.cookie_path = /custom_prefix if you need
99 ## be sure to also set beaker.session.cookie_path = /custom_prefix if you need
100 ## to make your cookies only work on prefix url
100 ## to make your cookies only work on prefix url
101 [filter:proxy-prefix]
101 [filter:proxy-prefix]
102 use = egg:PasteDeploy#prefix
102 use = egg:PasteDeploy#prefix
103 prefix = /
103 prefix = /
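## example (illustrative): to serve RhodeCode at https://server.com/custom_prefix,
## set the prefix here, uncomment `filter-with = proxy-prefix` in [app:main] below,
## and set `beaker.session.cookie_path = /custom_prefix`:
#prefix = /custom_prefix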
104
104
105 [app:main]
105 [app:main]
106 use = egg:rhodecode-enterprise-ce
106 use = egg:rhodecode-enterprise-ce
107
107
108 ## enable proxy prefix middleware, defined above
108 ## enable proxy prefix middleware, defined above
109 #filter-with = proxy-prefix
109 #filter-with = proxy-prefix
110
110
111 ## encryption key used to encrypt social plugin tokens,
111 ## encryption key used to encrypt social plugin tokens,
112 ## remote_urls with credentials etc, if not set it defaults to
112 ## remote_urls with credentials etc, if not set it defaults to
113 ## `beaker.session.secret`
113 ## `beaker.session.secret`
114 #rhodecode.encrypted_values.secret =
114 #rhodecode.encrypted_values.secret =
115
115
116 ## decryption strict mode (enabled by default). It controls if decryption raises
116 ## decryption strict mode (enabled by default). It controls if decryption raises
117 ## `SignatureVerificationError` in case of wrong key, or damaged encryption data.
117 ## `SignatureVerificationError` in case of wrong key, or damaged encryption data.
118 #rhodecode.encrypted_values.strict = false
118 #rhodecode.encrypted_values.strict = false
119
119
120 ## return gzipped responses from Rhodecode (static files/application)
120 ## return gzipped responses from Rhodecode (static files/application)
121 gzip_responses = false
121 gzip_responses = false
122
122
123 ## autogenerate javascript routes file on startup
123 ## autogenerate javascript routes file on startup
124 generate_js_files = false
124 generate_js_files = false
125
125
126 ## Optional Languages
126 ## Optional Languages
127 ## en(default), be, de, es, fr, it, ja, pl, pt, ru, zh
127 ## en(default), be, de, es, fr, it, ja, pl, pt, ru, zh
128 lang = en
128 lang = en
129
129
130 ## perform a full repository scan on each server start, this should be
130 ## perform a full repository scan on each server start, this should be
131 ## set to false after first startup, to allow faster server restarts.
131 ## set to false after first startup, to allow faster server restarts.
132 startup.import_repos = false
132 startup.import_repos = false
133
133
134 ## Uncomment and set this path to use archive download cache.
134 ## Uncomment and set this path to use archive download cache.
135 ## Once enabled, generated archives will be cached at this location
135 ## Once enabled, generated archives will be cached at this location
136 ## and served from the cache during subsequent requests for the same archive of
136 ## and served from the cache during subsequent requests for the same archive of
137 ## the repository.
137 ## the repository.
138 #archive_cache_dir = /tmp/tarballcache
138 #archive_cache_dir = /tmp/tarballcache
139
139
140 ## change this to unique ID for security
140 ## change this to unique ID for security
141 app_instance_uuid = rc-production
141 app_instance_uuid = rc-production
142
142
143 ## cut off limit for large diffs (size in bytes)
143 ## cut off limit for large diffs (size in bytes). If the overall diff size of a
144 cut_off_limit_diff = 1024000
144 ## commit or pull request exceeds this limit, the diff will be displayed
145 cut_off_limit_file = 256000
145 ## partially. E.g. 512000 == 512KB
146 cut_off_limit_diff = 512000
147
148 ## cut off limit for large files inside diffs (size in bytes). Each individual
149 ## file inside the diff which exceeds this limit will be displayed partially.
150 ## E.g. 128000 == 128KB
151 cut_off_limit_file = 128000
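## worked example (illustrative): with the values above, a commit whose combined
## diff is 600000 bytes (~586KB) is rendered partially because it exceeds
## cut_off_limit_diff = 512000, while a single 100000-byte file in a smaller diff
## stays below cut_off_limit_file = 128000 and is shown in full.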
146
152
147 ## use cache version of scm repo everywhere
153 ## use cache version of scm repo everywhere
148 vcs_full_cache = true
154 vcs_full_cache = true
149
155
150 ## force https in RhodeCode, fixes https redirects, assumes it's always https
156 ## force https in RhodeCode, fixes https redirects, assumes it's always https
151 ## Normally this is controlled by proper http flags sent from http server
157 ## Normally this is controlled by proper http flags sent from http server
152 force_https = false
158 force_https = false
153
159
154 ## use Strict-Transport-Security headers
160 ## use Strict-Transport-Security headers
155 use_htsts = false
161 use_htsts = false
156
162
157 ## number of commits stats will parse on each iteration
163 ## number of commits stats will parse on each iteration
158 commit_parse_limit = 25
164 commit_parse_limit = 25
159
165
160 ## git rev filter option, --all is the default filter, if you need to
166 ## git rev filter option, --all is the default filter, if you need to
161 ## hide all refs in changelog switch this to --branches --tags
167 ## hide all refs in changelog switch this to --branches --tags
162 git_rev_filter = --branches --tags
168 git_rev_filter = --branches --tags
163
169
164 # Set to true if your repos are exposed using the dumb protocol
170 # Set to true if your repos are exposed using the dumb protocol
165 git_update_server_info = false
171 git_update_server_info = false
166
172
167 ## RSS/ATOM feed options
173 ## RSS/ATOM feed options
168 rss_cut_off_limit = 256000
174 rss_cut_off_limit = 256000
169 rss_items_per_page = 10
175 rss_items_per_page = 10
170 rss_include_diff = false
176 rss_include_diff = false
171
177
172 ## gist URL alias, used to create nicer urls for gist. This should be an
178 ## gist URL alias, used to create nicer urls for gist. This should be an
173 ## url that does rewrites to _admin/gists/{gistid}.
179 ## url that does rewrites to _admin/gists/{gistid}.
174 ## example: http://gist.rhodecode.org/{gistid}. Empty means use the internal
180 ## example: http://gist.rhodecode.org/{gistid}. Empty means use the internal
175 ## RhodeCode url, ie. http[s]://rhodecode.server/_admin/gists/{gistid}
181 ## RhodeCode url, ie. http[s]://rhodecode.server/_admin/gists/{gistid}
176 gist_alias_url =
182 gist_alias_url =
177
183
178 ## List of views (using glob pattern syntax) that AUTH TOKENS could be
184 ## List of views (using glob pattern syntax) that AUTH TOKENS could be
179 ## used for access.
185 ## used for access.
180 ## Adding ?auth_token=TOKEN_HASH to the url authenticates this request as if it
186 ## Adding ?auth_token=TOKEN_HASH to the url authenticates this request as if it
181 ## came from the logged-in user who owns this authentication token.
187 ## came from the logged-in user who owns this authentication token.
182 ## Additionally the @TOKEN syntax can be used to bind the view to a specific
188 ## Additionally the @TOKEN syntax can be used to bind the view to a specific
183 ## authentication token. Such a view would only be accessible when used together
189 ## authentication token. Such a view would only be accessible when used together
184 ## with this authentication token
190 ## with this authentication token
185 ##
191 ##
186 ## list of all views can be found under `/_admin/permissions/auth_token_access`
192 ## list of all views can be found under `/_admin/permissions/auth_token_access`
187 ## The list should be "," separated and on a single line.
193 ## The list should be "," separated and on a single line.
188 ##
194 ##
189 ## Most common views to enable:
195 ## Most common views to enable:
190 # RepoCommitsView:repo_commit_download
196 # RepoCommitsView:repo_commit_download
191 # RepoCommitsView:repo_commit_patch
197 # RepoCommitsView:repo_commit_patch
192 # RepoCommitsView:repo_commit_raw
198 # RepoCommitsView:repo_commit_raw
193 # RepoCommitsView:repo_commit_raw@TOKEN
199 # RepoCommitsView:repo_commit_raw@TOKEN
194 # RepoFilesView:repo_files_diff
200 # RepoFilesView:repo_files_diff
195 # RepoFilesView:repo_archivefile
201 # RepoFilesView:repo_archivefile
196 # RepoFilesView:repo_file_raw
202 # RepoFilesView:repo_file_raw
197 # GistView:*
203 # GistView:*
198 api_access_controllers_whitelist =
204 api_access_controllers_whitelist =
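## example (illustrative, built from the views listed above): allow raw commit
## access only with a dedicated token and open up all gist views:
#api_access_controllers_whitelist = RepoCommitsView:repo_commit_raw@TOKEN, GistView:*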
199
205
200 ## default encoding used to convert from and to unicode
206 ## default encoding used to convert from and to unicode
201 ## can also be a comma-separated list of encodings in case of mixed encodings
207 ## can also be a comma-separated list of encodings in case of mixed encodings
202 default_encoding = UTF-8
208 default_encoding = UTF-8
203
209
204 ## instance-id prefix
210 ## instance-id prefix
205 ## a prefix key for this instance used for cache invalidation when running
211 ## a prefix key for this instance used for cache invalidation when running
206 ## multiple instances of rhodecode, make sure it's globally unique for
212 ## multiple instances of rhodecode, make sure it's globally unique for
207 ## all running rhodecode instances. Leave empty if you don't use it
213 ## all running rhodecode instances. Leave empty if you don't use it
208 instance_id =
214 instance_id =
209
215
210 ## Fallback authentication plugin. Set this to a plugin ID to force the usage
216 ## Fallback authentication plugin. Set this to a plugin ID to force the usage
211 ## of an authentication plugin even if it is disabled by its settings.
217 ## of an authentication plugin even if it is disabled by its settings.
212 ## This could be useful if you are unable to log in to the system due to broken
218 ## This could be useful if you are unable to log in to the system due to broken
213 ## authentication settings. Then you can enable e.g. the internal rhodecode auth
219 ## authentication settings. Then you can enable e.g. the internal rhodecode auth
214 ## module to log in again and fix the settings.
220 ## module to log in again and fix the settings.
215 ##
221 ##
216 ## Available builtin plugin IDs (hash is part of the ID):
222 ## Available builtin plugin IDs (hash is part of the ID):
217 ## egg:rhodecode-enterprise-ce#rhodecode
223 ## egg:rhodecode-enterprise-ce#rhodecode
218 ## egg:rhodecode-enterprise-ce#pam
224 ## egg:rhodecode-enterprise-ce#pam
219 ## egg:rhodecode-enterprise-ce#ldap
225 ## egg:rhodecode-enterprise-ce#ldap
220 ## egg:rhodecode-enterprise-ce#jasig_cas
226 ## egg:rhodecode-enterprise-ce#jasig_cas
221 ## egg:rhodecode-enterprise-ce#headers
227 ## egg:rhodecode-enterprise-ce#headers
222 ## egg:rhodecode-enterprise-ce#crowd
228 ## egg:rhodecode-enterprise-ce#crowd
223 #rhodecode.auth_plugin_fallback = egg:rhodecode-enterprise-ce#rhodecode
229 #rhodecode.auth_plugin_fallback = egg:rhodecode-enterprise-ce#rhodecode
224
230
225 ## alternative return HTTP header for failed authentication. Default HTTP
231 ## alternative return HTTP header for failed authentication. Default HTTP
226 ## response is 401 HTTPUnauthorized. Currently HG clients have trouble
232 ## response is 401 HTTPUnauthorized. Currently HG clients have trouble
227 ## handling that, causing a series of failed authentication calls.
233 ## handling that, causing a series of failed authentication calls.
228 ## Set this variable to 403 to return HTTPForbidden, or any other HTTP code
234 ## Set this variable to 403 to return HTTPForbidden, or any other HTTP code
229 ## This will be served instead of the default 401 on bad authentication
235 ## This will be served instead of the default 401 on bad authentication
230 auth_ret_code =
236 auth_ret_code =
231
237
232 ## use special detection method when serving auth_ret_code, instead of serving
238 ## use special detection method when serving auth_ret_code, instead of serving
233 ## ret_code directly, use 401 initially (Which triggers credentials prompt)
239 ## ret_code directly, use 401 initially (Which triggers credentials prompt)
234 ## and then serve auth_ret_code to clients
240 ## and then serve auth_ret_code to clients
235 auth_ret_code_detection = false
241 auth_ret_code_detection = false
236
242
237 ## locking return code. When repository is locked return this HTTP code. 2XX
243 ## locking return code. When repository is locked return this HTTP code. 2XX
238 ## codes don't break the transactions while 4XX codes do
244 ## codes don't break the transactions while 4XX codes do
239 lock_ret_code = 423
245 lock_ret_code = 423
240
246
241 ## allows changing the repository location in the settings page
247 ## allows changing the repository location in the settings page
242 allow_repo_location_change = true
248 allow_repo_location_change = true
243
249
244 ## allows setting up custom hooks in the settings page
250 ## allows setting up custom hooks in the settings page
245 allow_custom_hooks_settings = true
251 allow_custom_hooks_settings = true
246
252
247 ## generated license token; go to the license page in RhodeCode settings to obtain
253 ## generated license token; go to the license page in RhodeCode settings to obtain
248 ## new token
254 ## new token
249 license_token =
255 license_token =
250
256
251 ## supervisor connection uri, for managing supervisor and logs.
257 ## supervisor connection uri, for managing supervisor and logs.
252 supervisor.uri =
258 supervisor.uri =
253 ## supervisord group name/id we only want this RC instance to handle
259 ## supervisord group name/id we only want this RC instance to handle
254 supervisor.group_id = prod
260 supervisor.group_id = prod
255
261
256 ## Display extended labs settings
262 ## Display extended labs settings
257 labs_settings_active = true
263 labs_settings_active = true
258
264
259 ####################################
265 ####################################
260 ### CELERY CONFIG ####
266 ### CELERY CONFIG ####
261 ####################################
267 ####################################
262 use_celery = false
268 use_celery = false
263 broker.host = localhost
269 broker.host = localhost
264 broker.vhost = rabbitmqhost
270 broker.vhost = rabbitmqhost
265 broker.port = 5672
271 broker.port = 5672
266 broker.user = rabbitmq
272 broker.user = rabbitmq
267 broker.password = qweqwe
273 broker.password = qweqwe
268
274
269 celery.imports = rhodecode.lib.celerylib.tasks
275 celery.imports = rhodecode.lib.celerylib.tasks
270
276
271 celery.result.backend = amqp
277 celery.result.backend = amqp
272 celery.result.dburi = amqp://
278 celery.result.dburi = amqp://
273 celery.result.serialier = json
279 celery.result.serialier = json
274
280
275 #celery.send.task.error.emails = true
281 #celery.send.task.error.emails = true
276 #celery.amqp.task.result.expires = 18000
282 #celery.amqp.task.result.expires = 18000
277
283
278 celeryd.concurrency = 2
284 celeryd.concurrency = 2
279 #celeryd.log.file = celeryd.log
285 #celeryd.log.file = celeryd.log
280 celeryd.log.level = debug
286 celeryd.log.level = debug
281 celeryd.max.tasks.per.child = 1
287 celeryd.max.tasks.per.child = 1
282
288
283 ## tasks will never be sent to the queue, but executed locally instead.
289 ## tasks will never be sent to the queue, but executed locally instead.
284 celery.always.eager = false
290 celery.always.eager = false
285
291
286 ####################################
292 ####################################
287 ### BEAKER CACHE ####
293 ### BEAKER CACHE ####
288 ####################################
294 ####################################
289 # default cache dir for templates. Putting this into a ramdisk
295 # default cache dir for templates. Putting this into a ramdisk
290 ## can boost performance, eg. %(here)s/data_ramdisk
296 ## can boost performance, eg. %(here)s/data_ramdisk
291 cache_dir = %(here)s/data
297 cache_dir = %(here)s/data
292
298
293 ## locking and default file storage for Beaker. Putting this into a ramdisk
299 ## locking and default file storage for Beaker. Putting this into a ramdisk
294 ## can boost performance, eg. %(here)s/data_ramdisk/cache/beaker_data
300 ## can boost performance, eg. %(here)s/data_ramdisk/cache/beaker_data
295 beaker.cache.data_dir = %(here)s/data/cache/beaker_data
301 beaker.cache.data_dir = %(here)s/data/cache/beaker_data
296 beaker.cache.lock_dir = %(here)s/data/cache/beaker_lock
302 beaker.cache.lock_dir = %(here)s/data/cache/beaker_lock
297
303
298 beaker.cache.regions = super_short_term, short_term, long_term, sql_cache_short, auth_plugins, repo_cache_long
304 beaker.cache.regions = super_short_term, short_term, long_term, sql_cache_short, auth_plugins, repo_cache_long
299
305
300 beaker.cache.super_short_term.type = memory
306 beaker.cache.super_short_term.type = memory
301 beaker.cache.super_short_term.expire = 10
307 beaker.cache.super_short_term.expire = 10
302 beaker.cache.super_short_term.key_length = 256
308 beaker.cache.super_short_term.key_length = 256
303
309
304 beaker.cache.short_term.type = memory
310 beaker.cache.short_term.type = memory
305 beaker.cache.short_term.expire = 60
311 beaker.cache.short_term.expire = 60
306 beaker.cache.short_term.key_length = 256
312 beaker.cache.short_term.key_length = 256
307
313
308 beaker.cache.long_term.type = memory
314 beaker.cache.long_term.type = memory
309 beaker.cache.long_term.expire = 36000
315 beaker.cache.long_term.expire = 36000
310 beaker.cache.long_term.key_length = 256
316 beaker.cache.long_term.key_length = 256
311
317
312 beaker.cache.sql_cache_short.type = memory
318 beaker.cache.sql_cache_short.type = memory
313 beaker.cache.sql_cache_short.expire = 10
319 beaker.cache.sql_cache_short.expire = 10
314 beaker.cache.sql_cache_short.key_length = 256
320 beaker.cache.sql_cache_short.key_length = 256
315
321
316 ## default is memory cache, configure only if required
322 ## default is memory cache, configure only if required
317 ## using multi-node or multi-worker setup
323 ## using multi-node or multi-worker setup
318 #beaker.cache.auth_plugins.type = ext:database
324 #beaker.cache.auth_plugins.type = ext:database
319 #beaker.cache.auth_plugins.lock_dir = %(here)s/data/cache/auth_plugin_lock
325 #beaker.cache.auth_plugins.lock_dir = %(here)s/data/cache/auth_plugin_lock
320 #beaker.cache.auth_plugins.url = postgresql://postgres:secret@localhost/rhodecode
326 #beaker.cache.auth_plugins.url = postgresql://postgres:secret@localhost/rhodecode
321 #beaker.cache.auth_plugins.url = mysql://root:secret@127.0.0.1/rhodecode
327 #beaker.cache.auth_plugins.url = mysql://root:secret@127.0.0.1/rhodecode
322 #beaker.cache.auth_plugins.sa.pool_recycle = 3600
328 #beaker.cache.auth_plugins.sa.pool_recycle = 3600
323 #beaker.cache.auth_plugins.sa.pool_size = 10
329 #beaker.cache.auth_plugins.sa.pool_size = 10
324 #beaker.cache.auth_plugins.sa.max_overflow = 0
330 #beaker.cache.auth_plugins.sa.max_overflow = 0
325
331
326 beaker.cache.repo_cache_long.type = memorylru_base
332 beaker.cache.repo_cache_long.type = memorylru_base
327 beaker.cache.repo_cache_long.max_items = 4096
333 beaker.cache.repo_cache_long.max_items = 4096
328 beaker.cache.repo_cache_long.expire = 2592000
334 beaker.cache.repo_cache_long.expire = 2592000
329
335
330 ## default is memorylru_base cache, configure only if required
336 ## default is memorylru_base cache, configure only if required
331 ## using multi-node or multi-worker setup
337 ## using multi-node or multi-worker setup
332 #beaker.cache.repo_cache_long.type = ext:memcached
338 #beaker.cache.repo_cache_long.type = ext:memcached
333 #beaker.cache.repo_cache_long.url = localhost:11211
339 #beaker.cache.repo_cache_long.url = localhost:11211
334 #beaker.cache.repo_cache_long.expire = 1209600
340 #beaker.cache.repo_cache_long.expire = 1209600
335 #beaker.cache.repo_cache_long.key_length = 256
341 #beaker.cache.repo_cache_long.key_length = 256
336
342
337 ####################################
343 ####################################
338 ### BEAKER SESSION ####
344 ### BEAKER SESSION ####
339 ####################################
345 ####################################
340
346
341 ## .session.type is the type of storage used for the session; currently allowed
347 ## .session.type is the type of storage used for the session; currently allowed
342 ## types are file, ext:memcached, ext:database, and memory (default).
348 ## types are file, ext:memcached, ext:database, and memory (default).
343 beaker.session.type = file
349 beaker.session.type = file
344 beaker.session.data_dir = %(here)s/data/sessions/data
350 beaker.session.data_dir = %(here)s/data/sessions/data
345
351
346 ## db based session, fast, and allows easy management of logged-in users
352 ## db based session, fast, and allows easy management of logged-in users
347 #beaker.session.type = ext:database
353 #beaker.session.type = ext:database
348 #beaker.session.table_name = db_session
354 #beaker.session.table_name = db_session
349 #beaker.session.sa.url = postgresql://postgres:secret@localhost/rhodecode
355 #beaker.session.sa.url = postgresql://postgres:secret@localhost/rhodecode
350 #beaker.session.sa.url = mysql://root:secret@127.0.0.1/rhodecode
356 #beaker.session.sa.url = mysql://root:secret@127.0.0.1/rhodecode
351 #beaker.session.sa.pool_recycle = 3600
357 #beaker.session.sa.pool_recycle = 3600
352 #beaker.session.sa.echo = false
358 #beaker.session.sa.echo = false
353
359
354 beaker.session.key = rhodecode
360 beaker.session.key = rhodecode
355 beaker.session.secret = production-rc-uytcxaz
361 beaker.session.secret = production-rc-uytcxaz
356 beaker.session.lock_dir = %(here)s/data/sessions/lock
362 beaker.session.lock_dir = %(here)s/data/sessions/lock
357
363
358 ## Secure encrypted cookie. Requires AES and AES python libraries
364 ## Secure encrypted cookie. Requires AES and AES python libraries
359 ## you must disable beaker.session.secret to use this
365 ## you must disable beaker.session.secret to use this
360 #beaker.session.encrypt_key = key_for_encryption
366 #beaker.session.encrypt_key = key_for_encryption
361 #beaker.session.validate_key = validation_key
367 #beaker.session.validate_key = validation_key
362
368
363 ## sets the session as invalid (also logging out the user) if it has not been
369 ## sets the session as invalid (also logging out the user) if it has not been
364 ## accessed for the given amount of time in seconds
370 ## accessed for the given amount of time in seconds
365 beaker.session.timeout = 2592000
371 beaker.session.timeout = 2592000
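## (editor annotation: 2592000 seconds = 30 days)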
366 beaker.session.httponly = true
372 beaker.session.httponly = true
367 ## Path to use for the cookie. Set to prefix if you use prefix middleware
373 ## Path to use for the cookie. Set to prefix if you use prefix middleware
368 #beaker.session.cookie_path = /custom_prefix
374 #beaker.session.cookie_path = /custom_prefix
369
375
370 ## uncomment for https secure cookie
376 ## uncomment for https secure cookie
371 beaker.session.secure = false
377 beaker.session.secure = false
372
378
373 ## auto-save the session so that calling .save() is not required
379 ## auto-save the session so that calling .save() is not required
374 beaker.session.auto = false
380 beaker.session.auto = false
375
381
376 ## default cookie expiration time in seconds; set to `true` to expire
382 ## default cookie expiration time in seconds; set to `true` to expire
377 ## at browser close
383 ## at browser close
378 #beaker.session.cookie_expires = 3600
384 #beaker.session.cookie_expires = 3600
379
385
380 ###################################
386 ###################################
381 ## SEARCH INDEXING CONFIGURATION ##
387 ## SEARCH INDEXING CONFIGURATION ##
382 ###################################
388 ###################################
383 ## Full text search indexer is available in rhodecode-tools under
389 ## Full text search indexer is available in rhodecode-tools under
384 ## `rhodecode-tools index` command
390 ## `rhodecode-tools index` command
385
391
386 ## WHOOSH Backend, doesn't require additional services to run
392 ## WHOOSH Backend, doesn't require additional services to run
387 ## it works well with a few dozen repos
393 ## it works well with a few dozen repos
388 search.module = rhodecode.lib.index.whoosh
394 search.module = rhodecode.lib.index.whoosh
389 search.location = %(here)s/data/index
395 search.location = %(here)s/data/index
390
396
391 ########################################
397 ########################################
392 ### CHANNELSTREAM CONFIG ####
398 ### CHANNELSTREAM CONFIG ####
393 ########################################
399 ########################################
394 ## channelstream enables persistent connections and live notifications
400 ## channelstream enables persistent connections and live notifications
395 ## in the system. It's also used by the chat system
401 ## in the system. It's also used by the chat system
396 channelstream.enabled = false
402 channelstream.enabled = false
397
403
398 ## server address for channelstream server on the backend
404 ## server address for channelstream server on the backend
399 channelstream.server = 127.0.0.1:9800
405 channelstream.server = 127.0.0.1:9800
400
406
401 ## location of the channelstream server from outside world
407 ## location of the channelstream server from outside world
402 ## use ws:// for http or wss:// for https. This address needs to be handled
408 ## use ws:// for http or wss:// for https. This address needs to be handled
403 ## by external HTTP server such as Nginx or Apache
409 ## by external HTTP server such as Nginx or Apache
404 ## see nginx/apache configuration examples in our docs
410 ## see nginx/apache configuration examples in our docs
405 channelstream.ws_url = ws://rhodecode.yourserver.com/_channelstream
411 channelstream.ws_url = ws://rhodecode.yourserver.com/_channelstream
406 channelstream.secret = secret
412 channelstream.secret = secret
407 channelstream.history.location = %(here)s/channelstream_history
413 channelstream.history.location = %(here)s/channelstream_history
408
414
409 ## Internal application path that JavaScript uses to connect to.
415 ## Internal application path that JavaScript uses to connect to.
410 ## If you use proxy-prefix the prefix should be added before /_channelstream
416 ## If you use proxy-prefix the prefix should be added before /_channelstream
411 channelstream.proxy_path = /_channelstream
417 channelstream.proxy_path = /_channelstream
412
418
413
419
414 ###################################
420 ###################################
415 ## APPENLIGHT CONFIG ##
421 ## APPENLIGHT CONFIG ##
416 ###################################
422 ###################################
417
423
418 ## Appenlight is tailored to work with RhodeCode, see
424 ## Appenlight is tailored to work with RhodeCode, see
419 ## http://appenlight.com for details how to obtain an account
425 ## http://appenlight.com for details how to obtain an account
420
426
421 ## appenlight integration enabled
427 ## appenlight integration enabled
422 appenlight = false
428 appenlight = false
423
429
424 appenlight.server_url = https://api.appenlight.com
430 appenlight.server_url = https://api.appenlight.com
425 appenlight.api_key = YOUR_API_KEY
431 appenlight.api_key = YOUR_API_KEY
426 #appenlight.transport_config = https://api.appenlight.com?threaded=1&timeout=5
432 #appenlight.transport_config = https://api.appenlight.com?threaded=1&timeout=5
427
433
428 # used for JS client
434 # used for JS client
429 appenlight.api_public_key = YOUR_API_PUBLIC_KEY
435 appenlight.api_public_key = YOUR_API_PUBLIC_KEY
430
436
431 ## TWEAK AMOUNT OF INFO SENT HERE
437 ## TWEAK AMOUNT OF INFO SENT HERE
432
438
433 ## enables 404 error logging (default False)
439 ## enables 404 error logging (default False)
434 appenlight.report_404 = false
440 appenlight.report_404 = false
435
441
436 ## time in seconds after which a request is considered slow (default 1)
442 ## time in seconds after which a request is considered slow (default 1)
437 appenlight.slow_request_time = 1
443 appenlight.slow_request_time = 1
438
444
439 ## record slow requests in application
445 ## record slow requests in application
440 ## (needs to be enabled for slow datastore recording and time tracking)
446 ## (needs to be enabled for slow datastore recording and time tracking)
441 appenlight.slow_requests = true
447 appenlight.slow_requests = true
442
448
443 ## enable hooking to application loggers
449 ## enable hooking to application loggers
444 appenlight.logging = true
450 appenlight.logging = true
445
451
446 ## minimum log level for log capture
452 ## minimum log level for log capture
447 appenlight.logging.level = WARNING
453 appenlight.logging.level = WARNING
448
454
449 ## send logs only from erroneous/slow requests
455 ## send logs only from erroneous/slow requests
450 ## (saves API quota for intensive logging)
456 ## (saves API quota for intensive logging)
451 appenlight.logging_on_error = false
457 appenlight.logging_on_error = false
452
458
453 ## list of additional keywords that should be grabbed from the environ object
459 ## list of additional keywords that should be grabbed from the environ object
454 ## can be string with comma separated list of words in lowercase
460 ## can be string with comma separated list of words in lowercase
455 ## (by default client will always send following info:
461 ## (by default client will always send following info:
456 ## 'REMOTE_USER', 'REMOTE_ADDR', 'SERVER_NAME', 'CONTENT_TYPE' + all keys that
462 ## 'REMOTE_USER', 'REMOTE_ADDR', 'SERVER_NAME', 'CONTENT_TYPE' + all keys that
457 ## start with HTTP*; this list can be extended with additional keywords here
463 ## start with HTTP*; this list can be extended with additional keywords here
458 appenlight.environ_keys_whitelist =
464 appenlight.environ_keys_whitelist =
459
465
460 ## list of keywords that should be blanked from request object
466 ## list of keywords that should be blanked from request object
461 ## can be string with comma separated list of words in lowercase
467 ## can be string with comma separated list of words in lowercase
462 ## (by default client will always blank keys that contain following words
468 ## (by default client will always blank keys that contain following words
463 ## 'password', 'passwd', 'pwd', 'auth_tkt', 'secret', 'csrf'
469 ## 'password', 'passwd', 'pwd', 'auth_tkt', 'secret', 'csrf'
464 ## this list can be extended with additional keywords set here
470 ## this list can be extended with additional keywords set here
465 appenlight.request_keys_blacklist =
471 appenlight.request_keys_blacklist =
466
472
467 ## list of namespaces that should be ignored when gathering log entries
473 ## list of namespaces that should be ignored when gathering log entries
468 ## can be string with comma separated list of namespaces
474 ## can be string with comma separated list of namespaces
469 ## (by default the client ignores own entries: appenlight_client.client)
475 ## (by default the client ignores own entries: appenlight_client.client)
470 appenlight.log_namespace_blacklist =
476 appenlight.log_namespace_blacklist =
471
477
472
478
473 ################################################################################
479 ################################################################################
474 ## WARNING: *THE LINE BELOW MUST BE UNCOMMENTED ON A PRODUCTION ENVIRONMENT* ##
480 ## WARNING: *THE LINE BELOW MUST BE UNCOMMENTED ON A PRODUCTION ENVIRONMENT* ##
475 ## Debug mode will enable the interactive debugging tool, allowing ANYONE to ##
481 ## Debug mode will enable the interactive debugging tool, allowing ANYONE to ##
476 ## execute malicious code after an exception is raised. ##
482 ## execute malicious code after an exception is raised. ##
477 ################################################################################
483 ################################################################################
478 set debug = false
484 set debug = false
479
485
480
486
481 ###########################################
487 ###########################################
482 ### MAIN RHODECODE DATABASE CONFIG ###
488 ### MAIN RHODECODE DATABASE CONFIG ###
483 ###########################################
489 ###########################################
484 #sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode.db?timeout=30
490 #sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode.db?timeout=30
485 #sqlalchemy.db1.url = postgresql://postgres:qweqwe@localhost/rhodecode
491 #sqlalchemy.db1.url = postgresql://postgres:qweqwe@localhost/rhodecode
486 #sqlalchemy.db1.url = mysql://root:qweqwe@localhost/rhodecode
492 #sqlalchemy.db1.url = mysql://root:qweqwe@localhost/rhodecode
487 sqlalchemy.db1.url = postgresql://postgres:qweqwe@localhost/rhodecode
493 sqlalchemy.db1.url = postgresql://postgres:qweqwe@localhost/rhodecode
488
494
489 # see sqlalchemy docs for other advanced settings
495 # see sqlalchemy docs for other advanced settings
490
496
491 ## print the sql statements to output
497 ## print the sql statements to output
492 sqlalchemy.db1.echo = false
498 sqlalchemy.db1.echo = false
493 ## recycle the connections after this amount of seconds
499 ## recycle the connections after this amount of seconds
494 sqlalchemy.db1.pool_recycle = 3600
500 sqlalchemy.db1.pool_recycle = 3600
495 sqlalchemy.db1.convert_unicode = true
501 sqlalchemy.db1.convert_unicode = true
496
502
497 ## the number of connections to keep open inside the connection pool.
503 ## the number of connections to keep open inside the connection pool.
498 ## 0 indicates no limit
504 ## 0 indicates no limit
499 #sqlalchemy.db1.pool_size = 5
505 #sqlalchemy.db1.pool_size = 5
500
506
501 ## the number of connections to allow in connection pool "overflow", that is
507 ## the number of connections to allow in connection pool "overflow", that is
502 ## connections that can be opened above and beyond the pool_size setting,
508 ## connections that can be opened above and beyond the pool_size setting,
503 ## which defaults to five.
509 ## which defaults to five.
504 #sqlalchemy.db1.max_overflow = 10
510 #sqlalchemy.db1.max_overflow = 10
505
511
506
512
507 ##################
513 ##################
508 ### VCS CONFIG ###
514 ### VCS CONFIG ###
509 ##################
515 ##################
510 vcs.server.enable = true
516 vcs.server.enable = true
511 vcs.server = localhost:9900
517 vcs.server = localhost:9900
512
518
513 ## Web server connectivity protocol, responsible for web-based VCS operations
519 ## Web server connectivity protocol, responsible for web-based VCS operations
514 ## Available protocols are:
520 ## Available protocols are:
515 ## `http` - use http-rpc backend (default)
521 ## `http` - use http-rpc backend (default)
516 vcs.server.protocol = http
522 vcs.server.protocol = http
517
523
518 ## Push/Pull operations protocol, available options are:
524 ## Push/Pull operations protocol, available options are:
519 ## `http` - use http-rpc backend (default)
525 ## `http` - use http-rpc backend (default)
520 ##
526 ##
521 vcs.scm_app_implementation = http
527 vcs.scm_app_implementation = http
522
528
523 ## Push/Pull operations hooks protocol, available options are:
529 ## Push/Pull operations hooks protocol, available options are:
524 ## `http` - use http-rpc backend (default)
530 ## `http` - use http-rpc backend (default)
525 vcs.hooks.protocol = http
531 vcs.hooks.protocol = http
526
532
527 vcs.server.log_level = info
533 vcs.server.log_level = info
528 ## Start VCSServer with this instance as a subprocess, useful for development
534 ## Start VCSServer with this instance as a subprocess, useful for development
529 vcs.start_server = false
535 vcs.start_server = false
530
536
531 ## List of enabled VCS backends, available options are:
537 ## List of enabled VCS backends, available options are:
532 ## `hg` - mercurial
538 ## `hg` - mercurial
533 ## `git` - git
539 ## `git` - git
534 ## `svn` - subversion
540 ## `svn` - subversion
535 vcs.backends = hg, git, svn
541 vcs.backends = hg, git, svn
536
542
537 vcs.connection_timeout = 3600
543 vcs.connection_timeout = 3600
538 ## Compatibility version when creating SVN repositories. Defaults to newest version when commented out.
544 ## Compatibility version when creating SVN repositories. Defaults to newest version when commented out.
539 ## Available options are: pre-1.4-compatible, pre-1.5-compatible, pre-1.6-compatible, pre-1.8-compatible, pre-1.9-compatible
545 ## Available options are: pre-1.4-compatible, pre-1.5-compatible, pre-1.6-compatible, pre-1.8-compatible, pre-1.9-compatible
540 #vcs.svn.compatible_version = pre-1.8-compatible
546 #vcs.svn.compatible_version = pre-1.8-compatible
541
547
542
548
543 ############################################################
549 ############################################################
544 ### Subversion proxy support (mod_dav_svn) ###
550 ### Subversion proxy support (mod_dav_svn) ###
545 ### Maps RhodeCode repo groups into SVN paths for Apache ###
551 ### Maps RhodeCode repo groups into SVN paths for Apache ###
546 ############################################################
552 ############################################################
547 ## Enable or disable the config file generation.
553 ## Enable or disable the config file generation.
548 svn.proxy.generate_config = false
554 svn.proxy.generate_config = false
549 ## Generate config file with `SVNListParentPath` set to `On`.
555 ## Generate config file with `SVNListParentPath` set to `On`.
550 svn.proxy.list_parent_path = true
556 svn.proxy.list_parent_path = true
551 ## Set location and file name of generated config file.
557 ## Set location and file name of generated config file.
552 svn.proxy.config_file_path = %(here)s/mod_dav_svn.conf
558 svn.proxy.config_file_path = %(here)s/mod_dav_svn.conf
553 ## Used as a prefix to the `Location` block in the generated config file.
559 ## Used as a prefix to the `Location` block in the generated config file.
554 ## In most cases it should be set to `/`.
560 ## In most cases it should be set to `/`.
555 svn.proxy.location_root = /
561 svn.proxy.location_root = /
556 ## Command to reload the mod dav svn configuration on change.
562 ## Command to reload the mod dav svn configuration on change.
557 ## Example: `/etc/init.d/apache2 reload`
563 ## Example: `/etc/init.d/apache2 reload`
558 #svn.proxy.reload_cmd = /etc/init.d/apache2 reload
564 #svn.proxy.reload_cmd = /etc/init.d/apache2 reload
559 ## If the timeout expires before the reload command finishes, the command will
565 ## If the timeout expires before the reload command finishes, the command will
560 ## be killed. Setting it to zero means no timeout. Defaults to 10 seconds.
566 ## be killed. Setting it to zero means no timeout. Defaults to 10 seconds.
561 #svn.proxy.reload_timeout = 10
567 #svn.proxy.reload_timeout = 10
562
568
563 ############################################################
569 ############################################################
564 ### SSH Support Settings ###
570 ### SSH Support Settings ###
565 ############################################################
571 ############################################################
566
572
567 ## Defines if the authorized_keys file should be written on any change of
573 ## Defines if the authorized_keys file should be written on any change of
568 ## user ssh keys; setting this to false also disables the possibility of adding
574 ## user ssh keys; setting this to false also disables the possibility of adding
569 ## ssh keys for users from the web interface.
575 ## ssh keys for users from the web interface.
570 ssh.generate_authorized_keyfile = false
576 ssh.generate_authorized_keyfile = false
571
577
572 ## Options for ssh, default is `no-pty,no-port-forwarding,no-X11-forwarding,no-agent-forwarding`
578 ## Options for ssh, default is `no-pty,no-port-forwarding,no-X11-forwarding,no-agent-forwarding`
573 # ssh.authorized_keys_ssh_opts =
579 # ssh.authorized_keys_ssh_opts =
574
580
575 ## File to generate the authorized keys together with options
581 ## File to generate the authorized keys together with options
576 ## It is possible to have multiple key files specified in `sshd_config` e.g.
582 ## It is possible to have multiple key files specified in `sshd_config` e.g.
577 ## AuthorizedKeysFile %h/.ssh/authorized_keys %h/.ssh/authorized_keys_rhodecode
583 ## AuthorizedKeysFile %h/.ssh/authorized_keys %h/.ssh/authorized_keys_rhodecode
578 ssh.authorized_keys_file_path = ~/.ssh/authorized_keys_rhodecode
584 ssh.authorized_keys_file_path = ~/.ssh/authorized_keys_rhodecode
579
585
580 ## Command to execute the SSH wrapper. The binary is available in the
586 ## Command to execute the SSH wrapper. The binary is available in the
581 ## rhodecode installation directory.
587 ## rhodecode installation directory.
582 ## e.g ~/.rccontrol/community-1/profile/bin/rcssh-wrapper
588 ## e.g ~/.rccontrol/community-1/profile/bin/rcssh-wrapper
583 ssh.wrapper_cmd = ~/.rccontrol/community-1/rcssh-wrapper
589 ssh.wrapper_cmd = ~/.rccontrol/community-1/rcssh-wrapper
584
590
585 ## Allow shell when executing the ssh-wrapper command
591 ## Allow shell when executing the ssh-wrapper command
586 ssh.wrapper_cmd_allow_shell = false
592 ssh.wrapper_cmd_allow_shell = false
587
593
588 ## Enables logging and detailed output sent back to the client. Useful for
594 ## Enables logging and detailed output sent back to the client. Useful for
589 ## debugging, shouldn't be used in production.
595 ## debugging, shouldn't be used in production.
590 ssh.enable_debug_logging = false
596 ssh.enable_debug_logging = false
591
597
592 ## API KEY for user who has access to fetch other user permission information
598 ## API KEY for user who has access to fetch other user permission information
593 ## most likely a super-admin account with some IP restrictions.
599 ## most likely a super-admin account with some IP restrictions.
594 ssh.api_key =
600 ssh.api_key =
595
601
596 ## API Host, the server address of RhodeCode instance that the api_key will
602 ## API Host, the server address of RhodeCode instance that the api_key will
597 ## access
603 ## access
598 ssh.api_host = http://localhost
604 ssh.api_host = http://localhost
599
605
600 ## Paths to binary executables; by default they are just the names, but we can
606 ## Paths to binary executables; by default they are just the names, but we can
601 ## override them if we want to use custom ones
607 ## override them if we want to use custom ones
602 ssh.executable.hg = ~/.rccontrol/vcsserver-1/profile/bin/hg
608 ssh.executable.hg = ~/.rccontrol/vcsserver-1/profile/bin/hg
603 ssh.executable.git = ~/.rccontrol/vcsserver-1/profile/bin/git
609 ssh.executable.git = ~/.rccontrol/vcsserver-1/profile/bin/git
604 ssh.executable.svn = ~/.rccontrol/vcsserver-1/profile/bin/svnserve
610 ssh.executable.svn = ~/.rccontrol/vcsserver-1/profile/bin/svnserve
605
611
606
612
607 ## Dummy marker to add new entries after.
613 ## Dummy marker to add new entries after.
608 ## Add any custom entries below. Please don't remove.
614 ## Add any custom entries below. Please don't remove.
609 custom.conf = 1
615 custom.conf = 1
610
616
611
617
612 ################################
618 ################################
613 ### LOGGING CONFIGURATION ####
619 ### LOGGING CONFIGURATION ####
614 ################################
620 ################################
615 [loggers]
621 [loggers]
616 keys = root, routes, rhodecode, sqlalchemy, beaker, templates, ssh_wrapper
622 keys = root, routes, rhodecode, sqlalchemy, beaker, templates, ssh_wrapper
617
623
618 [handlers]
624 [handlers]
619 keys = console, console_sql
625 keys = console, console_sql
620
626
621 [formatters]
627 [formatters]
622 keys = generic, color_formatter, color_formatter_sql
628 keys = generic, color_formatter, color_formatter_sql
623
629
624 #############
630 #############
625 ## LOGGERS ##
631 ## LOGGERS ##
626 #############
632 #############
627 [logger_root]
633 [logger_root]
628 level = NOTSET
634 level = NOTSET
629 handlers = console
635 handlers = console
630
636
631 [logger_routes]
637 [logger_routes]
632 level = DEBUG
638 level = DEBUG
633 handlers =
639 handlers =
634 qualname = routes.middleware
640 qualname = routes.middleware
635 ## "level = DEBUG" logs the route matched and routing variables.
641 ## "level = DEBUG" logs the route matched and routing variables.
636 propagate = 1
642 propagate = 1
637
643
638 [logger_beaker]
644 [logger_beaker]
639 level = DEBUG
645 level = DEBUG
640 handlers =
646 handlers =
641 qualname = beaker.container
647 qualname = beaker.container
642 propagate = 1
648 propagate = 1
643
649
644 [logger_templates]
650 [logger_templates]
645 level = INFO
651 level = INFO
646 handlers =
652 handlers =
647 qualname = pylons.templating
653 qualname = pylons.templating
648 propagate = 1
654 propagate = 1
649
655
650 [logger_rhodecode]
656 [logger_rhodecode]
651 level = DEBUG
657 level = DEBUG
652 handlers =
658 handlers =
653 qualname = rhodecode
659 qualname = rhodecode
654 propagate = 1
660 propagate = 1
655
661
656 [logger_sqlalchemy]
662 [logger_sqlalchemy]
657 level = INFO
663 level = INFO
658 handlers = console_sql
664 handlers = console_sql
659 qualname = sqlalchemy.engine
665 qualname = sqlalchemy.engine
660 propagate = 0
666 propagate = 0
661
667
662 [logger_ssh_wrapper]
668 [logger_ssh_wrapper]
663 level = DEBUG
669 level = DEBUG
664 handlers =
670 handlers =
665 qualname = ssh_wrapper
671 qualname = ssh_wrapper
666 propagate = 1
672 propagate = 1
667
673
668
674
669 ##############
675 ##############
670 ## HANDLERS ##
676 ## HANDLERS ##
671 ##############
677 ##############
672
678
673 [handler_console]
679 [handler_console]
674 class = StreamHandler
680 class = StreamHandler
675 args = (sys.stderr, )
681 args = (sys.stderr, )
676 level = INFO
682 level = INFO
677 formatter = generic
683 formatter = generic
678
684
679 [handler_console_sql]
685 [handler_console_sql]
680 class = StreamHandler
686 class = StreamHandler
681 args = (sys.stderr, )
687 args = (sys.stderr, )
682 level = WARN
688 level = WARN
683 formatter = generic
689 formatter = generic
684
690
685 ################
691 ################
686 ## FORMATTERS ##
692 ## FORMATTERS ##
687 ################
693 ################
688
694
689 [formatter_generic]
695 [formatter_generic]
690 class = rhodecode.lib.logging_formatter.ExceptionAwareFormatter
696 class = rhodecode.lib.logging_formatter.ExceptionAwareFormatter
691 format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
697 format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
692 datefmt = %Y-%m-%d %H:%M:%S
698 datefmt = %Y-%m-%d %H:%M:%S
693
699
694 [formatter_color_formatter]
700 [formatter_color_formatter]
695 class = rhodecode.lib.logging_formatter.ColorFormatter
701 class = rhodecode.lib.logging_formatter.ColorFormatter
696 format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
702 format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
697 datefmt = %Y-%m-%d %H:%M:%S
703 datefmt = %Y-%m-%d %H:%M:%S
698
704
699 [formatter_color_formatter_sql]
705 [formatter_color_formatter_sql]
700 class = rhodecode.lib.logging_formatter.ColorFormatterSql
706 class = rhodecode.lib.logging_formatter.ColorFormatterSql
701 format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
707 format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
702 datefmt = %Y-%m-%d %H:%M:%S
708 datefmt = %Y-%m-%d %H:%M:%S
@@ -1,1164 +1,1170 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2011-2017 RhodeCode GmbH
3 # Copyright (C) 2011-2017 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21
21
22 """
22 """
23 Set of diffing helpers, previously part of vcs
23 Set of diffing helpers, previously part of vcs
24 """
24 """
25
25
26 import collections
26 import collections
27 import re
27 import re
28 import difflib
28 import difflib
29 import logging
29 import logging
30
30
31 from itertools import tee, imap
31 from itertools import tee, imap
32
32
33 from pylons.i18n.translation import _
33 from pylons.i18n.translation import _
34
34
35 from rhodecode.lib.vcs.exceptions import VCSError
35 from rhodecode.lib.vcs.exceptions import VCSError
36 from rhodecode.lib.vcs.nodes import FileNode, SubModuleNode
36 from rhodecode.lib.vcs.nodes import FileNode, SubModuleNode
37 from rhodecode.lib.vcs.backends.base import EmptyCommit
37 from rhodecode.lib.vcs.backends.base import EmptyCommit
38 from rhodecode.lib.helpers import escape
38 from rhodecode.lib.helpers import escape
39 from rhodecode.lib.utils2 import safe_unicode
39 from rhodecode.lib.utils2 import safe_unicode
40
40
41 log = logging.getLogger(__name__)
41 log = logging.getLogger(__name__)
42
42
43 # define max context, a file with more than this number of lines is unusable
43 # define max context, a file with more than this number of lines is unusable
44 # in the browser anyway
44 # in the browser anyway
45 MAX_CONTEXT = 1024 * 1024
45 MAX_CONTEXT = 1024 * 1024
46
46
47
47
48 class OPS(object):
48 class OPS(object):
49 ADD = 'A'
49 ADD = 'A'
50 MOD = 'M'
50 MOD = 'M'
51 DEL = 'D'
51 DEL = 'D'
52
52
53
53
54 def wrap_to_table(str_):
54 def wrap_to_table(str_):
55 return '''<table class="code-difftable">
55 return '''<table class="code-difftable">
56 <tr class="line no-comment">
56 <tr class="line no-comment">
57 <td class="add-comment-line tooltip" title="%s"><span class="add-comment-content"></span></td>
57 <td class="add-comment-line tooltip" title="%s"><span class="add-comment-content"></span></td>
58 <td></td>
58 <td></td>
59 <td class="lineno new"></td>
59 <td class="lineno new"></td>
60 <td class="code no-comment"><pre>%s</pre></td>
60 <td class="code no-comment"><pre>%s</pre></td>
61 </tr>
61 </tr>
62 </table>''' % (_('Click to comment'), str_)
62 </table>''' % (_('Click to comment'), str_)
63
63
64
64
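# Illustrative sketch: wrap_to_table() simply embeds an already-escaped string
# into the standard single-row diff table markup, e.g.
# >>> wrap_to_table('Binary file')          # doctest: +ELLIPSIS
# '<table class="code-difftable">...<pre>Binary file</pre>...</table>'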
65 def wrapped_diff(filenode_old, filenode_new, diff_limit=None, file_limit=None,
65 def wrapped_diff(filenode_old, filenode_new, diff_limit=None, file_limit=None,
66 show_full_diff=False, ignore_whitespace=True, line_context=3,
66 show_full_diff=False, ignore_whitespace=True, line_context=3,
67 enable_comments=False):
67 enable_comments=False):
68 """
68 """
69 returns a diff wrapped in a table; checks the cut-off limit for the file and
69 returns a diff wrapped in a table; checks the cut-off limit for the file and
70 the whole diff and presents a proper message when a limit is exceeded
70 the whole diff and presents a proper message when a limit is exceeded
71 """
71 """
72
72
73 if filenode_old is None:
73 if filenode_old is None:
74 filenode_old = FileNode(filenode_new.path, '', EmptyCommit())
74 filenode_old = FileNode(filenode_new.path, '', EmptyCommit())
75
75
76 if filenode_old.is_binary or filenode_new.is_binary:
76 if filenode_old.is_binary or filenode_new.is_binary:
77 diff = wrap_to_table(_('Binary file'))
77 diff = wrap_to_table(_('Binary file'))
78 stats = None
78 stats = None
79 size = 0
79 size = 0
80 data = None
80 data = None
81
81
82 elif diff_limit != -1 and (diff_limit is None or
82 elif diff_limit != -1 and (diff_limit is None or
83 (filenode_old.size < diff_limit and filenode_new.size < diff_limit)):
83 (filenode_old.size < diff_limit and filenode_new.size < diff_limit)):
84
84
85 f_gitdiff = get_gitdiff(filenode_old, filenode_new,
85 f_gitdiff = get_gitdiff(filenode_old, filenode_new,
86 ignore_whitespace=ignore_whitespace,
86 ignore_whitespace=ignore_whitespace,
87 context=line_context)
87 context=line_context)
88 diff_processor = DiffProcessor(
88 diff_processor = DiffProcessor(
89 f_gitdiff, format='gitdiff', diff_limit=diff_limit,
89 f_gitdiff, format='gitdiff', diff_limit=diff_limit,
90 file_limit=file_limit, show_full_diff=show_full_diff)
90 file_limit=file_limit, show_full_diff=show_full_diff)
91 _parsed = diff_processor.prepare()
91 _parsed = diff_processor.prepare()
92
92
93 diff = diff_processor.as_html(enable_comments=enable_comments)
93 diff = diff_processor.as_html(enable_comments=enable_comments)
94 stats = _parsed[0]['stats'] if _parsed else None
94 stats = _parsed[0]['stats'] if _parsed else None
95 size = len(diff or '')
95 size = len(diff or '')
96 data = _parsed[0] if _parsed else None
96 data = _parsed[0] if _parsed else None
97 else:
97 else:
98 diff = wrap_to_table(_('Changeset was too big and was cut off, use '
98 diff = wrap_to_table(_('Changeset was too big and was cut off, use '
99 'diff menu to display this diff'))
99 'diff menu to display this diff'))
100 stats = None
100 stats = None
101 size = 0
101 size = 0
102 data = None
102 data = None
103 if not diff:
103 if not diff:
104 submodules = filter(lambda o: isinstance(o, SubModuleNode),
104 submodules = filter(lambda o: isinstance(o, SubModuleNode),
105 [filenode_new, filenode_old])
105 [filenode_new, filenode_old])
106 if submodules:
106 if submodules:
107 diff = wrap_to_table(escape('Submodule %r' % submodules[0]))
107 diff = wrap_to_table(escape('Submodule %r' % submodules[0]))
108 else:
108 else:
109 diff = wrap_to_table(_('No changes detected'))
109 diff = wrap_to_table(_('No changes detected'))
110
110
111 cs1 = filenode_old.commit.raw_id
111 cs1 = filenode_old.commit.raw_id
112 cs2 = filenode_new.commit.raw_id
112 cs2 = filenode_new.commit.raw_id
113
113
114 return size, cs1, cs2, diff, stats, data
114 return size, cs1, cs2, diff, stats, data
115
115
116
116
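# Hedged usage sketch; `old_node` and `new_node` stand in for FileNode
# instances taken from two commits and are not names defined in this module:
# >>> size, cs1, cs2, html, stats, data = wrapped_diff(
# ...     old_node, new_node, diff_limit=1024 * 1024, file_limit=256 * 1024,
# ...     show_full_diff=False, enable_comments=True)
# `html` is the table produced by DiffProcessor.as_html(), or a
# wrap_to_table() message when the diff is binary or exceeds the limits.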
117 def get_gitdiff(filenode_old, filenode_new, ignore_whitespace=True, context=3):
117 def get_gitdiff(filenode_old, filenode_new, ignore_whitespace=True, context=3):
118 """
118 """
119 Returns git style diff between given ``filenode_old`` and ``filenode_new``.
119 Returns git style diff between given ``filenode_old`` and ``filenode_new``.
120
120
121 :param ignore_whitespace: ignore whitespace in the diff
121 :param ignore_whitespace: ignore whitespace in the diff
122 """
122 """
123 # make sure we pass in default context
123 # make sure we pass in default context
124 context = context or 3
124 context = context or 3
125 # protect against IntOverflow when passing HUGE context
125 # protect against IntOverflow when passing HUGE context
126 if context > MAX_CONTEXT:
126 if context > MAX_CONTEXT:
127 context = MAX_CONTEXT
127 context = MAX_CONTEXT
128
128
129 submodules = filter(lambda o: isinstance(o, SubModuleNode),
129 submodules = filter(lambda o: isinstance(o, SubModuleNode),
130 [filenode_new, filenode_old])
130 [filenode_new, filenode_old])
131 if submodules:
131 if submodules:
132 return ''
132 return ''
133
133
134 for filenode in (filenode_old, filenode_new):
134 for filenode in (filenode_old, filenode_new):
135 if not isinstance(filenode, FileNode):
135 if not isinstance(filenode, FileNode):
136 raise VCSError(
136 raise VCSError(
137 "Given object should be FileNode object, not %s"
137 "Given object should be FileNode object, not %s"
138 % filenode.__class__)
138 % filenode.__class__)
139
139
140 repo = filenode_new.commit.repository
140 repo = filenode_new.commit.repository
141 old_commit = filenode_old.commit or repo.EMPTY_COMMIT
141 old_commit = filenode_old.commit or repo.EMPTY_COMMIT
142 new_commit = filenode_new.commit
142 new_commit = filenode_new.commit
143
143
144 vcs_gitdiff = repo.get_diff(
144 vcs_gitdiff = repo.get_diff(
145 old_commit, new_commit, filenode_new.path,
145 old_commit, new_commit, filenode_new.path,
146 ignore_whitespace, context, path1=filenode_old.path)
146 ignore_whitespace, context, path1=filenode_old.path)
147 return vcs_gitdiff
147 return vcs_gitdiff
148
148
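# Hedged usage sketch; `commit1` and `commit2` are assumed commit objects from
# a vcs backend and are only used here for illustration:
# >>> old_node = commit1.get_node('setup.py')
# >>> new_node = commit2.get_node('setup.py')
# >>> raw = get_gitdiff(old_node, new_node, ignore_whitespace=False, context=5)
# The returned object is the backend diff that DiffProcessor takes as input.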
149 NEW_FILENODE = 1
149 NEW_FILENODE = 1
150 DEL_FILENODE = 2
150 DEL_FILENODE = 2
151 MOD_FILENODE = 3
151 MOD_FILENODE = 3
152 RENAMED_FILENODE = 4
152 RENAMED_FILENODE = 4
153 COPIED_FILENODE = 5
153 COPIED_FILENODE = 5
154 CHMOD_FILENODE = 6
154 CHMOD_FILENODE = 6
155 BIN_FILENODE = 7
155 BIN_FILENODE = 7
156
156
157
157
158 class LimitedDiffContainer(object):
158 class LimitedDiffContainer(object):
159
159
160 def __init__(self, diff_limit, cur_diff_size, diff):
160 def __init__(self, diff_limit, cur_diff_size, diff):
161 self.diff = diff
161 self.diff = diff
162 self.diff_limit = diff_limit
162 self.diff_limit = diff_limit
163 self.cur_diff_size = cur_diff_size
163 self.cur_diff_size = cur_diff_size
164
164
165 def __getitem__(self, key):
165 def __getitem__(self, key):
166 return self.diff.__getitem__(key)
166 return self.diff.__getitem__(key)
167
167
168 def __iter__(self):
168 def __iter__(self):
169 for l in self.diff:
169 for l in self.diff:
170 yield l
170 yield l
171
171
172
172
173 class Action(object):
173 class Action(object):
174 """
174 """
175 Contains constants for the action value of the lines in a parsed diff.
175 Contains constants for the action value of the lines in a parsed diff.
176 """
176 """
177
177
178 ADD = 'add'
178 ADD = 'add'
179 DELETE = 'del'
179 DELETE = 'del'
180 UNMODIFIED = 'unmod'
180 UNMODIFIED = 'unmod'
181
181
182 CONTEXT = 'context'
182 CONTEXT = 'context'
183 OLD_NO_NL = 'old-no-nl'
183 OLD_NO_NL = 'old-no-nl'
184 NEW_NO_NL = 'new-no-nl'
184 NEW_NO_NL = 'new-no-nl'
185
185
186
186
187 class DiffProcessor(object):
187 class DiffProcessor(object):
188 """
188 """
189 Give it a unified or git diff and it returns a list of the files that were
189 Give it a unified or git diff and it returns a list of the files that were
190 mentioned in the diff together with a dict of meta information that
190 mentioned in the diff together with a dict of meta information that
191 can be used to render it in an HTML template.
191 can be used to render it in an HTML template.
192
192
193 .. note:: Unicode handling
193 .. note:: Unicode handling
194
194
195 The original diffs are a byte sequence and can contain filenames
195 The original diffs are a byte sequence and can contain filenames
196 in mixed encodings. This class generally returns `unicode` objects
196 in mixed encodings. This class generally returns `unicode` objects
197 since the result is intended for presentation to the user.
197 since the result is intended for presentation to the user.
198
198
199 """
199 """
200 _chunk_re = re.compile(r'^@@ -(\d+)(?:,(\d+))? \+(\d+)(?:,(\d+))? @@(.*)')
200 _chunk_re = re.compile(r'^@@ -(\d+)(?:,(\d+))? \+(\d+)(?:,(\d+))? @@(.*)')
201 _newline_marker = re.compile(r'^\\ No newline at end of file')
201 _newline_marker = re.compile(r'^\\ No newline at end of file')
202
202
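# Illustrative example of the hunk-header regex above applied to a
# standard unified-diff header:
# >>> DiffProcessor._chunk_re.match('@@ -1,3 +1,4 @@ def foo():').groups()
# ('1', '3', '1', '4', ' def foo():')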
203 # used for inline highlighter word split
203 # used for inline highlighter word split
204 _token_re = re.compile(r'()(&gt;|&lt;|&amp;|\W+?)')
204 _token_re = re.compile(r'()(&gt;|&lt;|&amp;|\W+?)')
205
205
206 # collapse ranges of commits over given number
206 # collapse ranges of commits over given number
207 _collapse_commits_over = 5
207 _collapse_commits_over = 5
208
208
209 def __init__(self, diff, format='gitdiff', diff_limit=None,
209 def __init__(self, diff, format='gitdiff', diff_limit=None,
210 file_limit=None, show_full_diff=True):
210 file_limit=None, show_full_diff=True):
211 """
211 """
212 :param diff: A `Diff` object representing a diff from a vcs backend
212 :param diff: A `Diff` object representing a diff from a vcs backend
213 :param format: format of diff passed, `udiff` or `gitdiff`
213 :param format: format of diff passed, `udiff` or `gitdiff`
214 :param diff_limit: defines the size of a diff that is considered "big";
214 :param diff_limit: defines the size of a diff that is considered "big";
215 based on this parameter the cut-off will be triggered; set to None
215 based on this parameter the cut-off will be triggered; set to None
216 to show the full diff
216 to show the full diff
217 """
217 """
218 self._diff = diff
218 self._diff = diff
219 self._format = format
219 self._format = format
220 self.adds = 0
220 self.adds = 0
221 self.removes = 0
221 self.removes = 0
222 # calculate diff size
222 # calculate diff size
223 self.diff_limit = diff_limit
223 self.diff_limit = diff_limit
224 self.file_limit = file_limit
224 self.file_limit = file_limit
225 self.show_full_diff = show_full_diff
225 self.show_full_diff = show_full_diff
226 self.cur_diff_size = 0
226 self.cur_diff_size = 0
227 self.parsed = False
227 self.parsed = False
228 self.parsed_diff = []
228 self.parsed_diff = []
229
229
230 log.debug('Initialized DiffProcessor with %s mode', format)
230 if format == 'gitdiff':
231 if format == 'gitdiff':
231 self.differ = self._highlight_line_difflib
232 self.differ = self._highlight_line_difflib
232 self._parser = self._parse_gitdiff
233 self._parser = self._parse_gitdiff
233 else:
234 else:
234 self.differ = self._highlight_line_udiff
235 self.differ = self._highlight_line_udiff
235 self._parser = self._new_parse_gitdiff
236 self._parser = self._new_parse_gitdiff
236
237
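# Hedged usage sketch; `vcs_diff` stands in for a diff object returned by
# repo.get_diff() and is not defined in this module. Any format other than
# 'gitdiff' selects the newer parser:
# >>> processor = DiffProcessor(vcs_diff, format='newdiff',
# ...                           diff_limit=1024 * 1024, file_limit=256 * 1024,
# ...                           show_full_diff=False)
# >>> files = processor.prepare()   # list of per-file dicts (see _files below)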
237 def _copy_iterator(self):
238 def _copy_iterator(self):
238 """
239 """
239 make a fresh copy of the generator; we should not iterate through
240 make a fresh copy of the generator; we should not iterate through
240 the original as it is needed for repeated operations on
241 the original as it is needed for repeated operations on
241 this instance of DiffProcessor
242 this instance of DiffProcessor
242 """
243 """
243 self.__udiff, iterator_copy = tee(self.__udiff)
244 self.__udiff, iterator_copy = tee(self.__udiff)
244 return iterator_copy
245 return iterator_copy
245
246
246 def _escaper(self, string):
247 def _escaper(self, string):
247 """
248 """
248 Escaper for diff escapes special chars and checks the diff limit
248 Escaper for diffs: escapes special characters and checks the diff limit
249 Escaper for diffs: escapes special characters and checks the diff limit
250
250 :param string:
251 :param string:
251 """
252 """
252
253
253 self.cur_diff_size += len(string)
254 self.cur_diff_size += len(string)
254
255
255 if not self.show_full_diff and (self.cur_diff_size > self.diff_limit):
256 if not self.show_full_diff and (self.cur_diff_size > self.diff_limit):
256 raise DiffLimitExceeded('Diff Limit Exceeded')
257 raise DiffLimitExceeded('Diff Limit Exceeded')
257
258
258 return safe_unicode(string)\
259 return safe_unicode(string)\
259 .replace('&', '&amp;')\
260 .replace('&', '&amp;')\
260 .replace('<', '&lt;')\
261 .replace('<', '&lt;')\
261 .replace('>', '&gt;')
262 .replace('>', '&gt;')
262
263
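# Illustrative behaviour of the escaping above, assuming `processor` was built
# with show_full_diff=True so the size check never trips:
# >>> processor._escaper('if a < b & c > d:')
# u'if a &lt; b &amp; c &gt; d:'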
263 def _line_counter(self, l):
264 def _line_counter(self, l):
264 """
265 """
265 Checks each line and bumps total adds/removes for this diff
266 Checks each line and bumps total adds/removes for this diff
266
267
267 :param l:
268 :param l:
268 """
269 """
269 if l.startswith('+') and not l.startswith('+++'):
270 if l.startswith('+') and not l.startswith('+++'):
270 self.adds += 1
271 self.adds += 1
271 elif l.startswith('-') and not l.startswith('---'):
272 elif l.startswith('-') and not l.startswith('---'):
272 self.removes += 1
273 self.removes += 1
273 return safe_unicode(l)
274 return safe_unicode(l)
274
275
275 def _highlight_line_difflib(self, line, next_):
276 def _highlight_line_difflib(self, line, next_):
276 """
277 """
277 Highlight inline changes in both lines.
278 Highlight inline changes in both lines.
278 """
279 """
279
280
280 if line['action'] == Action.DELETE:
281 if line['action'] == Action.DELETE:
281 old, new = line, next_
282 old, new = line, next_
282 else:
283 else:
283 old, new = next_, line
284 old, new = next_, line
284
285
285 oldwords = self._token_re.split(old['line'])
286 oldwords = self._token_re.split(old['line'])
286 newwords = self._token_re.split(new['line'])
287 newwords = self._token_re.split(new['line'])
287 sequence = difflib.SequenceMatcher(None, oldwords, newwords)
288 sequence = difflib.SequenceMatcher(None, oldwords, newwords)
288
289
289 oldfragments, newfragments = [], []
290 oldfragments, newfragments = [], []
290 for tag, i1, i2, j1, j2 in sequence.get_opcodes():
291 for tag, i1, i2, j1, j2 in sequence.get_opcodes():
291 oldfrag = ''.join(oldwords[i1:i2])
292 oldfrag = ''.join(oldwords[i1:i2])
292 newfrag = ''.join(newwords[j1:j2])
293 newfrag = ''.join(newwords[j1:j2])
293 if tag != 'equal':
294 if tag != 'equal':
294 if oldfrag:
295 if oldfrag:
295 oldfrag = '<del>%s</del>' % oldfrag
296 oldfrag = '<del>%s</del>' % oldfrag
296 if newfrag:
297 if newfrag:
297 newfrag = '<ins>%s</ins>' % newfrag
298 newfrag = '<ins>%s</ins>' % newfrag
298 oldfragments.append(oldfrag)
299 oldfragments.append(oldfrag)
299 newfragments.append(newfrag)
300 newfragments.append(newfrag)
300
301
301 old['line'] = "".join(oldfragments)
302 old['line'] = "".join(oldfragments)
302 new['line'] = "".join(newfragments)
303 new['line'] = "".join(newfragments)
303
304
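# Minimal standalone sketch of the difflib technique used above, on plain
# token lists instead of the line dicts this method expects:
# >>> import difflib
# >>> sm = difflib.SequenceMatcher(None, ['foo', ' ', 'bar'], ['foo', ' ', 'baz'])
# >>> [op[0] for op in sm.get_opcodes()]
# ['equal', 'replace']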
304 def _highlight_line_udiff(self, line, next_):
305 def _highlight_line_udiff(self, line, next_):
305 """
306 """
306 Highlight inline changes in both lines.
307 Highlight inline changes in both lines.
307 """
308 """
308 start = 0
309 start = 0
309 limit = min(len(line['line']), len(next_['line']))
310 limit = min(len(line['line']), len(next_['line']))
310 while start < limit and line['line'][start] == next_['line'][start]:
311 while start < limit and line['line'][start] == next_['line'][start]:
311 start += 1
312 start += 1
312 end = -1
313 end = -1
313 limit -= start
314 limit -= start
314 while -end <= limit and line['line'][end] == next_['line'][end]:
315 while -end <= limit and line['line'][end] == next_['line'][end]:
315 end -= 1
316 end -= 1
316 end += 1
317 end += 1
317 if start or end:
318 if start or end:
318 def do(l):
319 def do(l):
319 last = end + len(l['line'])
320 last = end + len(l['line'])
320 if l['action'] == Action.ADD:
321 if l['action'] == Action.ADD:
321 tag = 'ins'
322 tag = 'ins'
322 else:
323 else:
323 tag = 'del'
324 tag = 'del'
324 l['line'] = '%s<%s>%s</%s>%s' % (
325 l['line'] = '%s<%s>%s</%s>%s' % (
325 l['line'][:start],
326 l['line'][:start],
326 tag,
327 tag,
327 l['line'][start:last],
328 l['line'][start:last],
328 tag,
329 tag,
329 l['line'][last:]
330 l['line'][last:]
330 )
331 )
331 do(line)
332 do(line)
332 do(next_)
333 do(next_)
333
334
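# Illustrative sketch with assumed inputs; the method mutates the 'line'
# values of both dicts in place, keeping the common prefix/suffix and wrapping
# only the differing middle in <del>/<ins> tags:
# >>> old = {'action': Action.DELETE, 'line': 'value = 10'}
# >>> new = {'action': Action.ADD, 'line': 'value = 42'}
# >>> processor._highlight_line_udiff(old, new)
# >>> new['line']
# 'value = <ins>42</ins>'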
334 def _clean_line(self, line, command):
335 def _clean_line(self, line, command):
335 if command in ['+', '-', ' ']:
336 if command in ['+', '-', ' ']:
336 # only modify the line if it's actually a diff thing
337 # only modify the line if it's actually a diff thing
337 line = line[1:]
338 line = line[1:]
338 return line
339 return line
339
340
340 def _parse_gitdiff(self, inline_diff=True):
341 def _parse_gitdiff(self, inline_diff=True):
341 _files = []
342 _files = []
342 diff_container = lambda arg: arg
343 diff_container = lambda arg: arg
343
344
344 for chunk in self._diff.chunks():
345 for chunk in self._diff.chunks():
345 head = chunk.header
346 head = chunk.header
346
347
347 diff = imap(self._escaper, chunk.diff.splitlines(1))
348 diff = imap(self._escaper, chunk.diff.splitlines(1))
348 raw_diff = chunk.raw
349 raw_diff = chunk.raw
349 limited_diff = False
350 limited_diff = False
350 exceeds_limit = False
351 exceeds_limit = False
351
352
352 op = None
353 op = None
353 stats = {
354 stats = {
354 'added': 0,
355 'added': 0,
355 'deleted': 0,
356 'deleted': 0,
356 'binary': False,
357 'binary': False,
357 'ops': {},
358 'ops': {},
358 }
359 }
359
360
360 if head['deleted_file_mode']:
361 if head['deleted_file_mode']:
361 op = OPS.DEL
362 op = OPS.DEL
362 stats['binary'] = True
363 stats['binary'] = True
363 stats['ops'][DEL_FILENODE] = 'deleted file'
364 stats['ops'][DEL_FILENODE] = 'deleted file'
364
365
365 elif head['new_file_mode']:
366 elif head['new_file_mode']:
366 op = OPS.ADD
367 op = OPS.ADD
367 stats['binary'] = True
368 stats['binary'] = True
368 stats['ops'][NEW_FILENODE] = 'new file %s' % head['new_file_mode']
369 stats['ops'][NEW_FILENODE] = 'new file %s' % head['new_file_mode']
369 else: # modify operation, can be copy, rename or chmod
370 else: # modify operation, can be copy, rename or chmod
370
371
371 # CHMOD
372 # CHMOD
372 if head['new_mode'] and head['old_mode']:
373 if head['new_mode'] and head['old_mode']:
373 op = OPS.MOD
374 op = OPS.MOD
374 stats['binary'] = True
375 stats['binary'] = True
375 stats['ops'][CHMOD_FILENODE] = (
376 stats['ops'][CHMOD_FILENODE] = (
376 'modified file chmod %s => %s' % (
377 'modified file chmod %s => %s' % (
377 head['old_mode'], head['new_mode']))
378 head['old_mode'], head['new_mode']))
378 # RENAME
379 # RENAME
379 if head['rename_from'] != head['rename_to']:
380 if head['rename_from'] != head['rename_to']:
380 op = OPS.MOD
381 op = OPS.MOD
381 stats['binary'] = True
382 stats['binary'] = True
382 stats['ops'][RENAMED_FILENODE] = (
383 stats['ops'][RENAMED_FILENODE] = (
383 'file renamed from %s to %s' % (
384 'file renamed from %s to %s' % (
384 head['rename_from'], head['rename_to']))
385 head['rename_from'], head['rename_to']))
385 # COPY
386 # COPY
386 if head.get('copy_from') and head.get('copy_to'):
387 if head.get('copy_from') and head.get('copy_to'):
387 op = OPS.MOD
388 op = OPS.MOD
388 stats['binary'] = True
389 stats['binary'] = True
389 stats['ops'][COPIED_FILENODE] = (
390 stats['ops'][COPIED_FILENODE] = (
390 'file copied from %s to %s' % (
391 'file copied from %s to %s' % (
391 head['copy_from'], head['copy_to']))
392 head['copy_from'], head['copy_to']))
392
393
393 # If our new parsed headers didn't match anything fall back to
394 # If our new parsed headers didn't match anything fall back to
394 # old-style detection
395 # old-style detection
395 if op is None:
396 if op is None:
396 if not head['a_file'] and head['b_file']:
397 if not head['a_file'] and head['b_file']:
397 op = OPS.ADD
398 op = OPS.ADD
398 stats['binary'] = True
399 stats['binary'] = True
399 stats['ops'][NEW_FILENODE] = 'new file'
400 stats['ops'][NEW_FILENODE] = 'new file'
400
401
401 elif head['a_file'] and not head['b_file']:
402 elif head['a_file'] and not head['b_file']:
402 op = OPS.DEL
403 op = OPS.DEL
403 stats['binary'] = True
404 stats['binary'] = True
404 stats['ops'][DEL_FILENODE] = 'deleted file'
405 stats['ops'][DEL_FILENODE] = 'deleted file'
405
406
406 # it's neither ADD nor DELETE
407 # it's neither ADD nor DELETE
407 if op is None:
408 if op is None:
408 op = OPS.MOD
409 op = OPS.MOD
409 stats['binary'] = True
410 stats['binary'] = True
410 stats['ops'][MOD_FILENODE] = 'modified file'
411 stats['ops'][MOD_FILENODE] = 'modified file'
411
412
412 # a real non-binary diff
413 # a real non-binary diff
413 if head['a_file'] or head['b_file']:
414 if head['a_file'] or head['b_file']:
414 try:
415 try:
415 raw_diff, chunks, _stats = self._parse_lines(diff)
416 raw_diff, chunks, _stats = self._parse_lines(diff)
416 stats['binary'] = False
417 stats['binary'] = False
417 stats['added'] = _stats[0]
418 stats['added'] = _stats[0]
418 stats['deleted'] = _stats[1]
419 stats['deleted'] = _stats[1]
419 # explicit mark that it's a modified file
420 # explicit mark that it's a modified file
420 if op == OPS.MOD:
421 if op == OPS.MOD:
421 stats['ops'][MOD_FILENODE] = 'modified file'
422 stats['ops'][MOD_FILENODE] = 'modified file'
422 exceeds_limit = len(raw_diff) > self.file_limit
423 exceeds_limit = len(raw_diff) > self.file_limit
423
424
424 # changed from _escaper function so we validate size of
425 # changed from _escaper function so we validate size of
425 # each file instead of the whole diff
426 # each file instead of the whole diff
426 # diff will hide big files but still show small ones
427 # diff will hide big files but still show small ones
427 # from my tests, big files are fairly safe to be parsed
428 # from my tests, big files are fairly safe to be parsed
428 # but the browser is the bottleneck
429 # but the browser is the bottleneck
429 if not self.show_full_diff and exceeds_limit:
430 if not self.show_full_diff and exceeds_limit:
430 raise DiffLimitExceeded('File Limit Exceeded')
431 raise DiffLimitExceeded('File Limit Exceeded')
431
432
432 except DiffLimitExceeded:
433 except DiffLimitExceeded:
433 diff_container = lambda _diff: \
434 diff_container = lambda _diff: \
434 LimitedDiffContainer(
435 LimitedDiffContainer(
435 self.diff_limit, self.cur_diff_size, _diff)
436 self.diff_limit, self.cur_diff_size, _diff)
436
437
437 exceeds_limit = len(raw_diff) > self.file_limit
438 exceeds_limit = len(raw_diff) > self.file_limit
438 limited_diff = True
439 limited_diff = True
439 chunks = []
440 chunks = []
440
441
441 else: # GIT format binary patch, or possibly empty diff
442 else: # GIT format binary patch, or possibly empty diff
442 if head['bin_patch']:
443 if head['bin_patch']:
443 # the operation is already extracted, but we simply mark that
444 # the operation is already extracted, but we simply mark that
444 # it's a diff we won't show for binary files
445 # it's a diff we won't show for binary files
445 stats['ops'][BIN_FILENODE] = 'binary diff hidden'
446 stats['ops'][BIN_FILENODE] = 'binary diff hidden'
446 chunks = []
447 chunks = []
447
448
448 if chunks and not self.show_full_diff and op == OPS.DEL:
449 if chunks and not self.show_full_diff and op == OPS.DEL:
449 # hide deleted file contents unless in full-diff mode
450 # hide deleted file contents unless in full-diff mode
450 # TODO: anderson: if the view is not too big, there is no way
451 # TODO: anderson: if the view is not too big, there is no way
451 # to see the content of the file
452 # to see the content of the file
452 chunks = []
453 chunks = []
453
454
454 chunks.insert(0, [{
455 chunks.insert(0, [{
455 'old_lineno': '',
456 'old_lineno': '',
456 'new_lineno': '',
457 'new_lineno': '',
457 'action': Action.CONTEXT,
458 'action': Action.CONTEXT,
458 'line': msg,
459 'line': msg,
459 } for _op, msg in stats['ops'].iteritems()
460 } for _op, msg in stats['ops'].iteritems()
460 if _op not in [MOD_FILENODE]])
461 if _op not in [MOD_FILENODE]])
461
462
462 _files.append({
463 _files.append({
463 'filename': safe_unicode(head['b_path']),
464 'filename': safe_unicode(head['b_path']),
464 'old_revision': head['a_blob_id'],
465 'old_revision': head['a_blob_id'],
465 'new_revision': head['b_blob_id'],
466 'new_revision': head['b_blob_id'],
466 'chunks': chunks,
467 'chunks': chunks,
467 'raw_diff': safe_unicode(raw_diff),
468 'raw_diff': safe_unicode(raw_diff),
468 'operation': op,
469 'operation': op,
469 'stats': stats,
470 'stats': stats,
470 'exceeds_limit': exceeds_limit,
471 'exceeds_limit': exceeds_limit,
471 'is_limited_diff': limited_diff,
472 'is_limited_diff': limited_diff,
472 })
473 })
473
474
474 sorter = lambda info: {OPS.ADD: 0, OPS.MOD: 1,
475 sorter = lambda info: {OPS.ADD: 0, OPS.MOD: 1,
475 OPS.DEL: 2}.get(info['operation'])
476 OPS.DEL: 2}.get(info['operation'])
476
477
477 if not inline_diff:
478 if not inline_diff:
478 return diff_container(sorted(_files, key=sorter))
479 return diff_container(sorted(_files, key=sorter))
479
480
480 # highlight inline changes
481 # highlight inline changes
481 for diff_data in _files:
482 for diff_data in _files:
482 for chunk in diff_data['chunks']:
483 for chunk in diff_data['chunks']:
483 lineiter = iter(chunk)
484 lineiter = iter(chunk)
484 try:
485 try:
485 while 1:
486 while 1:
486 line = lineiter.next()
487 line = lineiter.next()
487 if line['action'] not in (
488 if line['action'] not in (
488 Action.UNMODIFIED, Action.CONTEXT):
489 Action.UNMODIFIED, Action.CONTEXT):
489 nextline = lineiter.next()
490 nextline = lineiter.next()
490 if nextline['action'] in ['unmod', 'context'] or \
491 if nextline['action'] in ['unmod', 'context'] or \
491 nextline['action'] == line['action']:
492 nextline['action'] == line['action']:
492 continue
493 continue
493 self.differ(line, nextline)
494 self.differ(line, nextline)
494 except StopIteration:
495 except StopIteration:
495 pass
496 pass
496
497
497 return diff_container(sorted(_files, key=sorter))
498 return diff_container(sorted(_files, key=sorter))
498
499
499
500 def _check_large_diff(self):
500 # FIXME: NEWDIFFS: dan: this replaces the old _escaper function
501 log.debug('Diff exceeds current diff_limit of %s', self.diff_limit)
501 def _process_line(self, string):
502 """
503 Process a diff line, checks the diff limit
504
505 :param string:
506 """
507
508 self.cur_diff_size += len(string)
509
510 if not self.show_full_diff and (self.cur_diff_size > self.diff_limit):
502 if not self.show_full_diff and (self.cur_diff_size > self.diff_limit):
511 raise DiffLimitExceeded('Diff Limit Exceeded')
503 raise DiffLimitExceeded('Diff Limit `%s` Exceeded', self.diff_limit)
512
513 return safe_unicode(string)
514
504
515 # FIXME: NEWDIFFS: dan: this replaces _parse_gitdiff
505 # FIXME: NEWDIFFS: dan: this replaces _parse_gitdiff
516 def _new_parse_gitdiff(self, inline_diff=True):
506 def _new_parse_gitdiff(self, inline_diff=True):
517 _files = []
507 _files = []
508
509 # this can be overriden later to a LimitedDiffContainer type
518 diff_container = lambda arg: arg
510 diff_container = lambda arg: arg
511
519 for chunk in self._diff.chunks():
512 for chunk in self._diff.chunks():
520 head = chunk.header
513 head = chunk.header
521 log.debug('parsing diff %r' % head)
514 log.debug('parsing diff %r' % head)
522
515
523 diff = imap(self._process_line, chunk.diff.splitlines(1))
524 raw_diff = chunk.raw
516 raw_diff = chunk.raw
525 limited_diff = False
517 limited_diff = False
526 exceeds_limit = False
518 exceeds_limit = False
527 # if 'empty_file_to_modify_and_rename' in head['a_path']:
519
528 # 1/0
529 op = None
520 op = None
530 stats = {
521 stats = {
531 'added': 0,
522 'added': 0,
532 'deleted': 0,
523 'deleted': 0,
533 'binary': False,
524 'binary': False,
534 'old_mode': None,
525 'old_mode': None,
535 'new_mode': None,
526 'new_mode': None,
536 'ops': {},
527 'ops': {},
537 }
528 }
538 if head['old_mode']:
529 if head['old_mode']:
539 stats['old_mode'] = head['old_mode']
530 stats['old_mode'] = head['old_mode']
540 if head['new_mode']:
531 if head['new_mode']:
541 stats['new_mode'] = head['new_mode']
532 stats['new_mode'] = head['new_mode']
542 if head['b_mode']:
533 if head['b_mode']:
543 stats['new_mode'] = head['b_mode']
534 stats['new_mode'] = head['b_mode']
544
535
536 # delete file
545 if head['deleted_file_mode']:
537 if head['deleted_file_mode']:
546 op = OPS.DEL
538 op = OPS.DEL
547 stats['binary'] = True
539 stats['binary'] = True
548 stats['ops'][DEL_FILENODE] = 'deleted file'
540 stats['ops'][DEL_FILENODE] = 'deleted file'
549
541
542 # new file
550 elif head['new_file_mode']:
543 elif head['new_file_mode']:
551 op = OPS.ADD
544 op = OPS.ADD
552 stats['binary'] = True
545 stats['binary'] = True
553 stats['old_mode'] = None
546 stats['old_mode'] = None
554 stats['new_mode'] = head['new_file_mode']
547 stats['new_mode'] = head['new_file_mode']
555 stats['ops'][NEW_FILENODE] = 'new file %s' % head['new_file_mode']
548 stats['ops'][NEW_FILENODE] = 'new file %s' % head['new_file_mode']
556 else: # modify operation, can be copy, rename or chmod
557
549
550 # modify operation, can be copy, rename or chmod
551 else:
558 # CHMOD
552 # CHMOD
559 if head['new_mode'] and head['old_mode']:
553 if head['new_mode'] and head['old_mode']:
560 op = OPS.MOD
554 op = OPS.MOD
561 stats['binary'] = True
555 stats['binary'] = True
562 stats['ops'][CHMOD_FILENODE] = (
556 stats['ops'][CHMOD_FILENODE] = (
563 'modified file chmod %s => %s' % (
557 'modified file chmod %s => %s' % (
564 head['old_mode'], head['new_mode']))
558 head['old_mode'], head['new_mode']))
565
559
566 # RENAME
560 # RENAME
567 if head['rename_from'] != head['rename_to']:
561 if head['rename_from'] != head['rename_to']:
568 op = OPS.MOD
562 op = OPS.MOD
569 stats['binary'] = True
563 stats['binary'] = True
570 stats['renamed'] = (head['rename_from'], head['rename_to'])
564 stats['renamed'] = (head['rename_from'], head['rename_to'])
571 stats['ops'][RENAMED_FILENODE] = (
565 stats['ops'][RENAMED_FILENODE] = (
572 'file renamed from %s to %s' % (
566 'file renamed from %s to %s' % (
573 head['rename_from'], head['rename_to']))
567 head['rename_from'], head['rename_to']))
574 # COPY
568 # COPY
575 if head.get('copy_from') and head.get('copy_to'):
569 if head.get('copy_from') and head.get('copy_to'):
576 op = OPS.MOD
570 op = OPS.MOD
577 stats['binary'] = True
571 stats['binary'] = True
578 stats['copied'] = (head['copy_from'], head['copy_to'])
572 stats['copied'] = (head['copy_from'], head['copy_to'])
579 stats['ops'][COPIED_FILENODE] = (
573 stats['ops'][COPIED_FILENODE] = (
580 'file copied from %s to %s' % (
574 'file copied from %s to %s' % (
581 head['copy_from'], head['copy_to']))
575 head['copy_from'], head['copy_to']))
582
576
583 # If our new parsed headers didn't match anything fall back to
577 # If our new parsed headers didn't match anything fall back to
584 # old-style detection
578 # old-style detection
585 if op is None:
579 if op is None:
586 if not head['a_file'] and head['b_file']:
580 if not head['a_file'] and head['b_file']:
587 op = OPS.ADD
581 op = OPS.ADD
588 stats['binary'] = True
582 stats['binary'] = True
589 stats['new_file'] = True
583 stats['new_file'] = True
590 stats['ops'][NEW_FILENODE] = 'new file'
584 stats['ops'][NEW_FILENODE] = 'new file'
591
585
592 elif head['a_file'] and not head['b_file']:
586 elif head['a_file'] and not head['b_file']:
593 op = OPS.DEL
587 op = OPS.DEL
594 stats['binary'] = True
588 stats['binary'] = True
595 stats['ops'][DEL_FILENODE] = 'deleted file'
589 stats['ops'][DEL_FILENODE] = 'deleted file'
596
590
597 # it's neither ADD nor DELETE
591 # it's neither ADD nor DELETE
598 if op is None:
592 if op is None:
599 op = OPS.MOD
593 op = OPS.MOD
600 stats['binary'] = True
594 stats['binary'] = True
601 stats['ops'][MOD_FILENODE] = 'modified file'
595 stats['ops'][MOD_FILENODE] = 'modified file'
602
596
603 # a real non-binary diff
597 # a real non-binary diff
604 if head['a_file'] or head['b_file']:
598 if head['a_file'] or head['b_file']:
599 diff = iter(chunk.diff.splitlines(1))
600
601 # append each file to the diff size
602 raw_chunk_size = len(raw_diff)
603
604 exceeds_limit = raw_chunk_size > self.file_limit
605 self.cur_diff_size += raw_chunk_size
606
605 try:
607 try:
608 # Check each file instead of the whole diff.
609 # Diff will hide big files but still show small ones.
610 # From the tests big files are fairly safe to be parsed
611 # but the browser is the bottleneck.
612 if not self.show_full_diff and exceeds_limit:
613 log.debug('File `%s` exceeds current file_limit of %s',
614 safe_unicode(head['b_path']), self.file_limit)
615 raise DiffLimitExceeded(
616 'File Limit %s Exceeded', self.file_limit)
617
618 self._check_large_diff()
619
606 raw_diff, chunks, _stats = self._new_parse_lines(diff)
620 raw_diff, chunks, _stats = self._new_parse_lines(diff)
607 stats['binary'] = False
621 stats['binary'] = False
608 stats['added'] = _stats[0]
622 stats['added'] = _stats[0]
609 stats['deleted'] = _stats[1]
623 stats['deleted'] = _stats[1]
610 # explicit mark that it's a modified file
624 # explicit mark that it's a modified file
611 if op == OPS.MOD:
625 if op == OPS.MOD:
612 stats['ops'][MOD_FILENODE] = 'modified file'
626 stats['ops'][MOD_FILENODE] = 'modified file'
613 exceeds_limit = len(raw_diff) > self.file_limit
614
615 # changed from _escaper function so we validate size of
616 # each file instead of the whole diff
617 # diff will hide big files but still show small ones
618 # from my tests, big files are fairly safe to be parsed
619 # but the browser is the bottleneck
620 if not self.show_full_diff and exceeds_limit:
621 raise DiffLimitExceeded('File Limit Exceeded')
622
627
623 except DiffLimitExceeded:
628 except DiffLimitExceeded:
624 diff_container = lambda _diff: \
629 diff_container = lambda _diff: \
625 LimitedDiffContainer(
630 LimitedDiffContainer(
626 self.diff_limit, self.cur_diff_size, _diff)
631 self.diff_limit, self.cur_diff_size, _diff)
627
632
628 exceeds_limit = len(raw_diff) > self.file_limit
629 limited_diff = True
633 limited_diff = True
630 chunks = []
634 chunks = []
631
635
632 else: # GIT format binary patch, or possibly empty diff
636 else: # GIT format binary patch, or possibly empty diff
633 if head['bin_patch']:
637 if head['bin_patch']:
634 # the operation is already extracted, but we simply mark that
638 # the operation is already extracted, but we simply mark that
635 # it's a diff we won't show for binary files
639 # it's a diff we won't show for binary files
636 stats['ops'][BIN_FILENODE] = 'binary diff hidden'
640 stats['ops'][BIN_FILENODE] = 'binary diff hidden'
637 chunks = []
641 chunks = []
638
642
643 # Hide content of deleted node by setting empty chunks
639 if chunks and not self.show_full_diff and op == OPS.DEL:
644 if chunks and not self.show_full_diff and op == OPS.DEL:
640 # hide deleted file contents unless in full-diff mode
645 # hide deleted file contents unless in full-diff mode
641 # TODO: anderson: if the view is not too big, there is no way
646 # TODO: anderson: if the view is not too big, there is no way
642 # to see the content of the file
647 # to see the content of the file
643 chunks = []
648 chunks = []
644
649
645 chunks.insert(0, [{
650 chunks.insert(
646 'old_lineno': '',
651 0, [{'old_lineno': '',
647 'new_lineno': '',
652 'new_lineno': '',
648 'action': Action.CONTEXT,
653 'action': Action.CONTEXT,
649 'line': msg,
654 'line': msg,
650 } for _op, msg in stats['ops'].iteritems()
655 } for _op, msg in stats['ops'].iteritems()
651 if _op not in [MOD_FILENODE]])
656 if _op not in [MOD_FILENODE]])
652
657
653 original_filename = safe_unicode(head['a_path'])
658 original_filename = safe_unicode(head['a_path'])
654 _files.append({
659 _files.append({
655 'original_filename': original_filename,
660 'original_filename': original_filename,
656 'filename': safe_unicode(head['b_path']),
661 'filename': safe_unicode(head['b_path']),
657 'old_revision': head['a_blob_id'],
662 'old_revision': head['a_blob_id'],
658 'new_revision': head['b_blob_id'],
663 'new_revision': head['b_blob_id'],
659 'chunks': chunks,
664 'chunks': chunks,
660 'raw_diff': safe_unicode(raw_diff),
665 'raw_diff': safe_unicode(raw_diff),
661 'operation': op,
666 'operation': op,
662 'stats': stats,
667 'stats': stats,
663 'exceeds_limit': exceeds_limit,
668 'exceeds_limit': exceeds_limit,
664 'is_limited_diff': limited_diff,
669 'is_limited_diff': limited_diff,
665 })
670 })
666
671
667
668 sorter = lambda info: {OPS.ADD: 0, OPS.MOD: 1,
672 sorter = lambda info: {OPS.ADD: 0, OPS.MOD: 1,
669 OPS.DEL: 2}.get(info['operation'])
673 OPS.DEL: 2}.get(info['operation'])
670
674
671 return diff_container(sorted(_files, key=sorter))
675 return diff_container(sorted(_files, key=sorter))
672
676
673 # FIXME: NEWDIFFS: dan: this gets replaced by _new_parse_lines
677 # FIXME: NEWDIFFS: dan: this gets replaced by _new_parse_lines
674 def _parse_lines(self, diff):
678 def _parse_lines(self, diff):
675 """
679 """
676 Parse the diff and return data for the template.
680 Parse the diff and return data for the template.
677 """
681 """
678
682
679 lineiter = iter(diff)
683 lineiter = iter(diff)
680 stats = [0, 0]
684 stats = [0, 0]
681 chunks = []
685 chunks = []
682 raw_diff = []
686 raw_diff = []
683
687
684 try:
688 try:
685 line = lineiter.next()
689 line = lineiter.next()
686
690
687 while line:
691 while line:
688 raw_diff.append(line)
692 raw_diff.append(line)
689 lines = []
693 lines = []
690 chunks.append(lines)
694 chunks.append(lines)
691
695
692 match = self._chunk_re.match(line)
696 match = self._chunk_re.match(line)
693
697
694 if not match:
698 if not match:
695 break
699 break
696
700
697 gr = match.groups()
701 gr = match.groups()
698 (old_line, old_end,
702 (old_line, old_end,
699 new_line, new_end) = [int(x or 1) for x in gr[:-1]]
703 new_line, new_end) = [int(x or 1) for x in gr[:-1]]
700 old_line -= 1
704 old_line -= 1
701 new_line -= 1
705 new_line -= 1
702
706
703 context = len(gr) == 5
707 context = len(gr) == 5
704 old_end += old_line
708 old_end += old_line
705 new_end += new_line
709 new_end += new_line
706
710
707 if context:
711 if context:
708 # skip context only if it's first line
712 # skip context only if it's first line
709 if int(gr[0]) > 1:
713 if int(gr[0]) > 1:
710 lines.append({
714 lines.append({
711 'old_lineno': '...',
715 'old_lineno': '...',
712 'new_lineno': '...',
716 'new_lineno': '...',
713 'action': Action.CONTEXT,
717 'action': Action.CONTEXT,
714 'line': line,
718 'line': line,
715 })
719 })
716
720
717 line = lineiter.next()
721 line = lineiter.next()
718
722
719 while old_line < old_end or new_line < new_end:
723 while old_line < old_end or new_line < new_end:
720 command = ' '
724 command = ' '
721 if line:
725 if line:
722 command = line[0]
726 command = line[0]
723
727
724 affects_old = affects_new = False
728 affects_old = affects_new = False
725
729
726 # ignore those if we don't expect them
730 # ignore those if we don't expect them
727 if command in '#@':
731 if command in '#@':
728 continue
732 continue
729 elif command == '+':
733 elif command == '+':
730 affects_new = True
734 affects_new = True
731 action = Action.ADD
735 action = Action.ADD
732 stats[0] += 1
736 stats[0] += 1
733 elif command == '-':
737 elif command == '-':
734 affects_old = True
738 affects_old = True
735 action = Action.DELETE
739 action = Action.DELETE
736 stats[1] += 1
740 stats[1] += 1
737 else:
741 else:
738 affects_old = affects_new = True
742 affects_old = affects_new = True
739 action = Action.UNMODIFIED
743 action = Action.UNMODIFIED
740
744
741 if not self._newline_marker.match(line):
745 if not self._newline_marker.match(line):
742 old_line += affects_old
746 old_line += affects_old
743 new_line += affects_new
747 new_line += affects_new
744 lines.append({
748 lines.append({
745 'old_lineno': affects_old and old_line or '',
749 'old_lineno': affects_old and old_line or '',
746 'new_lineno': affects_new and new_line or '',
750 'new_lineno': affects_new and new_line or '',
747 'action': action,
751 'action': action,
748 'line': self._clean_line(line, command)
752 'line': self._clean_line(line, command)
749 })
753 })
750 raw_diff.append(line)
754 raw_diff.append(line)
751
755
752 line = lineiter.next()
756 line = lineiter.next()
753
757
754 if self._newline_marker.match(line):
758 if self._newline_marker.match(line):
755 # we need to append to lines, since this is not
759 # we need to append to lines, since this is not
756 # counted in the line specs of diff
760 # counted in the line specs of diff
757 lines.append({
761 lines.append({
758 'old_lineno': '...',
762 'old_lineno': '...',
759 'new_lineno': '...',
763 'new_lineno': '...',
760 'action': Action.CONTEXT,
764 'action': Action.CONTEXT,
761 'line': self._clean_line(line, command)
765 'line': self._clean_line(line, command)
762 })
766 })
763
767
764 except StopIteration:
768 except StopIteration:
765 pass
769 pass
766 return ''.join(raw_diff), chunks, stats
770 return ''.join(raw_diff), chunks, stats
767
771
768 # FIXME: NEWDIFFS: dan: this replaces _parse_lines
772 # FIXME: NEWDIFFS: dan: this replaces _parse_lines
769 def _new_parse_lines(self, diff):
773 def _new_parse_lines(self, diff_iter):
770 """
774 """
771 Parse the diff and return data for the template.
775 Parse the diff and return data for the template.
772 """
776 """
773
777
774 lineiter = iter(diff)
775 stats = [0, 0]
778 stats = [0, 0]
776 chunks = []
779 chunks = []
777 raw_diff = []
780 raw_diff = []
778
781
782 diff_iter = imap(lambda s: safe_unicode(s), diff_iter)
783
779 try:
784 try:
780 line = lineiter.next()
785 line = diff_iter.next()
781
786
782 while line:
787 while line:
783 raw_diff.append(line)
788 raw_diff.append(line)
784 match = self._chunk_re.match(line)
789 match = self._chunk_re.match(line)
785
790
786 if not match:
791 if not match:
787 break
792 break
788
793
789 gr = match.groups()
794 gr = match.groups()
790 (old_line, old_end,
795 (old_line, old_end,
791 new_line, new_end) = [int(x or 1) for x in gr[:-1]]
796 new_line, new_end) = [int(x or 1) for x in gr[:-1]]
792
797
793 lines = []
798 lines = []
794 hunk = {
799 hunk = {
795 'section_header': gr[-1],
800 'section_header': gr[-1],
796 'source_start': old_line,
801 'source_start': old_line,
797 'source_length': old_end,
802 'source_length': old_end,
798 'target_start': new_line,
803 'target_start': new_line,
799 'target_length': new_end,
804 'target_length': new_end,
800 'lines': lines,
805 'lines': lines,
801 }
806 }
802 chunks.append(hunk)
807 chunks.append(hunk)
803
808
804 old_line -= 1
809 old_line -= 1
805 new_line -= 1
810 new_line -= 1
806
811
807 context = len(gr) == 5
812 context = len(gr) == 5
808 old_end += old_line
813 old_end += old_line
809 new_end += new_line
814 new_end += new_line
810
815
811 line = lineiter.next()
816 line = diff_iter.next()
812
817
813 while old_line < old_end or new_line < new_end:
818 while old_line < old_end or new_line < new_end:
814 command = ' '
819 command = ' '
815 if line:
820 if line:
816 command = line[0]
821 command = line[0]
817
822
818 affects_old = affects_new = False
823 affects_old = affects_new = False
819
824
820 # ignore those if we don't expect them
825 # ignore those if we don't expect them
821 if command in '#@':
826 if command in '#@':
822 continue
827 continue
823 elif command == '+':
828 elif command == '+':
824 affects_new = True
829 affects_new = True
825 action = Action.ADD
830 action = Action.ADD
826 stats[0] += 1
831 stats[0] += 1
827 elif command == '-':
832 elif command == '-':
828 affects_old = True
833 affects_old = True
829 action = Action.DELETE
834 action = Action.DELETE
830 stats[1] += 1
835 stats[1] += 1
831 else:
836 else:
832 affects_old = affects_new = True
837 affects_old = affects_new = True
833 action = Action.UNMODIFIED
838 action = Action.UNMODIFIED
834
839
835 if not self._newline_marker.match(line):
840 if not self._newline_marker.match(line):
836 old_line += affects_old
841 old_line += affects_old
837 new_line += affects_new
842 new_line += affects_new
838 lines.append({
843 lines.append({
839 'old_lineno': affects_old and old_line or '',
844 'old_lineno': affects_old and old_line or '',
840 'new_lineno': affects_new and new_line or '',
845 'new_lineno': affects_new and new_line or '',
841 'action': action,
846 'action': action,
842 'line': self._clean_line(line, command)
847 'line': self._clean_line(line, command)
843 })
848 })
844 raw_diff.append(line)
849 raw_diff.append(line)
845
850
846 line = lineiter.next()
851 line = diff_iter.next()
847
852
848 if self._newline_marker.match(line):
853 if self._newline_marker.match(line):
849 # we need to append to lines, since this is not
854 # we need to append to lines, since this is not
850 # counted in the line specs of diff
855 # counted in the line specs of diff
851 if affects_old:
856 if affects_old:
852 action = Action.OLD_NO_NL
857 action = Action.OLD_NO_NL
853 elif affects_new:
858 elif affects_new:
854 action = Action.NEW_NO_NL
859 action = Action.NEW_NO_NL
855 else:
860 else:
856 raise Exception('invalid context for no newline')
861 raise Exception('invalid context for no newline')
857
862
858 lines.append({
863 lines.append({
859 'old_lineno': None,
864 'old_lineno': None,
860 'new_lineno': None,
865 'new_lineno': None,
861 'action': action,
866 'action': action,
862 'line': self._clean_line(line, command)
867 'line': self._clean_line(line, command)
863 })
868 })
864
869
865 except StopIteration:
870 except StopIteration:
866 pass
871 pass
872
867 return ''.join(raw_diff), chunks, stats
873 return ''.join(raw_diff), chunks, stats
868
874
869 def _safe_id(self, idstring):
875 def _safe_id(self, idstring):
870 """Make a string safe for including in an id attribute.
876 """Make a string safe for including in an id attribute.
871
877
872 The HTML spec says that id attributes 'must begin with
878 The HTML spec says that id attributes 'must begin with
873 a letter ([A-Za-z]) and may be followed by any number
879 a letter ([A-Za-z]) and may be followed by any number
874 of letters, digits ([0-9]), hyphens ("-"), underscores
880 of letters, digits ([0-9]), hyphens ("-"), underscores
875 ("_"), colons (":"), and periods (".")'. These regexps
881 ("_"), colons (":"), and periods (".")'. These regexps
876 are slightly over-zealous, in that they remove colons
882 are slightly over-zealous, in that they remove colons
877 and periods unnecessarily.
883 and periods unnecessarily.
878
884
879 Whitespace is transformed into underscores, and then
885 Whitespace is transformed into underscores, and then
880 anything which is not a hyphen or a character that
886 anything which is not a hyphen or a character that
881 matches \w (alphanumerics and underscore) is removed.
887 matches \w (alphanumerics and underscore) is removed.
882
888
883 """
889 """
884 # Transform all whitespace to underscore
890 # Transform all whitespace to underscore
885 idstring = re.sub(r'\s', "_", '%s' % idstring)
891 idstring = re.sub(r'\s', "_", '%s' % idstring)
886 # Remove everything that is not a hyphen or a member of \w
892 # Remove everything that is not a hyphen or a member of \w
887 idstring = re.sub(r'(?!-)\W', "", idstring).lower()
893 idstring = re.sub(r'(?!-)\W', "", idstring).lower()
888 return idstring
894 return idstring
889
895
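# Illustrative example of the transformation described above, assuming
# `processor` is a DiffProcessor instance:
# >>> processor._safe_id('docs/My File (v2).rst')
# 'docsmy_file_v2rst'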
890 def prepare(self, inline_diff=True):
896 def prepare(self, inline_diff=True):
891 """
897 """
892 Prepare the passed udiff for HTML rendering.
898 Prepare the passed udiff for HTML rendering.
893
899
894 :return: A list of dicts with diff information.
900 :return: A list of dicts with diff information.
895 """
901 """
896 parsed = self._parser(inline_diff=inline_diff)
902 parsed = self._parser(inline_diff=inline_diff)
897 self.parsed = True
903 self.parsed = True
898 self.parsed_diff = parsed
904 self.parsed_diff = parsed
899 return parsed
905 return parsed
900
906
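# Hedged sketch of the prepared structure; the keys are the ones built by the
# parsers above and `processor` is an assumed DiffProcessor instance:
# >>> files = processor.prepare()
# >>> sorted(k for k in files[0] if k in ('chunks', 'filename', 'operation', 'stats'))
# ['chunks', 'filename', 'operation', 'stats']
# Depending on the parser in use, each entry of 'chunks' is either a plain list
# of line dicts or a hunk dict carrying the line dicts under its 'lines' key.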
901 def as_raw(self, diff_lines=None):
907 def as_raw(self, diff_lines=None):
902 """
908 """
903 Returns raw diff as a byte string
909 Returns raw diff as a byte string
904 """
910 """
905 return self._diff.raw
911 return self._diff.raw
906
912
907 def as_html(self, table_class='code-difftable', line_class='line',
913 def as_html(self, table_class='code-difftable', line_class='line',
908 old_lineno_class='lineno old', new_lineno_class='lineno new',
914 old_lineno_class='lineno old', new_lineno_class='lineno new',
909 code_class='code', enable_comments=False, parsed_lines=None):
915 code_class='code', enable_comments=False, parsed_lines=None):
910 """
916 """
911 Return the given diff as an HTML table with customized CSS classes
917 Return the given diff as an HTML table with customized CSS classes
912 """
918 """
913 def _link_to_if(condition, label, url):
919 def _link_to_if(condition, label, url):
914 """
920 """
915 Generates a link if the condition is met, or just the label if not.
921 Generates a link if the condition is met, or just the label if not.
916 """
922 """
917
923
918 if condition:
924 if condition:
919 return '''<a href="%(url)s" class="tooltip"
925 return '''<a href="%(url)s" class="tooltip"
920 title="%(title)s">%(label)s</a>''' % {
926 title="%(title)s">%(label)s</a>''' % {
921 'title': _('Click to select line'),
927 'title': _('Click to select line'),
922 'url': url,
928 'url': url,
923 'label': label
929 'label': label
924 }
930 }
925 else:
931 else:
926 return label
932 return label
927 if not self.parsed:
933 if not self.parsed:
928 self.prepare()
934 self.prepare()
929
935
930 diff_lines = self.parsed_diff
936 diff_lines = self.parsed_diff
931 if parsed_lines:
937 if parsed_lines:
932 diff_lines = parsed_lines
938 diff_lines = parsed_lines
933
939
934 _html_empty = True
940 _html_empty = True
935 _html = []
941 _html = []
936 _html.append('''<table class="%(table_class)s">\n''' % {
942 _html.append('''<table class="%(table_class)s">\n''' % {
937 'table_class': table_class
943 'table_class': table_class
938 })
944 })
939
945
940 for diff in diff_lines:
946 for diff in diff_lines:
941 for line in diff['chunks']:
947 for line in diff['chunks']:
942 _html_empty = False
948 _html_empty = False
943 for change in line:
949 for change in line:
944 _html.append('''<tr class="%(lc)s %(action)s">\n''' % {
950 _html.append('''<tr class="%(lc)s %(action)s">\n''' % {
945 'lc': line_class,
951 'lc': line_class,
946 'action': change['action']
952 'action': change['action']
947 })
953 })
948 anchor_old_id = ''
954 anchor_old_id = ''
949 anchor_new_id = ''
955 anchor_new_id = ''
950 anchor_old = "%(filename)s_o%(oldline_no)s" % {
956 anchor_old = "%(filename)s_o%(oldline_no)s" % {
951 'filename': self._safe_id(diff['filename']),
957 'filename': self._safe_id(diff['filename']),
952 'oldline_no': change['old_lineno']
958 'oldline_no': change['old_lineno']
953 }
959 }
954 anchor_new = "%(filename)s_n%(oldline_no)s" % {
960 anchor_new = "%(filename)s_n%(oldline_no)s" % {
955 'filename': self._safe_id(diff['filename']),
961 'filename': self._safe_id(diff['filename']),
956 'oldline_no': change['new_lineno']
962 'oldline_no': change['new_lineno']
957 }
963 }
958 cond_old = (change['old_lineno'] != '...' and
964 cond_old = (change['old_lineno'] != '...' and
959 change['old_lineno'])
965 change['old_lineno'])
960 cond_new = (change['new_lineno'] != '...' and
966 cond_new = (change['new_lineno'] != '...' and
961 change['new_lineno'])
967 change['new_lineno'])
962 if cond_old:
968 if cond_old:
963 anchor_old_id = 'id="%s"' % anchor_old
969 anchor_old_id = 'id="%s"' % anchor_old
964 if cond_new:
970 if cond_new:
965 anchor_new_id = 'id="%s"' % anchor_new
971 anchor_new_id = 'id="%s"' % anchor_new
966
972
967 if change['action'] != Action.CONTEXT:
973 if change['action'] != Action.CONTEXT:
968 anchor_link = True
974 anchor_link = True
969 else:
975 else:
970 anchor_link = False
976 anchor_link = False
971
977
972 ###########################################################
978 ###########################################################
973 # COMMENT ICONS
979 # COMMENT ICONS
974 ###########################################################
980 ###########################################################
975 _html.append('''\t<td class="add-comment-line"><span class="add-comment-content">''')
981 _html.append('''\t<td class="add-comment-line"><span class="add-comment-content">''')
976
982
977 if enable_comments and change['action'] != Action.CONTEXT:
983 if enable_comments and change['action'] != Action.CONTEXT:
978 _html.append('''<a href="#"><span class="icon-comment-add"></span></a>''')
984 _html.append('''<a href="#"><span class="icon-comment-add"></span></a>''')
979
985
980 _html.append('''</span></td><td class="comment-toggle tooltip" title="Toggle Comment Thread"><i class="icon-comment"></i></td>\n''')
986 _html.append('''</span></td><td class="comment-toggle tooltip" title="Toggle Comment Thread"><i class="icon-comment"></i></td>\n''')
981
987
982 ###########################################################
988 ###########################################################
983 # OLD LINE NUMBER
989 # OLD LINE NUMBER
984 ###########################################################
990 ###########################################################
985 _html.append('''\t<td %(a_id)s class="%(olc)s">''' % {
991 _html.append('''\t<td %(a_id)s class="%(olc)s">''' % {
986 'a_id': anchor_old_id,
992 'a_id': anchor_old_id,
987 'olc': old_lineno_class
993 'olc': old_lineno_class
988 })
994 })
989
995
990 _html.append('''%(link)s''' % {
996 _html.append('''%(link)s''' % {
991 'link': _link_to_if(anchor_link, change['old_lineno'],
997 'link': _link_to_if(anchor_link, change['old_lineno'],
992 '#%s' % anchor_old)
998 '#%s' % anchor_old)
993 })
999 })
994 _html.append('''</td>\n''')
1000 _html.append('''</td>\n''')
995 ###########################################################
1001 ###########################################################
996 # NEW LINE NUMBER
1002 # NEW LINE NUMBER
997 ###########################################################
1003 ###########################################################
998
1004
999 _html.append('''\t<td %(a_id)s class="%(nlc)s">''' % {
1005 _html.append('''\t<td %(a_id)s class="%(nlc)s">''' % {
1000 'a_id': anchor_new_id,
1006 'a_id': anchor_new_id,
1001 'nlc': new_lineno_class
1007 'nlc': new_lineno_class
1002 })
1008 })
1003
1009
1004 _html.append('''%(link)s''' % {
1010 _html.append('''%(link)s''' % {
1005 'link': _link_to_if(anchor_link, change['new_lineno'],
1011 'link': _link_to_if(anchor_link, change['new_lineno'],
1006 '#%s' % anchor_new)
1012 '#%s' % anchor_new)
1007 })
1013 })
1008 _html.append('''</td>\n''')
1014 _html.append('''</td>\n''')
1009 ###########################################################
1015 ###########################################################
1010 # CODE
1016 # CODE
1011 ###########################################################
1017 ###########################################################
1012 code_classes = [code_class]
1018 code_classes = [code_class]
1013 if (not enable_comments or
1019 if (not enable_comments or
1014 change['action'] == Action.CONTEXT):
1020 change['action'] == Action.CONTEXT):
1015 code_classes.append('no-comment')
1021 code_classes.append('no-comment')
1016 _html.append('\t<td class="%s">' % ' '.join(code_classes))
1022 _html.append('\t<td class="%s">' % ' '.join(code_classes))
1017 _html.append('''\n\t\t<pre>%(code)s</pre>\n''' % {
1023 _html.append('''\n\t\t<pre>%(code)s</pre>\n''' % {
1018 'code': change['line']
1024 'code': change['line']
1019 })
1025 })
1020
1026
1021 _html.append('''\t</td>''')
1027 _html.append('''\t</td>''')
1022 _html.append('''\n</tr>\n''')
1028 _html.append('''\n</tr>\n''')
1023 _html.append('''</table>''')
1029 _html.append('''</table>''')
1024 if _html_empty:
1030 if _html_empty:
1025 return None
1031 return None
1026 return ''.join(_html)
1032 return ''.join(_html)
1027
1033
1028 def stat(self):
1034 def stat(self):
1029 """
1035 """
1030 Returns a tuple of added and removed line counts for this instance
1036 Returns a tuple of added and removed line counts for this instance
1031 """
1037 """
1032 return self.adds, self.removes
1038 return self.adds, self.removes
1033
1039
    def get_context_of_line(
            self, path, diff_line=None, context_before=3, context_after=3):
        """
        Returns the context lines for the specified diff line.

        :type diff_line: :class:`DiffLineNumber`
        """
        assert self.parsed, "DiffProcessor is not initialized."

        if None not in diff_line:
            raise ValueError(
                "Cannot specify both line numbers: {}".format(diff_line))

        file_diff = self._get_file_diff(path)
        chunk, idx = self._find_chunk_line_index(file_diff, diff_line)

        first_line_to_include = max(idx - context_before, 0)
        first_line_after_context = idx + context_after + 1
        context_lines = chunk[first_line_to_include:first_line_after_context]

        line_contents = [
            _context_line(line) for line in context_lines
            if _is_diff_content(line)]
        # TODO: johbo: Interim fixup, the diff chunks drop the final newline.
        # Once they are fixed, we can drop this line here.
        if line_contents:
            line_contents[-1] = (
                line_contents[-1][0], line_contents[-1][1].rstrip('\n') + '\n')
        return line_contents

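    # Illustrative usage, not part of the original module; the path and the
    # line number below are hypothetical and ``processor`` stands for an
    # already parsed DiffProcessor instance:
    #
    #     context = processor.get_context_of_line(
    #         path='setup.py',
    #         diff_line=DiffLineNumber(old=None, new=42),
    #         context_before=2, context_after=2)
    #
    # At most one side of the DiffLineNumber may be set; passing both
    # triggers the ValueError above. The result is a list of
    # ``(action, line)`` tuples as produced by ``_context_line``.
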
    def find_context(self, path, context, offset=0):
        """
        Finds the given `context` inside of the diff.

        Use the parameter `offset` to specify which offset the target line
        has inside the given `context`; this way the correct diff line will
        be returned.

        :param offset: Specifies the offset of the main line within the
            given `context`.
        """
        if offset < 0 or offset >= len(context):
            raise ValueError(
                "Only non-negative values up to the length of the context "
                "minus one are allowed.")

        matches = []
        file_diff = self._get_file_diff(path)

        for chunk in file_diff['chunks']:
            context_iter = iter(context)
            for line_idx, line in enumerate(chunk):
                try:
                    if _context_line(line) == context_iter.next():
                        continue
                except StopIteration:
                    matches.append((line_idx, chunk))
                    context_iter = iter(context)

            # Increment position and trigger StopIteration
            # if we had a match at the end
            line_idx += 1
            try:
                context_iter.next()
            except StopIteration:
                matches.append((line_idx, chunk))

        effective_offset = len(context) - offset
        found_at_diff_lines = [
            _line_to_diff_line_number(chunk[idx - effective_offset])
            for idx, chunk in matches]

        return found_at_diff_lines

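    # Illustrative usage, not part of the original module; the context tuples
    # below are hypothetical and ``processor`` stands for an already parsed
    # DiffProcessor instance:
    #
    #     context = [
    #         (Action.UNMODIFIED, 'import os\n'),
    #         (Action.ADD, 'import sys\n'),
    #     ]
    #     hits = processor.find_context('setup.py', context, offset=1)
    #
    # Each hit is a DiffLineNumber pointing at the line ``offset`` positions
    # into the matched context, which is what the
    # ``effective_offset = len(context) - offset`` arithmetic above selects.
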
    def _get_file_diff(self, path):
        for file_diff in self.parsed_diff:
            if file_diff['filename'] == path:
                break
        else:
            raise FileNotInDiffException("File {} not in diff".format(path))
        return file_diff

    def _find_chunk_line_index(self, file_diff, diff_line):
        for chunk in file_diff['chunks']:
            for idx, line in enumerate(chunk):
                if line['old_lineno'] == diff_line.old:
                    return chunk, idx
                if line['new_lineno'] == diff_line.new:
                    return chunk, idx
        raise LineNotInDiffException(
            "The line {} is not part of the diff.".format(diff_line))


def _is_diff_content(line):
    return line['action'] in (
        Action.UNMODIFIED, Action.ADD, Action.DELETE)


def _context_line(line):
    return (line['action'], line['line'])


DiffLineNumber = collections.namedtuple('DiffLineNumber', ['old', 'new'])


def _line_to_diff_line_number(line):
    new_line_no = line['new_lineno'] or None
    old_line_no = line['old_lineno'] or None
    return DiffLineNumber(old=old_line_no, new=new_line_no)

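# Illustrative example, not part of the original module; the dict below is a
# hypothetical parsed diff line:
#
#     line = {'action': Action.UNMODIFIED, 'line': 'foo\n',
#             'old_lineno': 10, 'new_lineno': 12}
#     _line_to_diff_line_number(line)  # DiffLineNumber(old=10, new=12)
#
# Falsy line numbers (whatever the parser uses for the missing side of an
# added or removed line) are normalised to ``None`` by the ``or None``
# expressions above.
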
class FileNotInDiffException(Exception):
    """
    Raised when the context for a missing file is requested.

    If you request the context for a line in a file which is not part of the
    given diff, then this exception is raised.
    """


class LineNotInDiffException(Exception):
    """
    Raised when the context for a missing line is requested.

    If you request the context for a line in a file and this line is not
    part of the given diff, then this exception is raised.
    """


class DiffLimitExceeded(Exception):
    pass