diffs: use whole chunk diff to calculate if it's oversized or not....
dan - r2070:7939c6bf default
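Editor's note: the commit message and the new ini comments below indicate that oversized detection now works on the size of each file's whole raw diff chunk, with a separate overall limit (cut_off_limit_diff) and per-file limit (cut_off_limit_file). The sketch below only illustrates that idea; it is not RhodeCode's actual code, and all names in it are invented for the example.

# minimal sketch, assuming raw diff chunks are available as bytes per file
CUT_OFF_LIMIT_DIFF = 512000  # overall limit in bytes (cut_off_limit_diff)
CUT_OFF_LIMIT_FILE = 128000  # per-file limit in bytes (cut_off_limit_file)

def classify_diff(chunks, limit_diff=CUT_OFF_LIMIT_DIFF, limit_file=CUT_OFF_LIMIT_FILE):
    """chunks: list of (filename, raw_diff_bytes) for one commit or pull request."""
    files = {}
    total = 0
    for filename, raw in chunks:
        size = len(raw)  # whole chunk size, not a per-line estimate
        total += size
        # an individual file is rendered partially when its whole chunk is too big
        files[filename] = {'size': size, 'oversized': size > limit_file}
    # the diff as a whole is rendered partially when the summed chunk sizes
    # exceed the overall limit
    return {'total': total, 'oversized': total > limit_diff, 'files': files}

# example: a small file plus a ~210KB file -> that file is cut off, while the
# combined total still stays under the 512KB overall limit
demo = [('setup.py', b'+fix\n' * 200), ('data.txt', b'+x\n' * 70000)]
result = classify_diff(demo)
print(result['oversized'], result['files']['data.txt']['oversized'])  # False True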
@@ -1,733 +1,739 b''
1 1
2 2
3 3 ################################################################################
4 4 ## RHODECODE COMMUNITY EDITION CONFIGURATION ##
5 5 # The %(here)s variable will be replaced with the parent directory of this file#
6 6 ################################################################################
7 7
8 8 [DEFAULT]
9 9 debug = true
10 10
11 11 ################################################################################
12 12 ## EMAIL CONFIGURATION ##
13 13 ## Uncomment and replace with the email address which should receive ##
14 14 ## any error reports after an application crash ##
15 15 ## Additionally these settings will be used by the RhodeCode mailing system ##
16 16 ################################################################################
17 17
18 18 ## prefix all emails subjects with given prefix, helps filtering out emails
19 19 #email_prefix = [RhodeCode]
20 20
21 21 ## email FROM address all mails will be sent
22 22 #app_email_from = rhodecode-noreply@localhost
23 23
24 24 ## Uncomment and replace with the address which should receive any error report
25 25 ## note: using appenlight for error handling doesn't need this to be uncommented
26 26 #email_to = admin@localhost
27 27
28 28 ## in case of Application errors, send the error email from this address
29 29 #error_email_from = rhodecode_error@localhost
30 30
31 31 ## additional error message to be sent in case of server crash
32 32 #error_message =
33 33
34 34
35 35 #smtp_server = mail.server.com
36 36 #smtp_username =
37 37 #smtp_password =
38 38 #smtp_port =
39 39 #smtp_use_tls = false
40 40 #smtp_use_ssl = true
41 41 ## Specify available auth parameters here (e.g. LOGIN PLAIN CRAM-MD5, etc.)
42 42 #smtp_auth =
43 43
44 44 [server:main]
45 45 ## COMMON ##
46 46 host = 127.0.0.1
47 47 port = 5000
48 48
49 49 ##################################
50 50 ## WAITRESS WSGI SERVER ##
51 51 ## Recommended for Development ##
52 52 ##################################
53 53
54 54 use = egg:waitress#main
55 55 ## number of worker threads
56 56 threads = 5
57 57 ## MAX BODY SIZE 100GB
58 58 max_request_body_size = 107374182400
59 59 ## Use poll instead of select, fixes file descriptors limits problems.
60 60 ## May not work on old windows systems.
61 61 asyncore_use_poll = true
62 62
63 63
64 64 ##########################
65 65 ## GUNICORN WSGI SERVER ##
66 66 ##########################
67 67 ## run with gunicorn --log-config rhodecode.ini --paste rhodecode.ini
68 68
69 69 #use = egg:gunicorn#main
70 70 ## Sets the number of process workers. You must set `instance_id = *`
71 71 ## when this option is set to more than one worker, recommended
72 72 ## value is (2 * NUMBER_OF_CPUS + 1), eg 2CPU = 5 workers
73 73 ## The `instance_id = *` must be set in the [app:main] section below
74 74 #workers = 2
75 75 ## number of threads for each of the worker, must be set to 1 for gevent
76 76 ## generally recommended to be at 1
77 77 #threads = 1
78 78 ## process name
79 79 #proc_name = rhodecode
80 80 ## type of worker class, one of sync, gevent
81 81 ## for bigger setups it is recommended to use a worker class other than sync
82 82 #worker_class = sync
83 83 ## The maximum number of simultaneous clients. Valid only for Gevent
84 84 #worker_connections = 10
85 85 ## max number of requests that worker will handle before being gracefully
86 86 ## restarted, could prevent memory leaks
87 87 #max_requests = 1000
88 88 #max_requests_jitter = 30
89 89 ## amount of time a worker can spend with handling a request before it
90 90 ## gets killed and restarted. Set to 6hrs
91 91 #timeout = 21600
92 92
93 93
94 94 ## prefix middleware for RhodeCode.
95 95 ## recommended when using proxy setup.
96 96 ## allows to set RhodeCode under a prefix in server.
97 97 ## eg https://server.com/custom_prefix. Enable `filter-with =` option below as well.
98 98 ## And set your prefix like: `prefix = /custom_prefix`
99 99 ## be sure to also set beaker.session.cookie_path = /custom_prefix if you need
100 100 ## to make your cookies only work on prefix url
101 101 [filter:proxy-prefix]
102 102 use = egg:PasteDeploy#prefix
103 103 prefix = /
104 104
105 105 [app:main]
106 106 use = egg:rhodecode-enterprise-ce
107 107
108 108 ## enable proxy prefix middleware, defined above
109 109 #filter-with = proxy-prefix
110 110
111 111 # During development we want to have the debug toolbar enabled
112 112 pyramid.includes =
113 113 pyramid_debugtoolbar
114 114 rhodecode.utils.debugtoolbar
115 115 rhodecode.lib.middleware.request_wrapper
116 116
117 117 pyramid.reload_templates = true
118 118
119 119 debugtoolbar.hosts = 0.0.0.0/0
120 120 debugtoolbar.exclude_prefixes =
121 121 /css
122 122 /fonts
123 123 /images
124 124 /js
125 125
126 126 ## RHODECODE PLUGINS ##
127 127 rhodecode.includes =
128 128 rhodecode.api
129 129
130 130
131 131 # api prefix url
132 132 rhodecode.api.url = /_admin/api
133 133
134 134
135 135 ## END RHODECODE PLUGINS ##
136 136
137 137 ## encryption key used to encrypt social plugin tokens,
138 138 ## remote_urls with credentials etc, if not set it defaults to
139 139 ## `beaker.session.secret`
140 140 #rhodecode.encrypted_values.secret =
141 141
142 142 ## decryption strict mode (enabled by default). It controls if decryption raises
143 143 ## `SignatureVerificationError` in case of wrong key, or damaged encryption data.
144 144 #rhodecode.encrypted_values.strict = false
145 145
146 146 ## return gzipped responses from Rhodecode (static files/application)
147 147 gzip_responses = false
148 148
149 149 ## autogenerate javascript routes file on startup
150 150 generate_js_files = false
151 151
152 152 ## Optional Languages
153 153 ## en(default), be, de, es, fr, it, ja, pl, pt, ru, zh
154 154 lang = en
155 155
156 156 ## perform a full repository scan on each server start, this should be
157 157 ## set to false after first startup, to allow faster server restarts.
158 158 startup.import_repos = false
159 159
160 160 ## Uncomment and set this path to use archive download cache.
161 161 ## Once enabled, generated archives will be cached at this location
162 162 ## and served from the cache during subsequent requests for the same archive of
163 163 ## the repository.
164 164 #archive_cache_dir = /tmp/tarballcache
165 165
166 166 ## change this to unique ID for security
167 167 app_instance_uuid = rc-production
168 168
169 ## cut off limit for large diffs (size in bytes)
170 cut_off_limit_diff = 1024000
171 cut_off_limit_file = 256000
169 ## cut off limit for large diffs (size in bytes). If the overall diff size of a
170 ## commit or pull request exceeds this limit, the diff will be displayed
171 ## partially. E.g. 512000 == 512KB
172 cut_off_limit_diff = 512000
173
174 ## cut off limit for large files inside diffs (size in bytes). Each individual
175 ## file inside a diff which exceeds this limit will be displayed partially.
176 ## E.g. 128000 == 128KB
177 cut_off_limit_file = 128000
172 178
173 179 ## use cache version of scm repo everywhere
174 180 vcs_full_cache = true
175 181
176 182 ## force https in RhodeCode, fixes https redirects, assumes it's always https
177 183 ## Normally this is controlled by proper http flags sent from http server
178 184 force_https = false
179 185
180 186 ## use Strict-Transport-Security headers
181 187 use_htsts = false
182 188
183 189 ## number of commits stats will parse on each iteration
184 190 commit_parse_limit = 25
185 191
186 192 ## git rev filter option, --all is the default filter, if you need to
187 193 ## hide all refs in changelog switch this to --branches --tags
188 194 git_rev_filter = --branches --tags
189 195
190 196 # Set to true if your repos are exposed using the dumb protocol
191 197 git_update_server_info = false
192 198
193 199 ## RSS/ATOM feed options
194 200 rss_cut_off_limit = 256000
195 201 rss_items_per_page = 10
196 202 rss_include_diff = false
197 203
198 204 ## gist URL alias, used to create nicer urls for gist. This should be an
199 205 ## url that does rewrites to _admin/gists/{gistid}.
200 206 ## example: http://gist.rhodecode.org/{gistid}. Empty means use the internal
201 207 ## RhodeCode url, ie. http[s]://rhodecode.server/_admin/gists/{gistid}
202 208 gist_alias_url =
203 209
204 210 ## List of views (using glob pattern syntax) that AUTH TOKENS could be
205 211 ## used for access.
206 212 ## Adding ?auth_token=TOKEN_HASH to the url authenticates this request as if it
207 213 ## came from the logged-in user who owns this authentication token.
208 214 ## Additionally the @TOKEN syntax can be used to bind the view to a specific
209 215 ## authentication token. Such a view would only be accessible when used together
210 216 ## with this authentication token
211 217 ##
212 218 ## list of all views can be found under `/_admin/permissions/auth_token_access`
213 219 ## The list should be "," separated and on a single line.
214 220 ##
215 221 ## Most common views to enable:
216 222 # RepoCommitsView:repo_commit_download
217 223 # RepoCommitsView:repo_commit_patch
218 224 # RepoCommitsView:repo_commit_raw
219 225 # RepoCommitsView:repo_commit_raw@TOKEN
220 226 # RepoFilesView:repo_files_diff
221 227 # RepoFilesView:repo_archivefile
222 228 # RepoFilesView:repo_file_raw
223 229 # GistView:*
224 230 api_access_controllers_whitelist =
225 231
226 232 ## default encoding used to convert from and to unicode
227 233 ## can be also a comma separated list of encoding in case of mixed encodings
228 234 default_encoding = UTF-8
229 235
230 236 ## instance-id prefix
231 237 ## a prefix key for this instance used for cache invalidation when running
232 238 ## multiple instances of rhodecode, make sure it's globally unique for
233 239 ## all running rhodecode instances. Leave empty if you don't use it
234 240 instance_id =
235 241
236 242 ## Fallback authentication plugin. Set this to a plugin ID to force the usage
237 243 ## of an authentication plugin even if it is disabled by its settings.
238 244 ## This could be useful if you are unable to log in to the system due to broken
239 245 ## authentication settings. Then you can enable e.g. the internal rhodecode auth
240 246 ## module to log in again and fix the settings.
241 247 ##
242 248 ## Available builtin plugin IDs (hash is part of the ID):
243 249 ## egg:rhodecode-enterprise-ce#rhodecode
244 250 ## egg:rhodecode-enterprise-ce#pam
245 251 ## egg:rhodecode-enterprise-ce#ldap
246 252 ## egg:rhodecode-enterprise-ce#jasig_cas
247 253 ## egg:rhodecode-enterprise-ce#headers
248 254 ## egg:rhodecode-enterprise-ce#crowd
249 255 #rhodecode.auth_plugin_fallback = egg:rhodecode-enterprise-ce#rhodecode
250 256
251 257 ## alternative return HTTP header for failed authentication. Default HTTP
252 258 ## response is 401 HTTPUnauthorized. Currently HG clients have troubles with
253 259 ## handling that causing a series of failed authentication calls.
254 260 ## Set this variable to 403 to return HTTPForbidden, or any other HTTP code
255 261 ## This will be served instead of the default 401 on bad authentication
256 262 auth_ret_code =
257 263
258 264 ## use special detection method when serving auth_ret_code, instead of serving
259 265 ## ret_code directly, use 401 initially (Which triggers credentials prompt)
260 266 ## and then serve auth_ret_code to clients
261 267 auth_ret_code_detection = false
262 268
263 269 ## locking return code. When repository is locked return this HTTP code. 2XX
264 270 ## codes don't break the transactions while 4XX codes do
265 271 lock_ret_code = 423
266 272
267 273 ## allows to change the repository location in settings page
268 274 allow_repo_location_change = true
269 275
270 276 ## allows to setup custom hooks in settings page
271 277 allow_custom_hooks_settings = true
272 278
273 279 ## generated license token, goto license page in RhodeCode settings to obtain
274 280 ## new token
275 281 license_token =
276 282
277 283 ## supervisor connection uri, for managing supervisor and logs.
278 284 supervisor.uri =
279 285 ## supervisord group name/id we only want this RC instance to handle
280 286 supervisor.group_id = dev
281 287
282 288 ## Display extended labs settings
283 289 labs_settings_active = true
284 290
285 291 ####################################
286 292 ### CELERY CONFIG ####
287 293 ####################################
288 294 use_celery = false
289 295 broker.host = localhost
290 296 broker.vhost = rabbitmqhost
291 297 broker.port = 5672
292 298 broker.user = rabbitmq
293 299 broker.password = qweqwe
294 300
295 301 celery.imports = rhodecode.lib.celerylib.tasks
296 302
297 303 celery.result.backend = amqp
298 304 celery.result.dburi = amqp://
299 305 celery.result.serialier = json
300 306
301 307 #celery.send.task.error.emails = true
302 308 #celery.amqp.task.result.expires = 18000
303 309
304 310 celeryd.concurrency = 2
305 311 #celeryd.log.file = celeryd.log
306 312 celeryd.log.level = debug
307 313 celeryd.max.tasks.per.child = 1
308 314
309 315 ## tasks will never be sent to the queue, but executed locally instead.
310 316 celery.always.eager = false
311 317
312 318 ####################################
313 319 ### BEAKER CACHE ####
314 320 ####################################
315 321 # default cache dir for templates. Putting this into a ramdisk
316 322 ## can boost performance, eg. %(here)s/data_ramdisk
317 323 cache_dir = %(here)s/data
318 324
319 325 ## locking and default file storage for Beaker. Putting this into a ramdisk
320 326 ## can boost performance, eg. %(here)s/data_ramdisk/cache/beaker_data
321 327 beaker.cache.data_dir = %(here)s/data/cache/beaker_data
322 328 beaker.cache.lock_dir = %(here)s/data/cache/beaker_lock
323 329
324 330 beaker.cache.regions = super_short_term, short_term, long_term, sql_cache_short, auth_plugins, repo_cache_long
325 331
326 332 beaker.cache.super_short_term.type = memory
327 333 beaker.cache.super_short_term.expire = 10
328 334 beaker.cache.super_short_term.key_length = 256
329 335
330 336 beaker.cache.short_term.type = memory
331 337 beaker.cache.short_term.expire = 60
332 338 beaker.cache.short_term.key_length = 256
333 339
334 340 beaker.cache.long_term.type = memory
335 341 beaker.cache.long_term.expire = 36000
336 342 beaker.cache.long_term.key_length = 256
337 343
338 344 beaker.cache.sql_cache_short.type = memory
339 345 beaker.cache.sql_cache_short.expire = 10
340 346 beaker.cache.sql_cache_short.key_length = 256
341 347
342 348 ## default is memory cache, configure only if required
343 349 ## using multi-node or multi-worker setup
344 350 #beaker.cache.auth_plugins.type = ext:database
345 351 #beaker.cache.auth_plugins.lock_dir = %(here)s/data/cache/auth_plugin_lock
346 352 #beaker.cache.auth_plugins.url = postgresql://postgres:secret@localhost/rhodecode
347 353 #beaker.cache.auth_plugins.url = mysql://root:secret@127.0.0.1/rhodecode
348 354 #beaker.cache.auth_plugins.sa.pool_recycle = 3600
349 355 #beaker.cache.auth_plugins.sa.pool_size = 10
350 356 #beaker.cache.auth_plugins.sa.max_overflow = 0
351 357
352 358 beaker.cache.repo_cache_long.type = memorylru_base
353 359 beaker.cache.repo_cache_long.max_items = 4096
354 360 beaker.cache.repo_cache_long.expire = 2592000
355 361
356 362 ## default is memorylru_base cache, configure only if required
357 363 ## using multi-node or multi-worker setup
358 364 #beaker.cache.repo_cache_long.type = ext:memcached
359 365 #beaker.cache.repo_cache_long.url = localhost:11211
360 366 #beaker.cache.repo_cache_long.expire = 1209600
361 367 #beaker.cache.repo_cache_long.key_length = 256
362 368
363 369 ####################################
364 370 ### BEAKER SESSION ####
365 371 ####################################
366 372
367 373 ## .session.type is type of storage options for the session, current allowed
368 374 ## types are file, ext:memcached, ext:database, and memory (default).
369 375 beaker.session.type = file
370 376 beaker.session.data_dir = %(here)s/data/sessions/data
371 377
372 378 ## db based session, fast, and allows easy management over logged in users
373 379 #beaker.session.type = ext:database
374 380 #beaker.session.table_name = db_session
375 381 #beaker.session.sa.url = postgresql://postgres:secret@localhost/rhodecode
376 382 #beaker.session.sa.url = mysql://root:secret@127.0.0.1/rhodecode
377 383 #beaker.session.sa.pool_recycle = 3600
378 384 #beaker.session.sa.echo = false
379 385
380 386 beaker.session.key = rhodecode
381 387 beaker.session.secret = develop-rc-uytcxaz
382 388 beaker.session.lock_dir = %(here)s/data/sessions/lock
383 389
384 390 ## Secure encrypted cookie. Requires AES and AES python libraries
385 391 ## you must disable beaker.session.secret to use this
386 392 #beaker.session.encrypt_key = key_for_encryption
387 393 #beaker.session.validate_key = validation_key
388 394
389 395 ## sets session as invalid (also logging out the user) if it has not been
390 396 ## accessed for given amount of time in seconds
391 397 beaker.session.timeout = 2592000
392 398 beaker.session.httponly = true
393 399 ## Path to use for the cookie. Set to prefix if you use prefix middleware
394 400 #beaker.session.cookie_path = /custom_prefix
395 401
396 402 ## uncomment for https secure cookie
397 403 beaker.session.secure = false
398 404
399 405 ## auto save the session so that you do not have to call .save()
400 406 beaker.session.auto = false
401 407
402 408 ## default cookie expiration time in seconds, set to `true` to set expire
403 409 ## at browser close
404 410 #beaker.session.cookie_expires = 3600
405 411
406 412 ###################################
407 413 ## SEARCH INDEXING CONFIGURATION ##
408 414 ###################################
409 415 ## Full text search indexer is available in rhodecode-tools under
410 416 ## `rhodecode-tools index` command
411 417
412 418 ## WHOOSH Backend, doesn't require additional services to run
413 419 ## it works well with a few dozen repos
414 420 search.module = rhodecode.lib.index.whoosh
415 421 search.location = %(here)s/data/index
416 422
417 423 ########################################
418 424 ### CHANNELSTREAM CONFIG ####
419 425 ########################################
420 426 ## channelstream enables persistent connections and live notification
421 427 ## in the system. It's also used by the chat system
422 428 channelstream.enabled = false
423 429
424 430 ## server address for channelstream server on the backend
425 431 channelstream.server = 127.0.0.1:9800
426 432
427 433 ## location of the channelstream server from outside world
428 434 ## use ws:// for http or wss:// for https. This address needs to be handled
429 435 ## by external HTTP server such as Nginx or Apache
430 436 ## see nginx/apache configuration examples in our docs
431 437 channelstream.ws_url = ws://rhodecode.yourserver.com/_channelstream
432 438 channelstream.secret = secret
433 439 channelstream.history.location = %(here)s/channelstream_history
434 440
435 441 ## Internal application path that Javascript uses to connect into.
436 442 ## If you use proxy-prefix the prefix should be added before /_channelstream
437 443 channelstream.proxy_path = /_channelstream
438 444
439 445
440 446 ###################################
441 447 ## APPENLIGHT CONFIG ##
442 448 ###################################
443 449
444 450 ## Appenlight is tailored to work with RhodeCode, see
445 451 ## http://appenlight.com for details how to obtain an account
446 452
447 453 ## appenlight integration enabled
448 454 appenlight = false
449 455
450 456 appenlight.server_url = https://api.appenlight.com
451 457 appenlight.api_key = YOUR_API_KEY
452 458 #appenlight.transport_config = https://api.appenlight.com?threaded=1&timeout=5
453 459
454 460 # used for JS client
455 461 appenlight.api_public_key = YOUR_API_PUBLIC_KEY
456 462
457 463 ## TWEAK AMOUNT OF INFO SENT HERE
458 464
459 465 ## enables 404 error logging (default False)
460 466 appenlight.report_404 = false
461 467
462 468 ## time in seconds after request is considered being slow (default 1)
463 469 appenlight.slow_request_time = 1
464 470
465 471 ## record slow requests in application
466 472 ## (needs to be enabled for slow datastore recording and time tracking)
467 473 appenlight.slow_requests = true
468 474
469 475 ## enable hooking to application loggers
470 476 appenlight.logging = true
471 477
472 478 ## minimum log level for log capture
473 479 appenlight.logging.level = WARNING
474 480
475 481 ## send logs only from erroneous/slow requests
476 482 ## (saves API quota for intensive logging)
477 483 appenlight.logging_on_error = false
478 484
479 485 ## list of additional keywords that should be grabbed from the environ object
480 486 ## can be string with comma separated list of words in lowercase
481 487 ## (by default client will always send following info:
482 488 ## 'REMOTE_USER', 'REMOTE_ADDR', 'SERVER_NAME', 'CONTENT_TYPE' + all keys that
483 489 ## start with HTTP*; this list can be extended with additional keywords here
484 490 appenlight.environ_keys_whitelist =
485 491
486 492 ## list of keywords that should be blanked from request object
487 493 ## can be string with comma separated list of words in lowercase
488 494 ## (by default client will always blank keys that contain following words
489 495 ## 'password', 'passwd', 'pwd', 'auth_tkt', 'secret', 'csrf'
490 496 ## this list can be extended with additional keywords set here
491 497 appenlight.request_keys_blacklist =
492 498
493 499 ## list of namespaces that should be ignored when gathering log entries
494 500 ## can be string with comma separated list of namespaces
495 501 ## (by default the client ignores own entries: appenlight_client.client)
496 502 appenlight.log_namespace_blacklist =
497 503
498 504
499 505 ################################################################################
500 506 ## WARNING: *THE LINE BELOW MUST BE UNCOMMENTED ON A PRODUCTION ENVIRONMENT* ##
501 507 ## Debug mode will enable the interactive debugging tool, allowing ANYONE to ##
502 508 ## execute malicious code after an exception is raised. ##
503 509 ################################################################################
504 510 #set debug = false
505 511
506 512
507 513 ##############
508 514 ## STYLING ##
509 515 ##############
510 516 debug_style = true
511 517
512 518 ###########################################
513 519 ### MAIN RHODECODE DATABASE CONFIG ###
514 520 ###########################################
515 521 #sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode.db?timeout=30
516 522 #sqlalchemy.db1.url = postgresql://postgres:qweqwe@localhost/rhodecode
517 523 #sqlalchemy.db1.url = mysql://root:qweqwe@localhost/rhodecode
518 524 sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode.db?timeout=30
519 525
520 526 # see sqlalchemy docs for other advanced settings
521 527
522 528 ## print the sql statements to output
523 529 sqlalchemy.db1.echo = false
524 530 ## recycle the connections after this amount of seconds
525 531 sqlalchemy.db1.pool_recycle = 3600
526 532 sqlalchemy.db1.convert_unicode = true
527 533
528 534 ## the number of connections to keep open inside the connection pool.
529 535 ## 0 indicates no limit
530 536 #sqlalchemy.db1.pool_size = 5
531 537
532 538 ## the number of connections to allow in connection pool "overflow", that is
533 539 ## connections that can be opened above and beyond the pool_size setting,
534 540 ## which defaults to five.
535 541 #sqlalchemy.db1.max_overflow = 10
536 542
537 543
538 544 ##################
539 545 ### VCS CONFIG ###
540 546 ##################
541 547 vcs.server.enable = true
542 548 vcs.server = localhost:9900
543 549
544 550 ## Web server connectivity protocol, responsible for web based VCS operations
545 551 ## Available protocols are:
546 552 ## `http` - use http-rpc backend (default)
547 553 vcs.server.protocol = http
548 554
549 555 ## Push/Pull operations protocol, available options are:
550 556 ## `http` - use http-rpc backend (default)
551 557 ##
552 558 vcs.scm_app_implementation = http
553 559
554 560 ## Push/Pull operations hooks protocol, available options are:
555 561 ## `http` - use http-rpc backend (default)
556 562 vcs.hooks.protocol = http
557 563
558 564 vcs.server.log_level = debug
559 565 ## Start VCSServer with this instance as a subprocess, useful for development
560 566 vcs.start_server = true
561 567
562 568 ## List of enabled VCS backends, available options are:
563 569 ## `hg` - mercurial
564 570 ## `git` - git
565 571 ## `svn` - subversion
566 572 vcs.backends = hg, git, svn
567 573
568 574 vcs.connection_timeout = 3600
569 575 ## Compatibility version when creating SVN repositories. Defaults to newest version when commented out.
570 576 ## Available options are: pre-1.4-compatible, pre-1.5-compatible, pre-1.6-compatible, pre-1.8-compatible, pre-1.9-compatible
571 577 #vcs.svn.compatible_version = pre-1.8-compatible
572 578
573 579
574 580 ############################################################
575 581 ### Subversion proxy support (mod_dav_svn) ###
576 582 ### Maps RhodeCode repo groups into SVN paths for Apache ###
577 583 ############################################################
578 584 ## Enable or disable the config file generation.
579 585 svn.proxy.generate_config = false
580 586 ## Generate config file with `SVNListParentPath` set to `On`.
581 587 svn.proxy.list_parent_path = true
582 588 ## Set location and file name of generated config file.
583 589 svn.proxy.config_file_path = %(here)s/mod_dav_svn.conf
584 590 ## Used as a prefix to the `Location` block in the generated config file.
585 591 ## In most cases it should be set to `/`.
586 592 svn.proxy.location_root = /
587 593 ## Command to reload the mod dav svn configuration on change.
588 594 ## Example: `/etc/init.d/apache2 reload`
589 595 #svn.proxy.reload_cmd = /etc/init.d/apache2 reload
590 596 ## If the timeout expires before the reload command finishes, the command will
591 597 ## be killed. Setting it to zero means no timeout. Defaults to 10 seconds.
592 598 #svn.proxy.reload_timeout = 10
593 599
594 600 ############################################################
595 601 ### SSH Support Settings ###
596 602 ############################################################
597 603
598 604 ## Defines if the authorized_keys file should be written on any change of
599 605 ## user ssh keys, setting this to false also disables the possibility of adding
600 606 ## ssh keys for users from web interface.
601 607 ssh.generate_authorized_keyfile = false
602 608
603 609 ## Options for ssh, default is `no-pty,no-port-forwarding,no-X11-forwarding,no-agent-forwarding`
604 610 # ssh.authorized_keys_ssh_opts =
605 611
606 612 ## File to generate the authorized keys together with options
607 613 ## It is possible to have multiple key files specified in `sshd_config` e.g.
608 614 ## AuthorizedKeysFile %h/.ssh/authorized_keys %h/.ssh/authorized_keys_rhodecode
609 615 ssh.authorized_keys_file_path = ~/.ssh/authorized_keys_rhodecode
610 616
611 617 ## Command to execute the SSH wrapper. The binary is available in the
612 618 ## rhodecode installation directory.
613 619 ## e.g ~/.rccontrol/community-1/profile/bin/rcssh-wrapper
614 620 ssh.wrapper_cmd = ~/.rccontrol/community-1/rcssh-wrapper
615 621
616 622 ## Allow shell when executing the ssh-wrapper command
617 623 ssh.wrapper_cmd_allow_shell = false
618 624
619 625 ## Enables logging, and detailed output sent back to the client. Useful for
620 626 ## debugging, shouldn't be used in production.
621 627 ssh.enable_debug_logging = false
622 628
623 629 ## API KEY for a user who has access to fetch other users' permission information
624 630 ## most likely a super-admin account with some IP restrictions.
625 631 ssh.api_key =
626 632
627 633 ## API Host, the server address of RhodeCode instance that the api_key will
628 634 ## access
629 635 ssh.api_host = http://localhost
630 636
631 637 ## Paths to binary executables, by default they are the names, but we can
632 638 ## override them if we want to use a custom one
633 639 ssh.executable.hg = ~/.rccontrol/vcsserver-1/profile/bin/hg
634 640 ssh.executable.git = ~/.rccontrol/vcsserver-1/profile/bin/git
635 641 ssh.executable.svn = ~/.rccontrol/vcsserver-1/profile/bin/svnserve
636 642
637 643
638 644 ## Dummy marker to add new entries after.
639 645 ## Add any custom entries below. Please don't remove.
640 646 custom.conf = 1
641 647
642 648
643 649 ################################
644 650 ### LOGGING CONFIGURATION ####
645 651 ################################
646 652 [loggers]
647 653 keys = root, routes, rhodecode, sqlalchemy, beaker, templates, ssh_wrapper
648 654
649 655 [handlers]
650 656 keys = console, console_sql
651 657
652 658 [formatters]
653 659 keys = generic, color_formatter, color_formatter_sql
654 660
655 661 #############
656 662 ## LOGGERS ##
657 663 #############
658 664 [logger_root]
659 665 level = NOTSET
660 666 handlers = console
661 667
662 668 [logger_routes]
663 669 level = DEBUG
664 670 handlers =
665 671 qualname = routes.middleware
666 672 ## "level = DEBUG" logs the route matched and routing variables.
667 673 propagate = 1
668 674
669 675 [logger_beaker]
670 676 level = DEBUG
671 677 handlers =
672 678 qualname = beaker.container
673 679 propagate = 1
674 680
675 681 [logger_templates]
676 682 level = INFO
677 683 handlers =
678 684 qualname = pylons.templating
679 685 propagate = 1
680 686
681 687 [logger_rhodecode]
682 688 level = DEBUG
683 689 handlers =
684 690 qualname = rhodecode
685 691 propagate = 1
686 692
687 693 [logger_sqlalchemy]
688 694 level = INFO
689 695 handlers = console_sql
690 696 qualname = sqlalchemy.engine
691 697 propagate = 0
692 698
693 699 [logger_ssh_wrapper]
694 700 level = DEBUG
695 701 handlers =
696 702 qualname = ssh_wrapper
697 703 propagate = 1
698 704
699 705
700 706 ##############
701 707 ## HANDLERS ##
702 708 ##############
703 709
704 710 [handler_console]
705 711 class = StreamHandler
706 712 args = (sys.stderr, )
707 713 level = DEBUG
708 714 formatter = color_formatter
709 715
710 716 [handler_console_sql]
711 717 class = StreamHandler
712 718 args = (sys.stderr, )
713 719 level = DEBUG
714 720 formatter = color_formatter_sql
715 721
716 722 ################
717 723 ## FORMATTERS ##
718 724 ################
719 725
720 726 [formatter_generic]
721 727 class = rhodecode.lib.logging_formatter.ExceptionAwareFormatter
722 728 format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
723 729 datefmt = %Y-%m-%d %H:%M:%S
724 730
725 731 [formatter_color_formatter]
726 732 class = rhodecode.lib.logging_formatter.ColorFormatter
727 733 format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
728 734 datefmt = %Y-%m-%d %H:%M:%S
729 735
730 736 [formatter_color_formatter_sql]
731 737 class = rhodecode.lib.logging_formatter.ColorFormatterSql
732 738 format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
733 739 datefmt = %Y-%m-%d %H:%M:%S
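For reference, the two new cut-off settings shown above are plain byte counts in the [app:main] section. A minimal sketch of reading them back out of such a file with the standard library (the rhodecode.ini path is a placeholder; RhodeCode itself loads its settings through PasteDeploy rather than configparser):

import configparser

# interpolation=None keeps values such as %(here)s elsewhere in the file untouched
config = configparser.ConfigParser(interpolation=None)
config.read('rhodecode.ini')  # placeholder path for this example

app = config['app:main']
cut_off_limit_diff = app.getint('cut_off_limit_diff')  # overall diff limit, bytes
cut_off_limit_file = app.getint('cut_off_limit_file')  # per-file limit, bytes

print('overall diff limit: %d bytes (~%d KB)' % (cut_off_limit_diff, cut_off_limit_diff // 1000))
print('per-file limit: %d bytes (~%d KB)' % (cut_off_limit_file, cut_off_limit_file // 1000))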
@@ -1,702 +1,708 b''
1 1
2 2
3 3 ################################################################################
4 4 ## RHODECODE COMMUNITY EDITION CONFIGURATION ##
5 5 # The %(here)s variable will be replaced with the parent directory of this file#
6 6 ################################################################################
7 7
8 8 [DEFAULT]
9 9 debug = true
10 10
11 11 ################################################################################
12 12 ## EMAIL CONFIGURATION ##
13 13 ## Uncomment and replace with the email address which should receive ##
14 14 ## any error reports after an application crash ##
15 15 ## Additionally these settings will be used by the RhodeCode mailing system ##
16 16 ################################################################################
17 17
18 18 ## prefix all emails subjects with given prefix, helps filtering out emails
19 19 #email_prefix = [RhodeCode]
20 20
21 21 ## email FROM address all mails will be sent
22 22 #app_email_from = rhodecode-noreply@localhost
23 23
24 24 ## Uncomment and replace with the address which should receive any error report
25 25 ## note: using appenlight for error handling doesn't need this to be uncommented
26 26 #email_to = admin@localhost
27 27
28 28 ## in case of Application errors, send the error email from this address
29 29 #error_email_from = rhodecode_error@localhost
30 30
31 31 ## additional error message to be sent in case of server crash
32 32 #error_message =
33 33
34 34
35 35 #smtp_server = mail.server.com
36 36 #smtp_username =
37 37 #smtp_password =
38 38 #smtp_port =
39 39 #smtp_use_tls = false
40 40 #smtp_use_ssl = true
41 41 ## Specify available auth parameters here (e.g. LOGIN PLAIN CRAM-MD5, etc.)
42 42 #smtp_auth =
43 43
44 44 [server:main]
45 45 ## COMMON ##
46 46 host = 127.0.0.1
47 47 port = 5000
48 48
49 49 ##################################
50 50 ## WAITRESS WSGI SERVER ##
51 51 ## Recommended for Development ##
52 52 ##################################
53 53
54 54 #use = egg:waitress#main
55 55 ## number of worker threads
56 56 #threads = 5
57 57 ## MAX BODY SIZE 100GB
58 58 #max_request_body_size = 107374182400
59 59 ## Use poll instead of select, fixes file descriptors limits problems.
60 60 ## May not work on old windows systems.
61 61 #asyncore_use_poll = true
62 62
63 63
64 64 ##########################
65 65 ## GUNICORN WSGI SERVER ##
66 66 ##########################
67 67 ## run with gunicorn --log-config rhodecode.ini --paste rhodecode.ini
68 68
69 69 use = egg:gunicorn#main
70 70 ## Sets the number of process workers. You must set `instance_id = *`
71 71 ## when this option is set to more than one worker, recommended
72 72 ## value is (2 * NUMBER_OF_CPUS + 1), eg 2CPU = 5 workers
73 73 ## The `instance_id = *` must be set in the [app:main] section below
74 74 workers = 2
75 75 ## number of threads for each of the worker, must be set to 1 for gevent
76 76 ## generally recommended to be at 1
77 77 #threads = 1
78 78 ## process name
79 79 proc_name = rhodecode
80 80 ## type of worker class, one of sync, gevent
81 81 ## for bigger setups it is recommended to use a worker class other than sync
82 82 worker_class = sync
83 83 ## The maximum number of simultaneous clients. Valid only for Gevent
84 84 #worker_connections = 10
85 85 ## max number of requests that worker will handle before being gracefully
86 86 ## restarted, could prevent memory leaks
87 87 max_requests = 1000
88 88 max_requests_jitter = 30
89 89 ## amount of time a worker can spend with handling a request before it
90 90 ## gets killed and restarted. Set to 6hrs
91 91 timeout = 21600
92 92
93 93
94 94 ## prefix middleware for RhodeCode.
95 95 ## recommended when using proxy setup.
96 96 ## allows to set RhodeCode under a prefix in server.
97 97 ## eg https://server.com/custom_prefix. Enable `filter-with =` option below as well.
98 98 ## And set your prefix like: `prefix = /custom_prefix`
99 99 ## be sure to also set beaker.session.cookie_path = /custom_prefix if you need
100 100 ## to make your cookies only work on prefix url
101 101 [filter:proxy-prefix]
102 102 use = egg:PasteDeploy#prefix
103 103 prefix = /
104 104
105 105 [app:main]
106 106 use = egg:rhodecode-enterprise-ce
107 107
108 108 ## enable proxy prefix middleware, defined above
109 109 #filter-with = proxy-prefix
110 110
111 111 ## encryption key used to encrypt social plugin tokens,
112 112 ## remote_urls with credentials etc, if not set it defaults to
113 113 ## `beaker.session.secret`
114 114 #rhodecode.encrypted_values.secret =
115 115
116 116 ## decryption strict mode (enabled by default). It controls if decryption raises
117 117 ## `SignatureVerificationError` in case of wrong key, or damaged encryption data.
118 118 #rhodecode.encrypted_values.strict = false
119 119
120 120 ## return gzipped responses from Rhodecode (static files/application)
121 121 gzip_responses = false
122 122
123 123 ## autogenerate javascript routes file on startup
124 124 generate_js_files = false
125 125
126 126 ## Optional Languages
127 127 ## en(default), be, de, es, fr, it, ja, pl, pt, ru, zh
128 128 lang = en
129 129
130 130 ## perform a full repository scan on each server start, this should be
131 131 ## set to false after first startup, to allow faster server restarts.
132 132 startup.import_repos = false
133 133
134 134 ## Uncomment and set this path to use archive download cache.
135 135 ## Once enabled, generated archives will be cached at this location
136 136 ## and served from the cache during subsequent requests for the same archive of
137 137 ## the repository.
138 138 #archive_cache_dir = /tmp/tarballcache
139 139
140 140 ## change this to unique ID for security
141 141 app_instance_uuid = rc-production
142 142
143 ## cut off limit for large diffs (size in bytes)
144 cut_off_limit_diff = 1024000
145 cut_off_limit_file = 256000
143 ## cut off limit for large diffs (size in bytes). If the overall diff size of a
144 ## commit or pull request exceeds this limit, the diff will be displayed
145 ## partially. E.g. 512000 == 512KB
146 cut_off_limit_diff = 512000
147
148 ## cut off limit for large files inside diffs (size in bytes). Each individual
149 ## file inside a diff which exceeds this limit will be displayed partially.
150 ## E.g. 128000 == 128KB
151 cut_off_limit_file = 128000
146 152
147 153 ## use cache version of scm repo everywhere
148 154 vcs_full_cache = true
149 155
150 156 ## force https in RhodeCode, fixes https redirects, assumes it's always https
151 157 ## Normally this is controlled by proper http flags sent from http server
152 158 force_https = false
153 159
154 160 ## use Strict-Transport-Security headers
155 161 use_htsts = false
156 162
157 163 ## number of commits stats will parse on each iteration
158 164 commit_parse_limit = 25
159 165
160 166 ## git rev filter option, --all is the default filter, if you need to
161 167 ## hide all refs in changelog switch this to --branches --tags
162 168 git_rev_filter = --branches --tags
163 169
164 170 # Set to true if your repos are exposed using the dumb protocol
165 171 git_update_server_info = false
166 172
167 173 ## RSS/ATOM feed options
168 174 rss_cut_off_limit = 256000
169 175 rss_items_per_page = 10
170 176 rss_include_diff = false
171 177
172 178 ## gist URL alias, used to create nicer urls for gist. This should be an
173 179 ## url that does rewrites to _admin/gists/{gistid}.
174 180 ## example: http://gist.rhodecode.org/{gistid}. Empty means use the internal
175 181 ## RhodeCode url, ie. http[s]://rhodecode.server/_admin/gists/{gistid}
176 182 gist_alias_url =
177 183
178 184 ## List of views (using glob pattern syntax) that AUTH TOKENS could be
179 185 ## used for access.
180 186 ## Adding ?auth_token=TOKEN_HASH to the url authenticates this request as if it
181 187 ## came from the logged-in user who owns this authentication token.
182 188 ## Additionally the @TOKEN syntax can be used to bind the view to a specific
183 189 ## authentication token. Such a view would only be accessible when used together
184 190 ## with this authentication token
185 191 ##
186 192 ## list of all views can be found under `/_admin/permissions/auth_token_access`
187 193 ## The list should be "," separated and on a single line.
188 194 ##
189 195 ## Most common views to enable:
190 196 # RepoCommitsView:repo_commit_download
191 197 # RepoCommitsView:repo_commit_patch
192 198 # RepoCommitsView:repo_commit_raw
193 199 # RepoCommitsView:repo_commit_raw@TOKEN
194 200 # RepoFilesView:repo_files_diff
195 201 # RepoFilesView:repo_archivefile
196 202 # RepoFilesView:repo_file_raw
197 203 # GistView:*
198 204 api_access_controllers_whitelist =
199 205
200 206 ## default encoding used to convert from and to unicode
201 207 ## can be also a comma separated list of encoding in case of mixed encodings
202 208 default_encoding = UTF-8
203 209
204 210 ## instance-id prefix
205 211 ## a prefix key for this instance used for cache invalidation when running
206 212 ## multiple instances of rhodecode, make sure it's globally unique for
207 213 ## all running rhodecode instances. Leave empty if you don't use it
208 214 instance_id =
209 215
210 216 ## Fallback authentication plugin. Set this to a plugin ID to force the usage
211 217 ## of an authentication plugin even if it is disabled by its settings.
212 218 ## This could be useful if you are unable to log in to the system due to broken
213 219 ## authentication settings. Then you can enable e.g. the internal rhodecode auth
214 220 ## module to log in again and fix the settings.
215 221 ##
216 222 ## Available builtin plugin IDs (hash is part of the ID):
217 223 ## egg:rhodecode-enterprise-ce#rhodecode
218 224 ## egg:rhodecode-enterprise-ce#pam
219 225 ## egg:rhodecode-enterprise-ce#ldap
220 226 ## egg:rhodecode-enterprise-ce#jasig_cas
221 227 ## egg:rhodecode-enterprise-ce#headers
222 228 ## egg:rhodecode-enterprise-ce#crowd
223 229 #rhodecode.auth_plugin_fallback = egg:rhodecode-enterprise-ce#rhodecode
224 230
225 231 ## alternative return HTTP header for failed authentication. Default HTTP
226 232 ## response is 401 HTTPUnauthorized. Currently HG clients have troubles with
227 233 ## handling that causing a series of failed authentication calls.
228 234 ## Set this variable to 403 to return HTTPForbidden, or any other HTTP code
229 235 ## This will be served instead of the default 401 on bad authentication
230 236 auth_ret_code =
231 237
232 238 ## use special detection method when serving auth_ret_code, instead of serving
233 239 ## ret_code directly, use 401 initially (Which triggers credentials prompt)
234 240 ## and then serve auth_ret_code to clients
235 241 auth_ret_code_detection = false
236 242
237 243 ## locking return code. When repository is locked return this HTTP code. 2XX
238 244 ## codes don't break the transactions while 4XX codes do
239 245 lock_ret_code = 423
240 246
241 247 ## allows to change the repository location in settings page
242 248 allow_repo_location_change = true
243 249
244 250 ## allows to setup custom hooks in settings page
245 251 allow_custom_hooks_settings = true
246 252
247 253 ## generated license token, goto license page in RhodeCode settings to obtain
248 254 ## new token
249 255 license_token =
250 256
251 257 ## supervisor connection uri, for managing supervisor and logs.
252 258 supervisor.uri =
253 259 ## supervisord group name/id we only want this RC instance to handle
254 260 supervisor.group_id = prod
255 261
256 262 ## Display extended labs settings
257 263 labs_settings_active = true
258 264
259 265 ####################################
260 266 ### CELERY CONFIG ####
261 267 ####################################
262 268 use_celery = false
263 269 broker.host = localhost
264 270 broker.vhost = rabbitmqhost
265 271 broker.port = 5672
266 272 broker.user = rabbitmq
267 273 broker.password = qweqwe
268 274
269 275 celery.imports = rhodecode.lib.celerylib.tasks
270 276
271 277 celery.result.backend = amqp
272 278 celery.result.dburi = amqp://
273 279 celery.result.serialier = json
274 280
275 281 #celery.send.task.error.emails = true
276 282 #celery.amqp.task.result.expires = 18000
277 283
278 284 celeryd.concurrency = 2
279 285 #celeryd.log.file = celeryd.log
280 286 celeryd.log.level = debug
281 287 celeryd.max.tasks.per.child = 1
282 288
283 289 ## tasks will never be sent to the queue, but executed locally instead.
284 290 celery.always.eager = false
285 291
286 292 ####################################
287 293 ### BEAKER CACHE ####
288 294 ####################################
289 295 # default cache dir for templates. Putting this into a ramdisk
290 296 ## can boost performance, eg. %(here)s/data_ramdisk
291 297 cache_dir = %(here)s/data
292 298
293 299 ## locking and default file storage for Beaker. Putting this into a ramdisk
294 300 ## can boost performance, eg. %(here)s/data_ramdisk/cache/beaker_data
295 301 beaker.cache.data_dir = %(here)s/data/cache/beaker_data
296 302 beaker.cache.lock_dir = %(here)s/data/cache/beaker_lock
297 303
298 304 beaker.cache.regions = super_short_term, short_term, long_term, sql_cache_short, auth_plugins, repo_cache_long
299 305
300 306 beaker.cache.super_short_term.type = memory
301 307 beaker.cache.super_short_term.expire = 10
302 308 beaker.cache.super_short_term.key_length = 256
303 309
304 310 beaker.cache.short_term.type = memory
305 311 beaker.cache.short_term.expire = 60
306 312 beaker.cache.short_term.key_length = 256
307 313
308 314 beaker.cache.long_term.type = memory
309 315 beaker.cache.long_term.expire = 36000
310 316 beaker.cache.long_term.key_length = 256
311 317
312 318 beaker.cache.sql_cache_short.type = memory
313 319 beaker.cache.sql_cache_short.expire = 10
314 320 beaker.cache.sql_cache_short.key_length = 256
315 321
316 322 ## default is memory cache, configure only if required
317 323 ## using multi-node or multi-worker setup
318 324 #beaker.cache.auth_plugins.type = ext:database
319 325 #beaker.cache.auth_plugins.lock_dir = %(here)s/data/cache/auth_plugin_lock
320 326 #beaker.cache.auth_plugins.url = postgresql://postgres:secret@localhost/rhodecode
321 327 #beaker.cache.auth_plugins.url = mysql://root:secret@127.0.0.1/rhodecode
322 328 #beaker.cache.auth_plugins.sa.pool_recycle = 3600
323 329 #beaker.cache.auth_plugins.sa.pool_size = 10
324 330 #beaker.cache.auth_plugins.sa.max_overflow = 0
325 331
326 332 beaker.cache.repo_cache_long.type = memorylru_base
327 333 beaker.cache.repo_cache_long.max_items = 4096
328 334 beaker.cache.repo_cache_long.expire = 2592000
329 335
330 336 ## default is memorylru_base cache, configure only if required
331 337 ## using multi-node or multi-worker setup
332 338 #beaker.cache.repo_cache_long.type = ext:memcached
333 339 #beaker.cache.repo_cache_long.url = localhost:11211
334 340 #beaker.cache.repo_cache_long.expire = 1209600
335 341 #beaker.cache.repo_cache_long.key_length = 256
336 342
337 343 ####################################
338 344 ### BEAKER SESSION ####
339 345 ####################################
340 346
341 347 ## .session.type is type of storage options for the session, current allowed
342 348 ## types are file, ext:memcached, ext:database, and memory (default).
343 349 beaker.session.type = file
344 350 beaker.session.data_dir = %(here)s/data/sessions/data
345 351
346 352 ## db based session, fast, and allows easy management over logged in users
347 353 #beaker.session.type = ext:database
348 354 #beaker.session.table_name = db_session
349 355 #beaker.session.sa.url = postgresql://postgres:secret@localhost/rhodecode
350 356 #beaker.session.sa.url = mysql://root:secret@127.0.0.1/rhodecode
351 357 #beaker.session.sa.pool_recycle = 3600
352 358 #beaker.session.sa.echo = false
353 359
354 360 beaker.session.key = rhodecode
355 361 beaker.session.secret = production-rc-uytcxaz
356 362 beaker.session.lock_dir = %(here)s/data/sessions/lock
357 363
358 364 ## Secure encrypted cookie. Requires AES and AES python libraries
359 365 ## you must disable beaker.session.secret to use this
360 366 #beaker.session.encrypt_key = key_for_encryption
361 367 #beaker.session.validate_key = validation_key
362 368
363 369 ## sets session as invalid (also logging out the user) if it has not been
364 370 ## accessed for given amount of time in seconds
365 371 beaker.session.timeout = 2592000
366 372 beaker.session.httponly = true
367 373 ## Path to use for the cookie. Set to prefix if you use prefix middleware
368 374 #beaker.session.cookie_path = /custom_prefix
369 375
370 376 ## uncomment for https secure cookie
371 377 beaker.session.secure = false
372 378
373 379 ## auto save the session so that you do not have to call .save()
374 380 beaker.session.auto = false
375 381
376 382 ## default cookie expiration time in seconds, set to `true` to set expire
377 383 ## at browser close
378 384 #beaker.session.cookie_expires = 3600
379 385
380 386 ###################################
381 387 ## SEARCH INDEXING CONFIGURATION ##
382 388 ###################################
383 389 ## Full text search indexer is available in rhodecode-tools under
384 390 ## `rhodecode-tools index` command
385 391
386 392 ## WHOOSH Backend, doesn't require additional services to run
387 393 ## it works well with a few dozen repos
388 394 search.module = rhodecode.lib.index.whoosh
389 395 search.location = %(here)s/data/index
390 396
391 397 ########################################
392 398 ### CHANNELSTREAM CONFIG ####
393 399 ########################################
394 400 ## channelstream enables persistent connections and live notification
395 401 ## in the system. It's also used by the chat system
396 402 channelstream.enabled = false
397 403
398 404 ## server address for channelstream server on the backend
399 405 channelstream.server = 127.0.0.1:9800
400 406
401 407 ## location of the channelstream server from outside world
402 408 ## use ws:// for http or wss:// for https. This address needs to be handled
403 409 ## by external HTTP server such as Nginx or Apache
404 410 ## see nginx/apache configuration examples in our docs
405 411 channelstream.ws_url = ws://rhodecode.yourserver.com/_channelstream
406 412 channelstream.secret = secret
407 413 channelstream.history.location = %(here)s/channelstream_history
408 414
409 415 ## Internal application path that Javascript uses to connect into.
410 416 ## If you use proxy-prefix the prefix should be added before /_channelstream
411 417 channelstream.proxy_path = /_channelstream
412 418
413 419
414 420 ###################################
415 421 ## APPENLIGHT CONFIG ##
416 422 ###################################
417 423
418 424 ## Appenlight is tailored to work with RhodeCode, see
419 425 ## http://appenlight.com for details how to obtain an account
420 426
421 427 ## appenlight integration enabled
422 428 appenlight = false
423 429
424 430 appenlight.server_url = https://api.appenlight.com
425 431 appenlight.api_key = YOUR_API_KEY
426 432 #appenlight.transport_config = https://api.appenlight.com?threaded=1&timeout=5
427 433
428 434 # used for JS client
429 435 appenlight.api_public_key = YOUR_API_PUBLIC_KEY
430 436
431 437 ## TWEAK AMOUNT OF INFO SENT HERE
432 438
433 439 ## enables 404 error logging (default False)
434 440 appenlight.report_404 = false
435 441
436 442 ## time in seconds after request is considered being slow (default 1)
437 443 appenlight.slow_request_time = 1
438 444
439 445 ## record slow requests in application
440 446 ## (needs to be enabled for slow datastore recording and time tracking)
441 447 appenlight.slow_requests = true
442 448
443 449 ## enable hooking to application loggers
444 450 appenlight.logging = true
445 451
446 452 ## minimum log level for log capture
447 453 appenlight.logging.level = WARNING
448 454
449 455 ## send logs only from erroneous/slow requests
450 456 ## (saves API quota for intensive logging)
451 457 appenlight.logging_on_error = false
452 458
453 459 ## list of additional keywords that should be grabbed from the environ object
454 460 ## can be string with comma separated list of words in lowercase
455 461 ## (by default client will always send following info:
456 462 ## 'REMOTE_USER', 'REMOTE_ADDR', 'SERVER_NAME', 'CONTENT_TYPE' + all keys that
457 463 ## start with HTTP*; this list can be extended with additional keywords here
458 464 appenlight.environ_keys_whitelist =
459 465
460 466 ## list of keywords that should be blanked from request object
461 467 ## can be string with comma separated list of words in lowercase
462 468 ## (by default client will always blank keys that contain following words
463 469 ## 'password', 'passwd', 'pwd', 'auth_tkt', 'secret', 'csrf'
464 470 ## this list can be extended with additional keywords set here
465 471 appenlight.request_keys_blacklist =
466 472
467 473 ## list of namespaces that should be ignored when gathering log entries
468 474 ## can be string with comma separated list of namespaces
469 475 ## (by default the client ignores own entries: appenlight_client.client)
470 476 appenlight.log_namespace_blacklist =
471 477
472 478
473 479 ################################################################################
474 480 ## WARNING: *THE LINE BELOW MUST BE UNCOMMENTED ON A PRODUCTION ENVIRONMENT* ##
475 481 ## Debug mode will enable the interactive debugging tool, allowing ANYONE to ##
476 482 ## execute malicious code after an exception is raised. ##
477 483 ################################################################################
478 484 set debug = false
479 485
480 486
481 487 ###########################################
482 488 ### MAIN RHODECODE DATABASE CONFIG ###
483 489 ###########################################
484 490 #sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode.db?timeout=30
485 491 #sqlalchemy.db1.url = postgresql://postgres:qweqwe@localhost/rhodecode
486 492 #sqlalchemy.db1.url = mysql://root:qweqwe@localhost/rhodecode
487 493 sqlalchemy.db1.url = postgresql://postgres:qweqwe@localhost/rhodecode
488 494
489 495 # see sqlalchemy docs for other advanced settings
490 496
491 497 ## print the sql statements to output
492 498 sqlalchemy.db1.echo = false
493 499 ## recycle the connections after this amount of seconds
494 500 sqlalchemy.db1.pool_recycle = 3600
495 501 sqlalchemy.db1.convert_unicode = true
496 502
497 503 ## the number of connections to keep open inside the connection pool.
498 504 ## 0 indicates no limit
499 505 #sqlalchemy.db1.pool_size = 5
500 506
501 507 ## the number of connections to allow in connection pool "overflow", that is
502 508 ## connections that can be opened above and beyond the pool_size setting,
503 509 ## which defaults to five.
504 510 #sqlalchemy.db1.max_overflow = 10
505 511
506 512
507 513 ##################
508 514 ### VCS CONFIG ###
509 515 ##################
510 516 vcs.server.enable = true
511 517 vcs.server = localhost:9900
512 518
513 519 ## Web server connectivity protocol, responsible for web based VCS operations
514 520 ## Available protocols are:
515 521 ## `http` - use http-rpc backend (default)
516 522 vcs.server.protocol = http
517 523
518 524 ## Push/Pull operations protocol, available options are:
519 525 ## `http` - use http-rpc backend (default)
520 526 ##
521 527 vcs.scm_app_implementation = http
522 528
523 529 ## Push/Pull operations hooks protocol, available options are:
524 530 ## `http` - use http-rpc backend (default)
525 531 vcs.hooks.protocol = http
526 532
527 533 vcs.server.log_level = info
528 534 ## Start VCSServer with this instance as a subprocess, useful for development
529 535 vcs.start_server = false
530 536
531 537 ## List of enabled VCS backends, available options are:
532 538 ## `hg` - mercurial
533 539 ## `git` - git
534 540 ## `svn` - subversion
535 541 vcs.backends = hg, git, svn
536 542
537 543 vcs.connection_timeout = 3600
538 544 ## Compatibility version when creating SVN repositories. Defaults to newest version when commented out.
539 545 ## Available options are: pre-1.4-compatible, pre-1.5-compatible, pre-1.6-compatible, pre-1.8-compatible, pre-1.9-compatible
540 546 #vcs.svn.compatible_version = pre-1.8-compatible
541 547
542 548
543 549 ############################################################
544 550 ### Subversion proxy support (mod_dav_svn) ###
545 551 ### Maps RhodeCode repo groups into SVN paths for Apache ###
546 552 ############################################################
547 553 ## Enable or disable the config file generation.
548 554 svn.proxy.generate_config = false
549 555 ## Generate config file with `SVNListParentPath` set to `On`.
550 556 svn.proxy.list_parent_path = true
551 557 ## Set location and file name of generated config file.
552 558 svn.proxy.config_file_path = %(here)s/mod_dav_svn.conf
553 559 ## Used as a prefix to the `Location` block in the generated config file.
554 560 ## In most cases it should be set to `/`.
555 561 svn.proxy.location_root = /
556 562 ## Command to reload the mod dav svn configuration on change.
557 563 ## Example: `/etc/init.d/apache2 reload`
558 564 #svn.proxy.reload_cmd = /etc/init.d/apache2 reload
559 565 ## If the timeout expires before the reload command finishes, the command will
560 566 ## be killed. Setting it to zero means no timeout. Defaults to 10 seconds.
561 567 #svn.proxy.reload_timeout = 10
562 568
563 569 ############################################################
564 570 ### SSH Support Settings ###
565 571 ############################################################
566 572
567 573 ## Defines if the authorized_keys file should be written on any change of
568 574 ## user ssh keys, setting this to false also disables the possibility of adding
569 575 ## ssh keys for users from web interface.
570 576 ssh.generate_authorized_keyfile = false
571 577
572 578 ## Options for ssh, default is `no-pty,no-port-forwarding,no-X11-forwarding,no-agent-forwarding`
573 579 # ssh.authorized_keys_ssh_opts =
574 580
575 581 ## File in which to generate the authorized keys together with options.
576 582 ## It is possible to have multiple key files specified in `sshd_config`, e.g.
577 583 ## AuthorizedKeysFile %h/.ssh/authorized_keys %h/.ssh/authorized_keys_rhodecode
578 584 ssh.authorized_keys_file_path = ~/.ssh/authorized_keys_rhodecode
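##
## A hedged illustration of what a generated entry in that file may look like
## (the wrapper arguments and the key material are placeholders; the exact line
## is produced by RhodeCode):
##
##   no-pty,no-port-forwarding,no-X11-forwarding,no-agent-forwarding,command="~/.rccontrol/community-1/rcssh-wrapper <generated-args>" ssh-rsa AAAA... user@host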
579 585
580 586 ## Command to execute the SSH wrapper. The binary is available in the
581 587 ## rhodecode installation directory.
582 588 ## e.g. ~/.rccontrol/community-1/profile/bin/rcssh-wrapper
583 589 ssh.wrapper_cmd = ~/.rccontrol/community-1/rcssh-wrapper
584 590
585 591 ## Allow shell when executing the ssh-wrapper command
586 592 ssh.wrapper_cmd_allow_shell = false
587 593
588 594 ## Enables logging and sends detailed output back to the client. Useful for
589 595 ## debugging; shouldn't be used in production.
590 596 ssh.enable_debug_logging = false
591 597
592 598 ## API key for a user who has access to fetch other users' permission information,
593 599 ## most likely a super-admin account with some IP restrictions.
594 600 ssh.api_key =
595 601
596 602 ## API host, the server address of the RhodeCode instance that the api_key will
597 603 ## access.
598 604 ssh.api_host = http://localhost
599 605
600 606 ## Paths to binary executables; by default just the names are used, but they can
601 607 ## be overridden if we want to use custom ones.
602 608 ssh.executable.hg = ~/.rccontrol/vcsserver-1/profile/bin/hg
603 609 ssh.executable.git = ~/.rccontrol/vcsserver-1/profile/bin/git
604 610 ssh.executable.svn = ~/.rccontrol/vcsserver-1/profile/bin/svnserve
605 611
606 612
607 613 ## Dummy marker to add new entries after.
608 614 ## Add any custom entries below. Please don't remove.
609 615 custom.conf = 1
610 616
611 617
612 618 ################################
613 619 ### LOGGING CONFIGURATION ####
614 620 ################################
615 621 [loggers]
616 622 keys = root, routes, rhodecode, sqlalchemy, beaker, templates, ssh_wrapper
617 623
618 624 [handlers]
619 625 keys = console, console_sql
620 626
621 627 [formatters]
622 628 keys = generic, color_formatter, color_formatter_sql
623 629
624 630 #############
625 631 ## LOGGERS ##
626 632 #############
627 633 [logger_root]
628 634 level = NOTSET
629 635 handlers = console
630 636
631 637 [logger_routes]
632 638 level = DEBUG
633 639 handlers =
634 640 qualname = routes.middleware
635 641 ## "level = DEBUG" logs the route matched and routing variables.
636 642 propagate = 1
637 643
638 644 [logger_beaker]
639 645 level = DEBUG
640 646 handlers =
641 647 qualname = beaker.container
642 648 propagate = 1
643 649
644 650 [logger_templates]
645 651 level = INFO
646 652 handlers =
647 653 qualname = pylons.templating
648 654 propagate = 1
649 655
650 656 [logger_rhodecode]
651 657 level = DEBUG
652 658 handlers =
653 659 qualname = rhodecode
654 660 propagate = 1
655 661
656 662 [logger_sqlalchemy]
657 663 level = INFO
658 664 handlers = console_sql
659 665 qualname = sqlalchemy.engine
660 666 propagate = 0
661 667
662 668 [logger_ssh_wrapper]
663 669 level = DEBUG
664 670 handlers =
665 671 qualname = ssh_wrapper
666 672 propagate = 1
667 673
668 674
669 675 ##############
670 676 ## HANDLERS ##
671 677 ##############
672 678
673 679 [handler_console]
674 680 class = StreamHandler
675 681 args = (sys.stderr, )
676 682 level = INFO
677 683 formatter = generic
678 684
679 685 [handler_console_sql]
680 686 class = StreamHandler
681 687 args = (sys.stderr, )
682 688 level = WARN
683 689 formatter = generic
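## Note: SQLAlchemy logs its SQL statements at INFO on the sqlalchemy.engine
## logger above; to see them during development this handler's level can be
## lowered, for example to `level = DEBUG`.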
684 690
685 691 ################
686 692 ## FORMATTERS ##
687 693 ################
688 694
689 695 [formatter_generic]
690 696 class = rhodecode.lib.logging_formatter.ExceptionAwareFormatter
691 697 format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
692 698 datefmt = %Y-%m-%d %H:%M:%S
693 699
694 700 [formatter_color_formatter]
695 701 class = rhodecode.lib.logging_formatter.ColorFormatter
696 702 format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
697 703 datefmt = %Y-%m-%d %H:%M:%S
698 704
699 705 [formatter_color_formatter_sql]
700 706 class = rhodecode.lib.logging_formatter.ColorFormatterSql
701 707 format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
702 708 datefmt = %Y-%m-%d %H:%M:%S
@@ -1,1164 +1,1170 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2011-2017 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21
22 22 """
23 23 Set of diffing helpers, previously part of vcs
24 24 """
25 25
26 26 import collections
27 27 import re
28 28 import difflib
29 29 import logging
30 30
31 31 from itertools import tee, imap
32 32
33 33 from pylons.i18n.translation import _
34 34
35 35 from rhodecode.lib.vcs.exceptions import VCSError
36 36 from rhodecode.lib.vcs.nodes import FileNode, SubModuleNode
37 37 from rhodecode.lib.vcs.backends.base import EmptyCommit
38 38 from rhodecode.lib.helpers import escape
39 39 from rhodecode.lib.utils2 import safe_unicode
40 40
41 41 log = logging.getLogger(__name__)
42 42
43 43 # define max context; a file with more than this number of lines is unusable
44 44 # in a browser anyway
45 45 MAX_CONTEXT = 1024 * 1014
46 46
47 47
48 48 class OPS(object):
49 49 ADD = 'A'
50 50 MOD = 'M'
51 51 DEL = 'D'
52 52
53 53
54 54 def wrap_to_table(str_):
55 55 return '''<table class="code-difftable">
56 56 <tr class="line no-comment">
57 57 <td class="add-comment-line tooltip" title="%s"><span class="add-comment-content"></span></td>
58 58 <td></td>
59 59 <td class="lineno new"></td>
60 60 <td class="code no-comment"><pre>%s</pre></td>
61 61 </tr>
62 62 </table>''' % (_('Click to comment'), str_)
63 63
64 64
65 65 def wrapped_diff(filenode_old, filenode_new, diff_limit=None, file_limit=None,
66 66 show_full_diff=False, ignore_whitespace=True, line_context=3,
67 67 enable_comments=False):
68 68 """
69 69 returns a diff wrapped into a table; checks the cut-off limits for the file and
70 70 the whole diff and presents a proper message
71 71 """
72 72
73 73 if filenode_old is None:
74 74 filenode_old = FileNode(filenode_new.path, '', EmptyCommit())
75 75
76 76 if filenode_old.is_binary or filenode_new.is_binary:
77 77 diff = wrap_to_table(_('Binary file'))
78 78 stats = None
79 79 size = 0
80 80 data = None
81 81
82 82 elif diff_limit != -1 and (diff_limit is None or
83 83 (filenode_old.size < diff_limit and filenode_new.size < diff_limit)):
84 84
85 85 f_gitdiff = get_gitdiff(filenode_old, filenode_new,
86 86 ignore_whitespace=ignore_whitespace,
87 87 context=line_context)
88 88 diff_processor = DiffProcessor(
89 89 f_gitdiff, format='gitdiff', diff_limit=diff_limit,
90 90 file_limit=file_limit, show_full_diff=show_full_diff)
91 91 _parsed = diff_processor.prepare()
92 92
93 93 diff = diff_processor.as_html(enable_comments=enable_comments)
94 94 stats = _parsed[0]['stats'] if _parsed else None
95 95 size = len(diff or '')
96 96 data = _parsed[0] if _parsed else None
97 97 else:
98 98 diff = wrap_to_table(_('Changeset was too big and was cut off, use '
99 99 'diff menu to display this diff'))
100 100 stats = None
101 101 size = 0
102 102 data = None
103 103 if not diff:
104 104 submodules = filter(lambda o: isinstance(o, SubModuleNode),
105 105 [filenode_new, filenode_old])
106 106 if submodules:
107 107 diff = wrap_to_table(escape('Submodule %r' % submodules[0]))
108 108 else:
109 109 diff = wrap_to_table(_('No changes detected'))
110 110
111 111 cs1 = filenode_old.commit.raw_id
112 112 cs2 = filenode_new.commit.raw_id
113 113
114 114 return size, cs1, cs2, diff, stats, data
115 115
116 116
117 117 def get_gitdiff(filenode_old, filenode_new, ignore_whitespace=True, context=3):
118 118 """
119 119 Returns git style diff between given ``filenode_old`` and ``filenode_new``.
120 120
121 121 :param ignore_whitespace: ignore whitespaces in diff
122 122 """
123 123 # make sure we pass in default context
124 124 context = context or 3
125 125 # protect against IntOverflow when passing HUGE context
126 126 if context > MAX_CONTEXT:
127 127 context = MAX_CONTEXT
128 128
129 129 submodules = filter(lambda o: isinstance(o, SubModuleNode),
130 130 [filenode_new, filenode_old])
131 131 if submodules:
132 132 return ''
133 133
134 134 for filenode in (filenode_old, filenode_new):
135 135 if not isinstance(filenode, FileNode):
136 136 raise VCSError(
137 137 "Given object should be FileNode object, not %s"
138 138 % filenode.__class__)
139 139
140 140 repo = filenode_new.commit.repository
141 141 old_commit = filenode_old.commit or repo.EMPTY_COMMIT
142 142 new_commit = filenode_new.commit
143 143
144 144 vcs_gitdiff = repo.get_diff(
145 145 old_commit, new_commit, filenode_new.path,
146 146 ignore_whitespace, context, path1=filenode_old.path)
147 147 return vcs_gitdiff
148 148
149 149 NEW_FILENODE = 1
150 150 DEL_FILENODE = 2
151 151 MOD_FILENODE = 3
152 152 RENAMED_FILENODE = 4
153 153 COPIED_FILENODE = 5
154 154 CHMOD_FILENODE = 6
155 155 BIN_FILENODE = 7
156 156
157 157
158 158 class LimitedDiffContainer(object):
159 159
160 160 def __init__(self, diff_limit, cur_diff_size, diff):
161 161 self.diff = diff
162 162 self.diff_limit = diff_limit
163 163 self.cur_diff_size = cur_diff_size
164 164
165 165 def __getitem__(self, key):
166 166 return self.diff.__getitem__(key)
167 167
168 168 def __iter__(self):
169 169 for l in self.diff:
170 170 yield l
171 171
172 172
173 173 class Action(object):
174 174 """
175 175 Contains constants for the action value of the lines in a parsed diff.
176 176 """
177 177
178 178 ADD = 'add'
179 179 DELETE = 'del'
180 180 UNMODIFIED = 'unmod'
181 181
182 182 CONTEXT = 'context'
183 183 OLD_NO_NL = 'old-no-nl'
184 184 NEW_NO_NL = 'new-no-nl'
185 185
186 186
187 187 class DiffProcessor(object):
188 188 """
189 189 Give it a unified or git diff and it returns a list of the files that were
190 190 mentioned in the diff together with a dict of meta information that
191 191 can be used to render it in an HTML template.
192 192
193 193 .. note:: Unicode handling
194 194
195 195 The original diffs are a byte sequence and can contain filenames
196 196 in mixed encodings. This class generally returns `unicode` objects
197 197 since the result is intended for presentation to the user.
198 198
199 199 """
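# A minimal usage sketch (hedged; `vcs_diff` stands for a diff object obtained
# from a repository backend, e.g. repo.get_diff(...)):
#
#   processor = DiffProcessor(vcs_diff, format='gitdiff',
#                             diff_limit=1024 * 1024,
#                             file_limit=256 * 1024)
#   parsed = processor.prepare()    # list of per-file dicts with chunks/stats
#   html = processor.as_html()      # rendered diff table
#   added, removed = processor.stat()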
200 200 _chunk_re = re.compile(r'^@@ -(\d+)(?:,(\d+))? \+(\d+)(?:,(\d+))? @@(.*)')
201 201 _newline_marker = re.compile(r'^\\ No newline at end of file')
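# e.g. _chunk_re matches a hunk header such as '@@ -12,8 +12,9 @@ def foo():'
# with groups ('12', '8', '12', '9', ' def foo():'); _newline_marker matches
# the literal '\ No newline at end of file' marker lines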
202 202
203 203 # used for inline highlighter word split
204 204 _token_re = re.compile(r'()(&gt;|&lt;|&amp;|\W+?)')
205 205
206 206 # collapse ranges of commits over given number
207 207 _collapse_commits_over = 5
208 208
209 209 def __init__(self, diff, format='gitdiff', diff_limit=None,
210 210 file_limit=None, show_full_diff=True):
211 211 """
212 212 :param diff: A `Diff` object representing a diff from a vcs backend
213 213 :param format: format of diff passed, `udiff` or `gitdiff`
214 214 :param diff_limit: defines the size of a diff that is considered "big";
215 215 based on this parameter the cut-off will be triggered; set to None
216 216 to show the full diff
217 217 """
218 218 self._diff = diff
219 219 self._format = format
220 220 self.adds = 0
221 221 self.removes = 0
222 222 # calculate diff size
223 223 self.diff_limit = diff_limit
224 224 self.file_limit = file_limit
225 225 self.show_full_diff = show_full_diff
226 226 self.cur_diff_size = 0
227 227 self.parsed = False
228 228 self.parsed_diff = []
229 229
230 log.debug('Initialized DiffProcessor with %s mode', format)
230 231 if format == 'gitdiff':
231 232 self.differ = self._highlight_line_difflib
232 233 self._parser = self._parse_gitdiff
233 234 else:
234 235 self.differ = self._highlight_line_udiff
235 236 self._parser = self._new_parse_gitdiff
236 237
237 238 def _copy_iterator(self):
238 239 """
239 240 make a fresh copy of the generator; we should not iterate through
240 241 the original as it's needed for repeated operations on
241 242 this instance of DiffProcessor
242 243 """
243 244 self.__udiff, iterator_copy = tee(self.__udiff)
244 245 return iterator_copy
245 246
246 247 def _escaper(self, string):
247 248 """
248 249 Escaper for diffs: escapes special chars and checks the diff limit
249 250
250 251 :param string:
251 252 """
252 253
253 254 self.cur_diff_size += len(string)
254 255
255 256 if not self.show_full_diff and (self.cur_diff_size > self.diff_limit):
256 257 raise DiffLimitExceeded('Diff Limit Exceeded')
257 258
258 259 return safe_unicode(string)\
259 260 .replace('&', '&amp;')\
260 261 .replace('<', '&lt;')\
261 262 .replace('>', '&gt;')
262 263
263 264 def _line_counter(self, l):
264 265 """
265 266 Checks each line and bumps total adds/removes for this diff
266 267
267 268 :param l:
268 269 """
269 270 if l.startswith('+') and not l.startswith('+++'):
270 271 self.adds += 1
271 272 elif l.startswith('-') and not l.startswith('---'):
272 273 self.removes += 1
273 274 return safe_unicode(l)
274 275
275 276 def _highlight_line_difflib(self, line, next_):
276 277 """
277 278 Highlight inline changes in both lines.
278 279 """
279 280
280 281 if line['action'] == Action.DELETE:
281 282 old, new = line, next_
282 283 else:
283 284 old, new = next_, line
284 285
285 286 oldwords = self._token_re.split(old['line'])
286 287 newwords = self._token_re.split(new['line'])
287 288 sequence = difflib.SequenceMatcher(None, oldwords, newwords)
288 289
289 290 oldfragments, newfragments = [], []
290 291 for tag, i1, i2, j1, j2 in sequence.get_opcodes():
291 292 oldfrag = ''.join(oldwords[i1:i2])
292 293 newfrag = ''.join(newwords[j1:j2])
293 294 if tag != 'equal':
294 295 if oldfrag:
295 296 oldfrag = '<del>%s</del>' % oldfrag
296 297 if newfrag:
297 298 newfrag = '<ins>%s</ins>' % newfrag
298 299 oldfragments.append(oldfrag)
299 300 newfragments.append(newfrag)
300 301
301 302 old['line'] = "".join(oldfragments)
302 303 new['line'] = "".join(newfragments)
303 304
304 305 def _highlight_line_udiff(self, line, next_):
305 306 """
306 307 Highlight inline changes in both lines.
307 308 """
308 309 start = 0
309 310 limit = min(len(line['line']), len(next_['line']))
310 311 while start < limit and line['line'][start] == next_['line'][start]:
311 312 start += 1
312 313 end = -1
313 314 limit -= start
314 315 while -end <= limit and line['line'][end] == next_['line'][end]:
315 316 end -= 1
316 317 end += 1
317 318 if start or end:
318 319 def do(l):
319 320 last = end + len(l['line'])
320 321 if l['action'] == Action.ADD:
321 322 tag = 'ins'
322 323 else:
323 324 tag = 'del'
324 325 l['line'] = '%s<%s>%s</%s>%s' % (
325 326 l['line'][:start],
326 327 tag,
327 328 l['line'][start:last],
328 329 tag,
329 330 l['line'][last:]
330 331 )
331 332 do(line)
332 333 do(next_)
333 334
334 335 def _clean_line(self, line, command):
335 336 if command in ['+', '-', ' ']:
336 337 # only modify the line if it's actually a diff thing
337 338 line = line[1:]
338 339 return line
339 340
340 341 def _parse_gitdiff(self, inline_diff=True):
341 342 _files = []
342 343 diff_container = lambda arg: arg
343 344
344 345 for chunk in self._diff.chunks():
345 346 head = chunk.header
346 347
347 348 diff = imap(self._escaper, chunk.diff.splitlines(1))
348 349 raw_diff = chunk.raw
349 350 limited_diff = False
350 351 exceeds_limit = False
351 352
352 353 op = None
353 354 stats = {
354 355 'added': 0,
355 356 'deleted': 0,
356 357 'binary': False,
357 358 'ops': {},
358 359 }
359 360
360 361 if head['deleted_file_mode']:
361 362 op = OPS.DEL
362 363 stats['binary'] = True
363 364 stats['ops'][DEL_FILENODE] = 'deleted file'
364 365
365 366 elif head['new_file_mode']:
366 367 op = OPS.ADD
367 368 stats['binary'] = True
368 369 stats['ops'][NEW_FILENODE] = 'new file %s' % head['new_file_mode']
369 370 else: # modify operation, can be copy, rename or chmod
370 371
371 372 # CHMOD
372 373 if head['new_mode'] and head['old_mode']:
373 374 op = OPS.MOD
374 375 stats['binary'] = True
375 376 stats['ops'][CHMOD_FILENODE] = (
376 377 'modified file chmod %s => %s' % (
377 378 head['old_mode'], head['new_mode']))
378 379 # RENAME
379 380 if head['rename_from'] != head['rename_to']:
380 381 op = OPS.MOD
381 382 stats['binary'] = True
382 383 stats['ops'][RENAMED_FILENODE] = (
383 384 'file renamed from %s to %s' % (
384 385 head['rename_from'], head['rename_to']))
385 386 # COPY
386 387 if head.get('copy_from') and head.get('copy_to'):
387 388 op = OPS.MOD
388 389 stats['binary'] = True
389 390 stats['ops'][COPIED_FILENODE] = (
390 391 'file copied from %s to %s' % (
391 392 head['copy_from'], head['copy_to']))
392 393
393 394 # If our new parsed headers didn't match anything, fall back to
394 395 # old-style detection
395 396 if op is None:
396 397 if not head['a_file'] and head['b_file']:
397 398 op = OPS.ADD
398 399 stats['binary'] = True
399 400 stats['ops'][NEW_FILENODE] = 'new file'
400 401
401 402 elif head['a_file'] and not head['b_file']:
402 403 op = OPS.DEL
403 404 stats['binary'] = True
404 405 stats['ops'][DEL_FILENODE] = 'deleted file'
405 406
406 407 # it's not ADD nor DELETE
407 408 if op is None:
408 409 op = OPS.MOD
409 410 stats['binary'] = True
410 411 stats['ops'][MOD_FILENODE] = 'modified file'
411 412
412 413 # a real non-binary diff
413 414 if head['a_file'] or head['b_file']:
414 415 try:
415 416 raw_diff, chunks, _stats = self._parse_lines(diff)
416 417 stats['binary'] = False
417 418 stats['added'] = _stats[0]
418 419 stats['deleted'] = _stats[1]
419 420 # explicit mark that it's a modified file
420 421 if op == OPS.MOD:
421 422 stats['ops'][MOD_FILENODE] = 'modified file'
422 423 exceeds_limit = len(raw_diff) > self.file_limit
423 424
424 425 # changed from _escaper function so we validate size of
425 426 # each file instead of the whole diff
426 427 # diff will hide big files but still show small ones
427 428 # from my tests, big files are fairly safe to be parsed
428 429 # but the browser is the bottleneck
429 430 if not self.show_full_diff and exceeds_limit:
430 431 raise DiffLimitExceeded('File Limit Exceeded')
431 432
432 433 except DiffLimitExceeded:
433 434 diff_container = lambda _diff: \
434 435 LimitedDiffContainer(
435 436 self.diff_limit, self.cur_diff_size, _diff)
436 437
437 438 exceeds_limit = len(raw_diff) > self.file_limit
438 439 limited_diff = True
439 440 chunks = []
440 441
441 442 else: # GIT format binary patch, or possibly empty diff
442 443 if head['bin_patch']:
443 444 # we have the operation already extracted, but we simply mark
444 445 # it as a diff we won't show for binary files
445 446 stats['ops'][BIN_FILENODE] = 'binary diff hidden'
446 447 chunks = []
447 448
448 449 if chunks and not self.show_full_diff and op == OPS.DEL:
449 450 # if not full diff mode show deleted file contents
450 451 # TODO: anderson: if the view is not too big, there is no way
451 452 # to see the content of the file
452 453 chunks = []
453 454
454 455 chunks.insert(0, [{
455 456 'old_lineno': '',
456 457 'new_lineno': '',
457 458 'action': Action.CONTEXT,
458 459 'line': msg,
459 460 } for _op, msg in stats['ops'].iteritems()
460 461 if _op not in [MOD_FILENODE]])
461 462
462 463 _files.append({
463 464 'filename': safe_unicode(head['b_path']),
464 465 'old_revision': head['a_blob_id'],
465 466 'new_revision': head['b_blob_id'],
466 467 'chunks': chunks,
467 468 'raw_diff': safe_unicode(raw_diff),
468 469 'operation': op,
469 470 'stats': stats,
470 471 'exceeds_limit': exceeds_limit,
471 472 'is_limited_diff': limited_diff,
472 473 })
473 474
474 475 sorter = lambda info: {OPS.ADD: 0, OPS.MOD: 1,
475 476 OPS.DEL: 2}.get(info['operation'])
476 477
477 478 if not inline_diff:
478 479 return diff_container(sorted(_files, key=sorter))
479 480
480 481 # highlight inline changes
481 482 for diff_data in _files:
482 483 for chunk in diff_data['chunks']:
483 484 lineiter = iter(chunk)
484 485 try:
485 486 while 1:
486 487 line = lineiter.next()
487 488 if line['action'] not in (
488 489 Action.UNMODIFIED, Action.CONTEXT):
489 490 nextline = lineiter.next()
490 491 if nextline['action'] in ['unmod', 'context'] or \
491 492 nextline['action'] == line['action']:
492 493 continue
493 494 self.differ(line, nextline)
494 495 except StopIteration:
495 496 pass
496 497
497 498 return diff_container(sorted(_files, key=sorter))
498 499
499
500 # FIXME: NEWDIFFS: dan: this replaces the old _escaper function
501 def _process_line(self, string):
502 """
503 Process a diff line, checks the diff limit
504
505 :param string:
506 """
507
508 self.cur_diff_size += len(string)
509
500 def _check_large_diff(self):
501 log.debug('Diff exceeds current diff_limit of %s', self.diff_limit)
510 502 if not self.show_full_diff and (self.cur_diff_size > self.diff_limit):
511 raise DiffLimitExceeded('Diff Limit Exceeded')
512
513 return safe_unicode(string)
503 raise DiffLimitExceeded('Diff Limit `%s` Exceeded', self.diff_limit)
514 504
515 505 # FIXME: NEWDIFFS: dan: this replaces _parse_gitdiff
516 506 def _new_parse_gitdiff(self, inline_diff=True):
517 507 _files = []
508
509 # this can be overridden later to a LimitedDiffContainer type
518 510 diff_container = lambda arg: arg
511
519 512 for chunk in self._diff.chunks():
520 513 head = chunk.header
521 514 log.debug('parsing diff %r' % head)
522 515
523 diff = imap(self._process_line, chunk.diff.splitlines(1))
524 516 raw_diff = chunk.raw
525 517 limited_diff = False
526 518 exceeds_limit = False
527 # if 'empty_file_to_modify_and_rename' in head['a_path']:
528 # 1/0
519
529 520 op = None
530 521 stats = {
531 522 'added': 0,
532 523 'deleted': 0,
533 524 'binary': False,
534 525 'old_mode': None,
535 526 'new_mode': None,
536 527 'ops': {},
537 528 }
538 529 if head['old_mode']:
539 530 stats['old_mode'] = head['old_mode']
540 531 if head['new_mode']:
541 532 stats['new_mode'] = head['new_mode']
542 533 if head['b_mode']:
543 534 stats['new_mode'] = head['b_mode']
544 535
536 # delete file
545 537 if head['deleted_file_mode']:
546 538 op = OPS.DEL
547 539 stats['binary'] = True
548 540 stats['ops'][DEL_FILENODE] = 'deleted file'
549 541
542 # new file
550 543 elif head['new_file_mode']:
551 544 op = OPS.ADD
552 545 stats['binary'] = True
553 546 stats['old_mode'] = None
554 547 stats['new_mode'] = head['new_file_mode']
555 548 stats['ops'][NEW_FILENODE] = 'new file %s' % head['new_file_mode']
556 else: # modify operation, can be copy, rename or chmod
557 549
550 # modify operation, can be copy, rename or chmod
551 else:
558 552 # CHMOD
559 553 if head['new_mode'] and head['old_mode']:
560 554 op = OPS.MOD
561 555 stats['binary'] = True
562 556 stats['ops'][CHMOD_FILENODE] = (
563 557 'modified file chmod %s => %s' % (
564 558 head['old_mode'], head['new_mode']))
565 559
566 560 # RENAME
567 561 if head['rename_from'] != head['rename_to']:
568 562 op = OPS.MOD
569 563 stats['binary'] = True
570 564 stats['renamed'] = (head['rename_from'], head['rename_to'])
571 565 stats['ops'][RENAMED_FILENODE] = (
572 566 'file renamed from %s to %s' % (
573 567 head['rename_from'], head['rename_to']))
574 568 # COPY
575 569 if head.get('copy_from') and head.get('copy_to'):
576 570 op = OPS.MOD
577 571 stats['binary'] = True
578 572 stats['copied'] = (head['copy_from'], head['copy_to'])
579 573 stats['ops'][COPIED_FILENODE] = (
580 574 'file copied from %s to %s' % (
581 575 head['copy_from'], head['copy_to']))
582 576
583 577 # If our new parsed headers didn't match anything, fall back to
584 578 # old-style detection
585 579 if op is None:
586 580 if not head['a_file'] and head['b_file']:
587 581 op = OPS.ADD
588 582 stats['binary'] = True
589 583 stats['new_file'] = True
590 584 stats['ops'][NEW_FILENODE] = 'new file'
591 585
592 586 elif head['a_file'] and not head['b_file']:
593 587 op = OPS.DEL
594 588 stats['binary'] = True
595 589 stats['ops'][DEL_FILENODE] = 'deleted file'
596 590
597 591 # it's not ADD nor DELETE
598 592 if op is None:
599 593 op = OPS.MOD
600 594 stats['binary'] = True
601 595 stats['ops'][MOD_FILENODE] = 'modified file'
602 596
603 597 # a real non-binary diff
604 598 if head['a_file'] or head['b_file']:
599 diff = iter(chunk.diff.splitlines(1))
600
601 # append each file to the diff size
602 raw_chunk_size = len(raw_diff)
603
604 exceeds_limit = raw_chunk_size > self.file_limit
605 self.cur_diff_size += raw_chunk_size
606
605 607 try:
608 # Check each file instead of the whole diff.
609 # Diff will hide big files but still show small ones.
610 # From the tests big files are fairly safe to be parsed
611 # but the browser is the bottleneck.
612 if not self.show_full_diff and exceeds_limit:
613 log.debug('File `%s` exceeds current file_limit of %s',
614 safe_unicode(head['b_path']), self.file_limit)
615 raise DiffLimitExceeded(
616 'File Limit %s Exceeded', self.file_limit)
617
618 self._check_large_diff()
619
606 620 raw_diff, chunks, _stats = self._new_parse_lines(diff)
607 621 stats['binary'] = False
608 622 stats['added'] = _stats[0]
609 623 stats['deleted'] = _stats[1]
610 624 # explicit mark that it's a modified file
611 625 if op == OPS.MOD:
612 626 stats['ops'][MOD_FILENODE] = 'modified file'
613 exceeds_limit = len(raw_diff) > self.file_limit
614
615 # changed from _escaper function so we validate size of
616 # each file instead of the whole diff
617 # diff will hide big files but still show small ones
618 # from my tests, big files are fairly safe to be parsed
619 # but the browser is the bottleneck
620 if not self.show_full_diff and exceeds_limit:
621 raise DiffLimitExceeded('File Limit Exceeded')
622 627
623 628 except DiffLimitExceeded:
624 629 diff_container = lambda _diff: \
625 630 LimitedDiffContainer(
626 631 self.diff_limit, self.cur_diff_size, _diff)
627 632
628 exceeds_limit = len(raw_diff) > self.file_limit
629 633 limited_diff = True
630 634 chunks = []
631 635
632 636 else: # GIT format binary patch, or possibly empty diff
633 637 if head['bin_patch']:
634 638 # we have the operation already extracted, but we simply mark
635 639 # it as a diff we won't show for binary files
636 640 stats['ops'][BIN_FILENODE] = 'binary diff hidden'
637 641 chunks = []
638 642
643 # Hide content of deleted node by setting empty chunks
639 644 if chunks and not self.show_full_diff and op == OPS.DEL:
640 645 # if not full diff mode show deleted file contents
641 646 # TODO: anderson: if the view is not too big, there is no way
642 647 # to see the content of the file
643 648 chunks = []
644 649
645 chunks.insert(0, [{
646 'old_lineno': '',
650 chunks.insert(
651 0, [{'old_lineno': '',
647 652 'new_lineno': '',
648 653 'action': Action.CONTEXT,
649 654 'line': msg,
650 655 } for _op, msg in stats['ops'].iteritems()
651 656 if _op not in [MOD_FILENODE]])
652 657
653 658 original_filename = safe_unicode(head['a_path'])
654 659 _files.append({
655 660 'original_filename': original_filename,
656 661 'filename': safe_unicode(head['b_path']),
657 662 'old_revision': head['a_blob_id'],
658 663 'new_revision': head['b_blob_id'],
659 664 'chunks': chunks,
660 665 'raw_diff': safe_unicode(raw_diff),
661 666 'operation': op,
662 667 'stats': stats,
663 668 'exceeds_limit': exceeds_limit,
664 669 'is_limited_diff': limited_diff,
665 670 })
666 671
667
668 672 sorter = lambda info: {OPS.ADD: 0, OPS.MOD: 1,
669 673 OPS.DEL: 2}.get(info['operation'])
670 674
671 675 return diff_container(sorted(_files, key=sorter))
672 676
673 677 # FIXME: NEWDIFFS: dan: this gets replaced by _new_parse_lines
674 678 def _parse_lines(self, diff):
675 679 """
676 680 Parse the diff and return data for the template.
677 681 """
678 682
679 683 lineiter = iter(diff)
680 684 stats = [0, 0]
681 685 chunks = []
682 686 raw_diff = []
683 687
684 688 try:
685 689 line = lineiter.next()
686 690
687 691 while line:
688 692 raw_diff.append(line)
689 693 lines = []
690 694 chunks.append(lines)
691 695
692 696 match = self._chunk_re.match(line)
693 697
694 698 if not match:
695 699 break
696 700
697 701 gr = match.groups()
698 702 (old_line, old_end,
699 703 new_line, new_end) = [int(x or 1) for x in gr[:-1]]
700 704 old_line -= 1
701 705 new_line -= 1
702 706
703 707 context = len(gr) == 5
704 708 old_end += old_line
705 709 new_end += new_line
706 710
707 711 if context:
708 712 # skip context only if it's the first line
709 713 if int(gr[0]) > 1:
710 714 lines.append({
711 715 'old_lineno': '...',
712 716 'new_lineno': '...',
713 717 'action': Action.CONTEXT,
714 718 'line': line,
715 719 })
716 720
717 721 line = lineiter.next()
718 722
719 723 while old_line < old_end or new_line < new_end:
720 724 command = ' '
721 725 if line:
722 726 command = line[0]
723 727
724 728 affects_old = affects_new = False
725 729
726 730 # ignore those if we don't expect them
727 731 if command in '#@':
728 732 continue
729 733 elif command == '+':
730 734 affects_new = True
731 735 action = Action.ADD
732 736 stats[0] += 1
733 737 elif command == '-':
734 738 affects_old = True
735 739 action = Action.DELETE
736 740 stats[1] += 1
737 741 else:
738 742 affects_old = affects_new = True
739 743 action = Action.UNMODIFIED
740 744
741 745 if not self._newline_marker.match(line):
742 746 old_line += affects_old
743 747 new_line += affects_new
744 748 lines.append({
745 749 'old_lineno': affects_old and old_line or '',
746 750 'new_lineno': affects_new and new_line or '',
747 751 'action': action,
748 752 'line': self._clean_line(line, command)
749 753 })
750 754 raw_diff.append(line)
751 755
752 756 line = lineiter.next()
753 757
754 758 if self._newline_marker.match(line):
755 759 # we need to append to lines, since this is not
756 760 # counted in the line specs of the diff
757 761 lines.append({
758 762 'old_lineno': '...',
759 763 'new_lineno': '...',
760 764 'action': Action.CONTEXT,
761 765 'line': self._clean_line(line, command)
762 766 })
763 767
764 768 except StopIteration:
765 769 pass
766 770 return ''.join(raw_diff), chunks, stats
767 771
768 772 # FIXME: NEWDIFFS: dan: this replaces _parse_lines
769 def _new_parse_lines(self, diff):
773 def _new_parse_lines(self, diff_iter):
770 774 """
771 775 Parse the diff and return data for the template.
772 776 """
773 777
774 lineiter = iter(diff)
775 778 stats = [0, 0]
776 779 chunks = []
777 780 raw_diff = []
778 781
782 diff_iter = imap(lambda s: safe_unicode(s), diff_iter)
783
779 784 try:
780 line = lineiter.next()
785 line = diff_iter.next()
781 786
782 787 while line:
783 788 raw_diff.append(line)
784 789 match = self._chunk_re.match(line)
785 790
786 791 if not match:
787 792 break
788 793
789 794 gr = match.groups()
790 795 (old_line, old_end,
791 796 new_line, new_end) = [int(x or 1) for x in gr[:-1]]
792 797
793 798 lines = []
794 799 hunk = {
795 800 'section_header': gr[-1],
796 801 'source_start': old_line,
797 802 'source_length': old_end,
798 803 'target_start': new_line,
799 804 'target_length': new_end,
800 805 'lines': lines,
801 806 }
802 807 chunks.append(hunk)
803 808
804 809 old_line -= 1
805 810 new_line -= 1
806 811
807 812 context = len(gr) == 5
808 813 old_end += old_line
809 814 new_end += new_line
810 815
811 line = lineiter.next()
816 line = diff_iter.next()
812 817
813 818 while old_line < old_end or new_line < new_end:
814 819 command = ' '
815 820 if line:
816 821 command = line[0]
817 822
818 823 affects_old = affects_new = False
819 824
820 825 # ignore those if we don't expect them
821 826 if command in '#@':
822 827 continue
823 828 elif command == '+':
824 829 affects_new = True
825 830 action = Action.ADD
826 831 stats[0] += 1
827 832 elif command == '-':
828 833 affects_old = True
829 834 action = Action.DELETE
830 835 stats[1] += 1
831 836 else:
832 837 affects_old = affects_new = True
833 838 action = Action.UNMODIFIED
834 839
835 840 if not self._newline_marker.match(line):
836 841 old_line += affects_old
837 842 new_line += affects_new
838 843 lines.append({
839 844 'old_lineno': affects_old and old_line or '',
840 845 'new_lineno': affects_new and new_line or '',
841 846 'action': action,
842 847 'line': self._clean_line(line, command)
843 848 })
844 849 raw_diff.append(line)
845 850
846 line = lineiter.next()
851 line = diff_iter.next()
847 852
848 853 if self._newline_marker.match(line):
849 854 # we need to append to lines, since this is not
850 855 # counted in the line specs of the diff
851 856 if affects_old:
852 857 action = Action.OLD_NO_NL
853 858 elif affects_new:
854 859 action = Action.NEW_NO_NL
855 860 else:
856 861 raise Exception('invalid context for no newline')
857 862
858 863 lines.append({
859 864 'old_lineno': None,
860 865 'new_lineno': None,
861 866 'action': action,
862 867 'line': self._clean_line(line, command)
863 868 })
864 869
865 870 except StopIteration:
866 871 pass
872
867 873 return ''.join(raw_diff), chunks, stats
868 874
869 875 def _safe_id(self, idstring):
870 876 """Make a string safe for including in an id attribute.
871 877
872 878 The HTML spec says that id attributes 'must begin with
873 879 a letter ([A-Za-z]) and may be followed by any number
874 880 of letters, digits ([0-9]), hyphens ("-"), underscores
875 881 ("_"), colons (":"), and periods (".")'. These regexps
876 882 are slightly over-zealous, in that they remove colons
877 883 and periods unnecessarily.
878 884
879 885 Whitespace is transformed into underscores, and then
880 886 anything which is not a hyphen or a character that
881 887 matches \w (alphanumerics and underscore) is removed.
882 888
883 889 """
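# A worked example: 'src/my file.py' -> 'srcmy_filepy' (whitespace becomes '_',
# non-word characters other than '-' are dropped, result is lowercased)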
884 890 # Transform all whitespace to underscore
885 891 idstring = re.sub(r'\s', "_", '%s' % idstring)
886 892 # Remove everything that is not a hyphen or a member of \w
887 893 idstring = re.sub(r'(?!-)\W', "", idstring).lower()
888 894 return idstring
889 895
890 896 def prepare(self, inline_diff=True):
891 897 """
892 898 Prepare the passed udiff for HTML rendering.
893 899
894 900 :return: A list of dicts with diff information.
895 901 """
896 902 parsed = self._parser(inline_diff=inline_diff)
897 903 self.parsed = True
898 904 self.parsed_diff = parsed
899 905 return parsed
900 906
901 907 def as_raw(self, diff_lines=None):
902 908 """
903 909 Returns raw diff as a byte string
904 910 """
905 911 return self._diff.raw
906 912
907 913 def as_html(self, table_class='code-difftable', line_class='line',
908 914 old_lineno_class='lineno old', new_lineno_class='lineno new',
909 915 code_class='code', enable_comments=False, parsed_lines=None):
910 916 """
911 917 Return given diff as html table with customized css classes
912 918 """
913 919 def _link_to_if(condition, label, url):
914 920 """
915 921 Generates a link if the condition is met or just the label if not.
916 922 """
917 923
918 924 if condition:
919 925 return '''<a href="%(url)s" class="tooltip"
920 926 title="%(title)s">%(label)s</a>''' % {
921 927 'title': _('Click to select line'),
922 928 'url': url,
923 929 'label': label
924 930 }
925 931 else:
926 932 return label
927 933 if not self.parsed:
928 934 self.prepare()
929 935
930 936 diff_lines = self.parsed_diff
931 937 if parsed_lines:
932 938 diff_lines = parsed_lines
933 939
934 940 _html_empty = True
935 941 _html = []
936 942 _html.append('''<table class="%(table_class)s">\n''' % {
937 943 'table_class': table_class
938 944 })
939 945
940 946 for diff in diff_lines:
941 947 for line in diff['chunks']:
942 948 _html_empty = False
943 949 for change in line:
944 950 _html.append('''<tr class="%(lc)s %(action)s">\n''' % {
945 951 'lc': line_class,
946 952 'action': change['action']
947 953 })
948 954 anchor_old_id = ''
949 955 anchor_new_id = ''
950 956 anchor_old = "%(filename)s_o%(oldline_no)s" % {
951 957 'filename': self._safe_id(diff['filename']),
952 958 'oldline_no': change['old_lineno']
953 959 }
954 960 anchor_new = "%(filename)s_n%(oldline_no)s" % {
955 961 'filename': self._safe_id(diff['filename']),
956 962 'oldline_no': change['new_lineno']
957 963 }
958 964 cond_old = (change['old_lineno'] != '...' and
959 965 change['old_lineno'])
960 966 cond_new = (change['new_lineno'] != '...' and
961 967 change['new_lineno'])
962 968 if cond_old:
963 969 anchor_old_id = 'id="%s"' % anchor_old
964 970 if cond_new:
965 971 anchor_new_id = 'id="%s"' % anchor_new
966 972
967 973 if change['action'] != Action.CONTEXT:
968 974 anchor_link = True
969 975 else:
970 976 anchor_link = False
971 977
972 978 ###########################################################
973 979 # COMMENT ICONS
974 980 ###########################################################
975 981 _html.append('''\t<td class="add-comment-line"><span class="add-comment-content">''')
976 982
977 983 if enable_comments and change['action'] != Action.CONTEXT:
978 984 _html.append('''<a href="#"><span class="icon-comment-add"></span></a>''')
979 985
980 986 _html.append('''</span></td><td class="comment-toggle tooltip" title="Toggle Comment Thread"><i class="icon-comment"></i></td>\n''')
981 987
982 988 ###########################################################
983 989 # OLD LINE NUMBER
984 990 ###########################################################
985 991 _html.append('''\t<td %(a_id)s class="%(olc)s">''' % {
986 992 'a_id': anchor_old_id,
987 993 'olc': old_lineno_class
988 994 })
989 995
990 996 _html.append('''%(link)s''' % {
991 997 'link': _link_to_if(anchor_link, change['old_lineno'],
992 998 '#%s' % anchor_old)
993 999 })
994 1000 _html.append('''</td>\n''')
995 1001 ###########################################################
996 1002 # NEW LINE NUMBER
997 1003 ###########################################################
998 1004
999 1005 _html.append('''\t<td %(a_id)s class="%(nlc)s">''' % {
1000 1006 'a_id': anchor_new_id,
1001 1007 'nlc': new_lineno_class
1002 1008 })
1003 1009
1004 1010 _html.append('''%(link)s''' % {
1005 1011 'link': _link_to_if(anchor_link, change['new_lineno'],
1006 1012 '#%s' % anchor_new)
1007 1013 })
1008 1014 _html.append('''</td>\n''')
1009 1015 ###########################################################
1010 1016 # CODE
1011 1017 ###########################################################
1012 1018 code_classes = [code_class]
1013 1019 if (not enable_comments or
1014 1020 change['action'] == Action.CONTEXT):
1015 1021 code_classes.append('no-comment')
1016 1022 _html.append('\t<td class="%s">' % ' '.join(code_classes))
1017 1023 _html.append('''\n\t\t<pre>%(code)s</pre>\n''' % {
1018 1024 'code': change['line']
1019 1025 })
1020 1026
1021 1027 _html.append('''\t</td>''')
1022 1028 _html.append('''\n</tr>\n''')
1023 1029 _html.append('''</table>''')
1024 1030 if _html_empty:
1025 1031 return None
1026 1032 return ''.join(_html)
1027 1033
1028 1034 def stat(self):
1029 1035 """
1030 1036 Returns tuple of added, and removed lines for this instance
1031 1037 """
1032 1038 return self.adds, self.removes
1033 1039
1034 1040 def get_context_of_line(
1035 1041 self, path, diff_line=None, context_before=3, context_after=3):
1036 1042 """
1037 1043 Returns the context lines for the specified diff line.
1038 1044
1039 1045 :type diff_line: :class:`DiffLineNumber`
1040 1046 """
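# A hedged example (the path is hypothetical): exactly one side of the
# DiffLineNumber should be given, the other left as None, e.g.
#   processor.get_context_of_line('setup.py', DiffLineNumber(old=None, new=42))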
1041 1047 assert self.parsed, "DiffProcessor is not initialized."
1042 1048
1043 1049 if None not in diff_line:
1044 1050 raise ValueError(
1045 1051 "Cannot specify both line numbers: {}".format(diff_line))
1046 1052
1047 1053 file_diff = self._get_file_diff(path)
1048 1054 chunk, idx = self._find_chunk_line_index(file_diff, diff_line)
1049 1055
1050 1056 first_line_to_include = max(idx - context_before, 0)
1051 1057 first_line_after_context = idx + context_after + 1
1052 1058 context_lines = chunk[first_line_to_include:first_line_after_context]
1053 1059
1054 1060 line_contents = [
1055 1061 _context_line(line) for line in context_lines
1056 1062 if _is_diff_content(line)]
1057 1063 # TODO: johbo: Interim fixup, the diff chunks drop the final newline.
1058 1064 # Once they are fixed, we can drop this line here.
1059 1065 if line_contents:
1060 1066 line_contents[-1] = (
1061 1067 line_contents[-1][0], line_contents[-1][1].rstrip('\n') + '\n')
1062 1068 return line_contents
1063 1069
1064 1070 def find_context(self, path, context, offset=0):
1065 1071 """
1066 1072 Finds the given `context` inside of the diff.
1067 1073
1068 1074 Use the parameter `offset` to specify which offset the target line has
1069 1075 inside of the given `context`. This way the correct diff line will be
1070 1076 returned.
1071 1077
1072 1078 :param offset: Shall be used to specify the offset of the main line
1073 1079 within the given `context`.
1074 1080 """
1075 1081 if offset < 0 or offset >= len(context):
1076 1082 raise ValueError(
1077 1083 "Only positive values up to the length of the context "
1078 1084 "minus one are allowed.")
1079 1085
1080 1086 matches = []
1081 1087 file_diff = self._get_file_diff(path)
1082 1088
1083 1089 for chunk in file_diff['chunks']:
1084 1090 context_iter = iter(context)
1085 1091 for line_idx, line in enumerate(chunk):
1086 1092 try:
1087 1093 if _context_line(line) == context_iter.next():
1088 1094 continue
1089 1095 except StopIteration:
1090 1096 matches.append((line_idx, chunk))
1091 1097 context_iter = iter(context)
1092 1098
1093 1099 # Increment position and trigger StopIteration
1094 1100 # if we had a match at the end
1095 1101 line_idx += 1
1096 1102 try:
1097 1103 context_iter.next()
1098 1104 except StopIteration:
1099 1105 matches.append((line_idx, chunk))
1100 1106
1101 1107 effective_offset = len(context) - offset
1102 1108 found_at_diff_lines = [
1103 1109 _line_to_diff_line_number(chunk[idx - effective_offset])
1104 1110 for idx, chunk in matches]
1105 1111
1106 1112 return found_at_diff_lines
1107 1113
1108 1114 def _get_file_diff(self, path):
1109 1115 for file_diff in self.parsed_diff:
1110 1116 if file_diff['filename'] == path:
1111 1117 break
1112 1118 else:
1113 1119 raise FileNotInDiffException("File {} not in diff".format(path))
1114 1120 return file_diff
1115 1121
1116 1122 def _find_chunk_line_index(self, file_diff, diff_line):
1117 1123 for chunk in file_diff['chunks']:
1118 1124 for idx, line in enumerate(chunk):
1119 1125 if line['old_lineno'] == diff_line.old:
1120 1126 return chunk, idx
1121 1127 if line['new_lineno'] == diff_line.new:
1122 1128 return chunk, idx
1123 1129 raise LineNotInDiffException(
1124 1130 "The line {} is not part of the diff.".format(diff_line))
1125 1131
1126 1132
1127 1133 def _is_diff_content(line):
1128 1134 return line['action'] in (
1129 1135 Action.UNMODIFIED, Action.ADD, Action.DELETE)
1130 1136
1131 1137
1132 1138 def _context_line(line):
1133 1139 return (line['action'], line['line'])
1134 1140
1135 1141
1136 1142 DiffLineNumber = collections.namedtuple('DiffLineNumber', ['old', 'new'])
1137 1143
1138 1144
1139 1145 def _line_to_diff_line_number(line):
1140 1146 new_line_no = line['new_lineno'] or None
1141 1147 old_line_no = line['old_lineno'] or None
1142 1148 return DiffLineNumber(old=old_line_no, new=new_line_no)
1143 1149
1144 1150
1145 1151 class FileNotInDiffException(Exception):
1146 1152 """
1147 1153 Raised when the context for a missing file is requested.
1148 1154
1149 1155 If you request the context for a line in a file which is not part of the
1150 1156 given diff, then this exception is raised.
1151 1157 """
1152 1158
1153 1159
1154 1160 class LineNotInDiffException(Exception):
1155 1161 """
1156 1162 Raised when the context for a missing line is requested.
1157 1163
1158 1164 If you request the context for a line in a file and this line is not
1159 1165 part of the given diff, then this exception is raised.
1160 1166 """
1161 1167
1162 1168
1163 1169 class DiffLimitExceeded(Exception):
1164 1170 pass